diff --git a/Dockerfile b/Dockerfile index 3b61b06..a16c288 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,5 +1,5 @@ # Stage 1: Builder - Install dependencies -FROM node:20-alpine AS builder +FROM node:25-alpine AS builder # Install git (required for npm to install dependencies from GitHub) RUN apk add --no-cache git @@ -9,7 +9,7 @@ WORKDIR /app # Copy package.json and package-lock.json (if available) to leverage Docker cache # Use wildcards to ensure both package.json and package-lock.json (or yarn.lock/pnpm-lock.yaml) are copied -COPY package*.json ./ +COPY package.json ./ # Install production dependencies # This command automatically handles package-lock.json if it exists, otherwise it creates one. @@ -17,7 +17,7 @@ COPY package*.json ./ RUN npm install # Stage 2: Runner - Copy application code and run -FROM node:20-alpine +FROM node:25-alpine # Set working directory WORKDIR /app @@ -30,10 +30,6 @@ COPY --from=builder /app/node_modules ./node_modules COPY src/ ./src/ COPY config.default.js ./config.default.js COPY package.json ./package.json -COPY biome.json ./biome.json -COPY commitlint.config.mjs ./commitlint.config.mjs -COPY LICENSE ./LICENSE -COPY README.md ./README.md # Expose the port the application listens on (default is 3000 from config.default.js) EXPOSE 3000 diff --git a/README.md b/README.md index 1709097..5d0f388 100644 --- a/README.md +++ b/README.md @@ -91,35 +91,35 @@ docker run -p 2333:2333 nodelink NodeLink is compatible with most Lavalink clients, as it implements nearly the entire original API. However, some clients may not work properly, since NodeLink changes certain behaviors and endpoints. -| Client | Platform | v3 supported? | NodeLink Features? | NodeLink major version | -| ------------------------------------------------------------------- | ------------ | ------------- | ------------------ | ---------------------- | -| [Lavalink-Client](https://github.com/lavalink-devs/Lavalink-Client) | JVM | unknown | No | v1 and v2 | -| [Lavalink.kt](https://github.com/DRSchlaubi/Lavalink.kt) | Kotlin | unknown | No | v1 | -| [DisGoLink](https://github.com/disgoorg/disgolink) | Go | unknown | No | v1 and v2 | -| [Lavalink.py](https://github.com/devoxin/lavalink.py) | Python | unknown | No | v1 and v2 | -| [Mafic](https://github.com/ooliver1/mafic) | Python | unknown | No | v1 and v2 | -| [Wavelink](https://github.com/PythonistaGuild/Wavelink) | Python | Yes | No | v1, v2, v3 | -| [Pomice](https://github.com/cloudwithax/pomice) | Python | unknown | No | v1 and v2 | -| [lava-lyra](https://github.com/ParrotXray/lava-lyra) | Python | Yes | No | v3 | -| [Hikari-ongaku](https://github.com/MPlatypus/hikari-ongaku) | Python | unknown | No | v1 and v2 | -| [Moonlink.js](https://github.com/1Lucas1apk/moonlink.js) | TypeScript | Yes | No | v1, v2, v3 | -| [Magmastream](https://github.com/Blackfort-Hosting/magmastream) | TypeScript | unknown | No | v1 | -| [Lavacord](https://github.com/lavacord/Lavacord) | TypeScript | unknown | No | v1 and v2 | -| [Shoukaku](https://github.com/Deivu/Shoukaku) | TypeScript | Yes | No | v1, v2, v3 | -| [Lavalink-Client](https://github.com/tomato6966/Lavalink-Client) | TypeScript | Yes | No | v1 and v3 | -| [Rainlink](https://github.com/RainyXeon/Rainlink) | TypeScript | unknown | No | v1 and v2 | -| [Poru](https://github.com/parasop/Poru) | TypeScript | unknown | No | v1 and v2 | -| [Blue.ts](https://github.com/ftrapture/blue.ts) | TypeScript | unknown | No | v1 and v2 | -| [FastLink](https://github.com/PerformanC/FastLink) | Node.js | Yes | No | v1, 
v2, v3 | -| [Riffy](https://github.com/riffy-team/riffy) | Node.js | Yes | No | v1, v2, v3 | -| [TsumiLink](https://github.com/Fyphen1223/TsumiLink) | Node.js | unknown | No | v1 and v2 | -| [AquaLink](https://github.com/ToddyTheNoobDud/AquaLink) | JavaScript | Yes | No | v1, v2, v3 | -| [DisCatSharp](https://github.com/Aiko-IT-Systems/DisCatSharp) | .NET | unknown | No | v1 and v2 | -| [Lavalink4NET](https://github.com/angelobreuer/Lavalink4NET) | .NET | unknown | No | v1 and v2 | -| [Nomia](https://github.com/DHCPCD9/Nomia) | .NET | unknown | No | v1 and v2 | -| [CogLink](https://github.com/PerformanC/Coglink) | C | unknown | No | v1 and v2 | -| [Lavalink-rs](https://gitlab.com/vicky5124/lavalink-rs) | Rust, Python | unknown | No | v1 and v2 | -| [nyxx_lavalink](https://github.com/nyxx-discord/nyxx_lavalink) | Dart | unknown | No | v1 | +| Client | Platform | v3 supported? | NodeLink Features? | NodeLink major version | Notes | +| ------------------------------------------------------------------- | ------------ | ------------- | ------------------ | ---------------------- | --------------------------------------------------------------------- | +| [Lavalink-Client](https://github.com/lavalink-devs/Lavalink-Client) | JVM | unknown | No | v1 and v2 | | +| [Lavalink.kt](https://github.com/DRSchlaubi/Lavalink.kt) | Kotlin | unknown | No | v1 | | +| [DisGoLink](https://github.com/disgoorg/disgolink) | Go | unknown | No | v1 and v2 | | +| [Lavalink.py](https://github.com/devoxin/lavalink.py) | Python | unknown | No | v1 and v2 | | +| [Mafic](https://github.com/ooliver1/mafic) | Python | unknown | No | v1 and v2 | | +| [Wavelink](https://github.com/PythonistaGuild/Wavelink) | Python | Yes | No | v1, v2, v3 | | +| [Pomice](https://github.com/cloudwithax/pomice) | Python | unknown | No | v1 and v2 | | +| [lava-lyra](https://github.com/ParrotXray/lava-lyra) | Python | Yes | Yes | v3 | | +| [Hikari-ongaku](https://github.com/MPlatypus/hikari-ongaku) | Python | unknown | No | v1 and v2 | | +| [Moonlink.js](https://github.com/1Lucas1apk/moonlink.js) | TypeScript | Yes | No | v1, v2, v3 | | +| [Magmastream](https://github.com/Blackfort-Hosting/magmastream) | TypeScript | unknown | No | v1 | | +| [Lavacord](https://github.com/lavacord/Lavacord) | TypeScript | unknown | No | v1 and v2 | | +| [Shoukaku](https://github.com/Deivu/Shoukaku) | TypeScript | Yes | No | v1, v2, v3 | | +| [Lavalink-Client](https://github.com/tomato6966/Lavalink-Client) | TypeScript | No | No | v1 | Unstable for some users who have reported this over the months | +| [Rainlink](https://github.com/RainyXeon/Rainlink) | TypeScript | unknown | No | v1 and v2 | | +| [Poru](https://github.com/parasop/Poru) | TypeScript | unknown | No | v1 and v2 | | +| [Blue.ts](https://github.com/ftrapture/blue.ts) | TypeScript | unknown | No | v1 and v2 | | +| [FastLink](https://github.com/PerformanC/FastLink) | Node.js | Yes | No | v1, v2, v3 | | +| [Riffy](https://github.com/riffy-team/riffy) | Node.js | Yes | No | v1, v2, v3 | | +| [TsumiLink](https://github.com/Fyphen1223/TsumiLink) | Node.js | unknown | No | v1 and v2 | | +| [AquaLink](https://github.com/ToddyTheNoobDud/AquaLink) | JavaScript | Yes | Yes | v1, v2, v3 | | +| [DisCatSharp](https://github.com/Aiko-IT-Systems/DisCatSharp) | .NET | unknown | No | v1 and v2 | | +| [Lavalink4NET](https://github.com/angelobreuer/Lavalink4NET) | .NET | unknown | No | v1 and v2 | | +| [Nomia](https://github.com/DHCPCD9/Nomia) | .NET | unknown | No | v1 and v2 | | +| 
[CogLink](https://github.com/PerformanC/Coglink) | C | unknown | No | v1 and v2 | | +| [Lavalink-rs](https://gitlab.com/vicky5124/lavalink-rs) | Rust, Python | unknown | No | v1 and v2 | | +| [nyxx_lavalink](https://github.com/nyxx-discord/nyxx_lavalink) | Dart | unknown | No | v1 | | > [!IMPORTANT] > Lack of explicit NodeLink support *usually* means that the client implements the Lavalink API inconsistently, not following its defined formats and fields. Using such clients may lead to unexpected behavior. diff --git a/config.default.js b/config.default.js index 5c09d46..8f1bf5b 100644 --- a/config.default.js +++ b/config.default.js @@ -9,18 +9,36 @@ export default { enabled: true, // active cluster (or use env CLUSTER_ENABLED) workers: 0, // 0 => uses os.cpus().length, or specify a number (1 = 2 processes total: master + 1 worker) minWorkers: 1, // Minimum workers to keep alive (improves availability during bursts) + specializedSourceWorker: { + enabled: true, // If true, source loading (search, lyrics, etc.) is delegated to dedicated workers to prevent voice worker lag + count: 1, // Number of separate process clusters for source operations + microWorkers: 2, // Number of worker threads per process cluster + tasksPerWorker: 32, // Number of parallel tasks each micro-worker can handle before queuing + silentLogs: true // If true, micro-workers will only log warnings and errors + }, commandTimeout: 6000, // Timeout for heavy operations like loadTracks (6s) fastCommandTimeout: 4000, // Timeout for player commands like play/pause (4s) maxRetries: 2, // Number of retry attempts on timeout or worker failure + hibernation: { + enabled: true, + timeoutMs: 1200000 + }, scaling: { - // New object to group scaling configurations + //scaling configurations maxPlayersPerWorker: 20, // Reference capacity for utilization calculation targetUtilization: 0.7, // Target utilization for scaling up/down scaleUpThreshold: 0.75, // Utilization threshold to scale up scaleDownThreshold: 0.3, // Utilization threshold to scale down checkIntervalMs: 5000, // Interval to check for scaling needs idleWorkerTimeoutMs: 60000, // Time in ms an idle worker should wait before being removed - queueLengthScaleUpFactor: 5 // How many commands in queue per active worker trigger scale up + queueLengthScaleUpFactor: 5, // How many commands in queue per active worker trigger scale up + lagPenaltyLimit: 60, // Event loop lag threshold (ms) to penalize worker cost + cpuPenaltyLimit: 0.85 // CPU usage threshold (85% of a core) to force scale up + }, + endpoint: { + patchEnabled: true, + allowExternalPatch: false, + code: 'CAPYBARA' } }, logging: { @@ -55,10 +73,12 @@ export default { maxSearchResults: 10, maxAlbumPlaylistLength: 100, playerUpdateInterval: 2000, + statsUpdateInterval: 30000, trackStuckThresholdMs: 10000, zombieThresholdMs: 60000, enableHoloTracks: false, enableTrackStreamEndpoint: false, + enableLoadStreamEndpoint: false, resolveExternalLinks: false, fetchChannelInfo: false, filters: { @@ -79,9 +99,23 @@ export default { timescale: true } }, - defaultSearchSource: 'youtube', + defaultSearchSource: ['youtube', 'soundcloud'], unifiedSearchSources: ['youtube', 'soundcloud'], sources: { + vkmusic: { + enabled: true, + userToken: '', // (optional) get from vk in browser devtools -> reqs POST /?act=web_token HTTP/2 - headers -> response -> access_token + userCookie: '' // (required without userToken) get from vk in browser devtools -> reqs POST /?act=web_token HTTP/2 - headers -> request -> cookie (copy full cookie header) 
+ }, + amazonmusic: { + enabled: true + }, + mixcloud: { + enabled: true + }, + audiomack: { + enabled: true + }, deezer: { // arl: '', // decryptionKey: '', @@ -91,7 +125,8 @@ export default { enabled: true }, soundcloud: { - enabled: true + enabled: true, + // clientId: "" }, local: { enabled: true, @@ -104,6 +139,23 @@ export default { // Note: not 100% of the songs are currently working (but most should.), because i need to code a different extractor for every year (2010, 2011, etc. not all are done) enabled: true, }, + telegram: { + enabled: true + }, + shazam: { + enabled: true, + allowExplicit: true + }, + bilibili: { + enabled: true, + sessdata: '' // Optional, improves access to some videos (premium and 4k+) + }, + genius: { + enabled: true + }, + pinterest: { + enabled: true + }, flowery: { enabled: true, voice: 'Salli', @@ -118,6 +170,14 @@ export default { artistLoadLimit: 20 // "secretKey": "38346591" // Optional, defaults to standard key }, + gaana: { + enabled: true, + apiUrl: 'https://gaana.1lucas1apk.fun/api', // if you want to host your server https://github.com/notdeltaxd/Gaana-API + streamQuality: 'high', + playlistLoadLimit: 100, + albumLoadLimit: 100, + artistLoadLimit: 100 + }, "google-tts": { enabled: true, language: 'en-US' @@ -157,6 +217,7 @@ export default { enabled: true, clientId: '', clientSecret: '', + externalAuthUrl: 'http://get.1lucas1apk.fun/spotify/gettoken', // URL to external token provider (e.g. http://localhost:8080/api/token - use https://github.com/topi314/spotify-tokener or https://github.com/1Lucas1apk/gettoken) market: 'US', playlistLoadLimit: 1, // 0 means no limit (loads all tracks), 1 = 100 tracks, 2 = 100 and so on! playlistPageLoadConcurrency: 10, // How many pages to load simultaneously @@ -185,7 +246,8 @@ export default { enabled: true, // Optional, setting this manually can help unblocking countries (since pandora is US only.). May need to be updated periodically. // fetching manually: use a vpn connected to US, go on pandora.com, open devtools, Network tab, first request to appear and copy the 2nd csrfToken= value. - // csrfToken: '' + // csrfToken: '', + remoteTokenUrl: 'https://get.1lucas1apk.fun/pandora/gettoken' // URL to a remote provider that returns { success: true, authToken: "...", csrfToken: "...", expires_in_seconds: ... 
} //https://github.com/1Lucas1apk/gettoken }, nicovideo: { enabled: true @@ -212,6 +274,9 @@ export default { lrclib: { enabled: true }, + bilibili: { + enabled: true + }, applemusic: { enabled: true, advanceSearch: true // Uses YTMusic to fetch the correct title and artists instead of relying on messy YouTube video titles, improving lyrics accuracy @@ -222,6 +287,10 @@ export default { encryption: 'aead_aes256_gcm_rtpsize', resamplingQuality: 'best' // best, medium, fastest, zero order holder, linear }, + voiceReceive: { + enabled: false, + format: 'opus' // pcm_s16le, opus + }, routePlanner: { strategy: 'RotateOnBan', // RotateOnBan, RoundRobin, LoadBalance bannedIpCooldown: 600000, // 10 minutes @@ -282,10 +351,10 @@ export default { autoCleanup: true }, plugins: [ -/* { - name: 'nodelink-sample-plugin', - source: 'local' - } */ + /* { + name: 'nodelink-sample-plugin', + source: 'local' + } */ ], pluginConfig: {} -} +} \ No newline at end of file diff --git a/docker-compose.yml b/docker-compose.yml index ef1c7b4..0d42178 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -9,19 +9,21 @@ services: ports: - "3000:3000" environment: - # Server Configuration + # --- Server Configuration --- NODELINK_SERVER_HOST: "0.0.0.0" NODELINK_SERVER_PORT: "3000" NODELINK_SERVER_PASSWORD: "youshallnotpass" # CHANGE THIS! # NODELINK_SERVER_USEBUNSERVER: "false" # set to true to use Bun.serve websocket (experimental) - # Cluster Configuration + # --- Cluster Configuration --- # NODELINK_CLUSTER_ENABLED: "true" # active cluster (or use env CLUSTER_ENABLED) # NODELINK_CLUSTER_WORKERS: "0" # 0 => uses os.cpus().length, or specify a number (1 = 2 processes total: master + 1 worker) # NODELINK_CLUSTER_MINWORKERS: "1" # Minimum workers to keep alive (improves availability during bursts) # NODELINK_CLUSTER_COMMANDTIMEOUT: "6000" # Timeout for heavy operations like loadTracks (6s) # NODELINK_CLUSTER_FASTCOMMANDTIMEOUT: "4000" # Timeout for player commands like play/pause (4s) # NODELINK_CLUSTER_MAXRETRIES: "2" # Number of retry attempts on timeout or worker failure + # NODELINK_CLUSTER_HIBERNATION_ENABLED: "true" + # NODELINK_CLUSTER_HIBERNATION_TIMEOUTMS: "1200000" # NODELINK_CLUSTER_SCALING_MAXPLAYERSPERWORKER: "20" # Reference capacity for utilization calculation # NODELINK_CLUSTER_SCALING_TARGETUTILIZATION: "0.7" # Target utilization for scaling up/down # NODELINK_CLUSTER_SCALING_SCALEUPTHRESHOLD: "0.75" # Utilization threshold to scale up @@ -29,11 +31,15 @@ services: # NODELINK_CLUSTER_SCALING_CHECKINTERVALMS: "5000" # Interval to check for scaling needs # NODELINK_CLUSTER_SCALING_IDLEWORKERTIMEOUTMS: "60000" # Time in ms an idle worker should wait before being removed # NODELINK_CLUSTER_SCALING_QUEUELENGTHSCALEUPFACTOR: "5" # How many commands in queue per active worker trigger scale up + # NODELINK_CLUSTER_SCALING_LAGPENALTYLIMIT: "60" # Event loop lag threshold (ms) to penalize worker cost + # NODELINK_CLUSTER_SCALING_CPUPENALTYLIMIT: "0.85" # CPU usage threshold (85% of a core) to force scale up - # Logging Configuration + # --- Logging Configuration --- # NODELINK_LOGGING_LEVEL: "debug" # NODELINK_LOGGING_FILE_ENABLED: "false" # NODELINK_LOGGING_FILE_PATH: "logs" + # NODELINK_LOGGING_FILE_ROTATION: "daily" + # NODELINK_LOGGING_FILE_TTLDAYS: "7" # NODELINK_LOGGING_DEBUG_ALL: "false" # NODELINK_LOGGING_DEBUG_REQUEST: "true" # NODELINK_LOGGING_DEBUG_SESSION: "true" @@ -44,29 +50,29 @@ services: # NODELINK_LOGGING_DEBUG_YOUTUBE: "true" # NODELINK_LOGGING_DEBUG_YOUTUBE_CIPHER: "true" - # Connection 
Configuration + # --- Connection Configuration --- # NODELINK_CONNECTION_LOGALLCHECKS: "false" # NODELINK_CONNECTION_INTERVAL: "300000" # 5 minutes # NODELINK_CONNECTION_TIMEOUT: "10000" # 10 seconds # NODELINK_CONNECTION_THRESHOLDS_BAD: "1" # Mbps # NODELINK_CONNECTION_THRESHOLDS_AVERAGE: "5" # Mbps - # General Limits + # --- General Limits & Timeouts --- # NODELINK_MAXSEARCHRESULTS: "10" # NODELINK_MAXALBUMPLAYLISTLENGTH: "100" - - # Player Update & Zombie Detection # NODELINK_PLAYERUPDATEINTERVAL: "2000" + # NODELINK_STATSUPDATEINTERVAL: "30000" # NODELINK_TRACKSTUCKTHRESHOLDMS: "10000" # NODELINK_ZOMBIETHRESHOLDMS: "60000" - # Feature Toggles + # --- Feature Toggles --- # NODELINK_ENABLEHOLOTRACKS: "false" # NODELINK_ENABLETRACKSTREAMENDPOINT: "false" + # NODELINK_ENABLELOADSTREAMENDPOINT: "false" # NODELINK_RESOLVEEXTERNALLINKS: "false" # NODELINK_FETCHCHANNELINFO: "false" - # Filters Configuration + # --- Filters Configuration --- # NODELINK_FILTERS_ENABLED_TREMOLO: "true" # NODELINK_FILTERS_ENABLED_VIBRATO: "true" # NODELINK_FILTERS_ENABLED_LOWPASS: "true" @@ -82,111 +88,173 @@ services: # NODELINK_FILTERS_ENABLED_PHASER: "true" # NODELINK_FILTERS_ENABLED_TIMESCALE: "true" - # Search Source Defaults - # NODELINK_DEFAULTSEARCHSOURCE: "youtube" + # --- Search Defaults --- + # NODELINK_DEFAULTSEARCHSOURCE: '["youtube", "soundcloud"]' # NODELINK_UNIFIEDSEARCHSOURCES: '["youtube", "soundcloud"]' - # Sources Configuration + # --- Sources Configuration --- + # VK Music + # NODELINK_SOURCES_VKMUSIC_ENABLED: "true" + # NODELINK_SOURCES_VKMUSIC_USERTOKEN: "" + # NODELINK_SOURCES_VKMUSIC_USERCOOKIE: "" + + # Amazon Music + # NODELINK_SOURCES_AMAZONMUSIC_ENABLED: "true" + + # Mixcloud + # NODELINK_SOURCES_MIXCLOUD_ENABLED: "true" + + # Deezer # NODELINK_SOURCES_DEEZER_ENABLED: "true" # NODELINK_SOURCES_DEEZER_ARL: "" # NODELINK_SOURCES_DEEZER_DECRYPTIONKEY: "" + + # Bandcamp # NODELINK_SOURCES_BANDCAMP_ENABLED: "true" + + # SoundCloud # NODELINK_SOURCES_SOUNDCLOUD_ENABLED: "true" + + # Local Source # NODELINK_SOURCES_LOCAL_ENABLED: "true" # NODELINK_SOURCES_LOCAL_BASEPATH: "./local-music/" + + # HTTP Source # NODELINK_SOURCES_HTTP_ENABLED: "true" + + # Vimeo + # NODELINK_SOURCES_VIMEO_ENABLED: "true" + + # Telegram + # NODELINK_SOURCES_TELEGRAM_ENABLED: "true" + + # Bilibili + # NODELINK_SOURCES_BILIBILI_ENABLED: "true" + # NODELINK_SOURCES_BILIBILI_SESSDATA: "" + + # JioSaavn # NODELINK_SOURCES_JIOSAAVN_ENABLED: "true" # NODELINK_SOURCES_JIOSAAVN_PLAYLISTLOADLIMIT: "50" # NODELINK_SOURCES_JIOSAAVN_ARTISTLOADLIMIT: "20" - # NODELINK_SOURCES_JIOSAAVN_SECRETKEY: "" # Optional, defaults to standard key + + # Google TTS + # NODELINK_SOURCES_GOOGLE-TTS_ENABLED: "true" + # NODELINK_SOURCES_GOOGLE-TTS_LANGUAGE: "en-US" + + # YouTube # NODELINK_SOURCES_YOUTUBE_ENABLED: "true" - # NODELINK_SOURCES_YOUTUBE_ALLOWITAG: "[]" # additional itags for audio streams, e.g., [140, 141] - # NODELINK_SOURCES_YOUTUBE_TARGETITAG: "" # force a specific itag for audio streams, overriding the quality option + # NODELINK_SOURCES_YOUTUBE_ALLOWITAG: "[]" # MUST be a JSON array string + # NODELINK_SOURCES_YOUTUBE_TARGETITAG: "" # NODELINK_SOURCES_YOUTUBE_GETOAUTHTOKEN: "false" # NODELINK_SOURCES_YOUTUBE_HL: "en" # NODELINK_SOURCES_YOUTUBE_GL: "US" - # NODELINK_SOURCES_YOUTUBE_CLIENTS_SEARCH: '["Android"]' # Clients used for searching tracks - # NODELINK_SOURCES_YOUTUBE_CLIENTS_PLAYBACK: '["AndroidVR", "TV", "TVEmbedded", "IOS"]' # Clients used for playback/streaming - # NODELINK_SOURCES_YOUTUBE_CLIENTS_RESOLVE: 
'["AndroidVR", "TV", "TVEmbedded", "IOS", "Web"]' # Clients used for resolving detailed track information (channel, external links, etc.) - # NODELINK_SOURCES_YOUTUBE_CLIENTS_SETTINGS_TV_REFRESHTOKEN: "" + # NODELINK_SOURCES_YOUTUBE_CLIENTS_SEARCH: '["Android"]' # MUST be a JSON array string + # NODELINK_SOURCES_YOUTUBE_CLIENTS_PLAYBACK: '["AndroidVR", "TV", "TVEmbedded", "IOS"]' # MUST be a JSON array string + # NODELINK_SOURCES_YOUTUBE_CLIENTS_RESOLVE: '["AndroidVR", "TV", "TVEmbedded", "IOS", "Web"]' # MUST be a JSON array string + # NODELINK_SOURCES_YOUTUBE_CLIENTS_SETTINGS_TV_REFRESHTOKEN: '["TOKEN_HERE"]' # MUST be a JSON array string NODELINK_SOURCES_YOUTUBE_CIPHER_URL: "https://cipher.kikkia.dev/api" - # NODELINK_SOURCES_YOUTUBE_CIPHER_TOKEN: "KEY" # CHANGE THIS! + # NODELINK_SOURCES_YOUTUBE_CIPHER_TOKEN: "" + + # Instagram # NODELINK_SOURCES_INSTAGRAM_ENABLED: "true" + + # Kwai # NODELINK_SOURCES_KWAI_ENABLED: "true" + + # Twitch # NODELINK_SOURCES_TWITCH_ENABLED: "true" + + # Spotify # NODELINK_SOURCES_SPOTIFY_ENABLED: "true" - # NODELINK_SOURCES_SPOTIFY_CLIENTID: "" # CHANGE THIS! - # NODELINK_SOURCES_SPOTIFY_CLIENTSECRET: "" # CHANGE THIS! + # NODELINK_SOURCES_SPOTIFY_CLIENTID: "" + # NODELINK_SOURCES_SPOTIFY_CLIENTSECRET: "" + NODELINK_SOURCES_SPOTIFY_EXTERNALAUTHURL: "http://get.1lucas1apk.fun/spotify/gettoken" # NODELINK_SOURCES_SPOTIFY_MARKET: "US" - # NODELINK_SOURCES_SPOTIFY_PLAYLISTLOADLIMIT: "1" # 0 means no limit (loads all tracks), 1 = 100 tracks, 2 = 100 and so on! - # NODELINK_SOURCES_SPOTIFY_PLAYLISTPAGELOADCONCURRENCY: "10" # How many pages to load simultaneously - # NODELINK_SOURCES_SPOTIFY_ALBUMLOADLIMIT: "1" # 0 means no limit (loads all tracks), 1 = 50 tracks, 2 = 100 tracks, etc. - # NODELINK_SOURCES_SPOTIFY_ALBUMPAGELOADCONCURRENCY: "5" # How many pages to load simultaneously - # NODELINK_SOURCES_SPOTIFY_ALLOWEXPLICIT: "true" # If true plays the explicit version of the song, If false plays the Non-Explicit version of the song. Normal songs are not affected. + # NODELINK_SOURCES_SPOTIFY_PLAYLISTLOADLIMIT: "1" + # NODELINK_SOURCES_SPOTIFY_PLAYLISTPAGELOADCONCURRENCY: "10" + # NODELINK_SOURCES_SPOTIFY_ALBUMLOADLIMIT: "1" + # NODELINK_SOURCES_SPOTIFY_ALBUMPAGELOADCONCURRENCY: "5" + # NODELINK_SOURCES_SPOTIFY_ALLOWEXPLICIT: "true" + + # Apple Music # NODELINK_SOURCES_APPLEMUSIC_ENABLED: "true" - # NODELINK_SOURCES_APPLEMUSIC_MEDIAAPITOKEN: "token_here" # manually | or "token_here" to get a token automatically # CHANGE THIS! + # NODELINK_SOURCES_APPLEMUSIC_MEDIAAPITOKEN: "token_here" # NODELINK_SOURCES_APPLEMUSIC_MARKET: "US" # NODELINK_SOURCES_APPLEMUSIC_PLAYLISTLOADLIMIT: "0" # NODELINK_SOURCES_APPLEMUSIC_ALBUMLOADLIMIT: "0" # NODELINK_SOURCES_APPLEMUSIC_PLAYLISTPAGELOADCONCURRENCY: "5" # NODELINK_SOURCES_APPLEMUSIC_ALBUMPAGELOADCONCURRENCY: "5" # NODELINK_SOURCES_APPLEMUSIC_ALLOWEXPLICIT: "true" + + # Tidal # NODELINK_SOURCES_TIDAL_ENABLED: "true" - # NODELINK_SOURCES_TIDAL_TOKEN: "" # get from tidal web player devtools; using login google account # CHANGE THIS! + # NODELINK_SOURCES_TIDAL_TOKEN: "" # NODELINK_SOURCES_TIDAL_COUNTRYCODE: "US" - # NODELINK_SOURCES_TIDAL_PLAYLISTLOADLIMIT: "2" # 0 = no limit, 1 = 50 tracks, 2 = 100 tracks, etc. 
- # NODELINK_SOURCES_TIDAL_PLAYLISTPAGELOADCONCURRENCY: "5" # How many pages to load simultaneously + # NODELINK_SOURCES_TIDAL_PLAYLISTLOADLIMIT: "2" + # NODELINK_SOURCES_TIDAL_PLAYLISTPAGELOADCONCURRENCY: "5" + + # Pandora # NODELINK_SOURCES_PANDORA_ENABLED: "true" - # NODELINK_SOURCES_PANDORA_CSRFTOKEN: "" # Optional, setting this manually can help unblocking countries (since pandora is US only.). May need to be updated periodically. # fetching manually: use a vpn connected to US, go on pandora.com, open devtools, Network tab, first request to appear and copy the 2nd csrfToken= value. + # NODELINK_SOURCES_PANDORA_REMOTETOKENURL: "https://get.1lucas1apk.fun/pandora/gettoken" + + # Other Sources # NODELINK_SOURCES_NICOVIDEO_ENABLED: "true" # NODELINK_SOURCES_REDDIT_ENABLED: "true" # NODELINK_SOURCES_LASTFM_ENABLED: "true" - # Lyrics Configuration + # --- Lyrics Configuration --- # NODELINK_LYRICS_FALLBACKSOURCE: "genius" # NODELINK_LYRICS_YOUTUBE_ENABLED: "true" # NODELINK_LYRICS_GENIUS_ENABLED: "true" # NODELINK_LYRICS_MUSIXMATCH_ENABLED: "true" # NODELINK_LYRICS_MUSIXMATCH_SIGNATURESECRET: "" # NODELINK_LYRICS_LRCLIB_ENABLED: "true" + # NODELINK_LYRICS_BILIBILI_ENABLED: "true" # NODELINK_LYRICS_APPLEMUSIC_ENABLED: "true" - # NODELINK_LYRICS_APPLEMUSIC_ADVANCESEARCH: "true" # Uses YTMusic to fetch the correct title and artists instead of relying on messy YouTube video titles, improving lyrics accuracy + # NODELINK_LYRICS_APPLEMUSIC_ADVANCESEARCH: "true" - # Audio Configuration - # NODELINK_AUDIO_QUALITY: "high" # high, medium, low, lowest + # --- Audio Configuration --- + # NODELINK_AUDIO_QUALITY: "high" # NODELINK_AUDIO_ENCRYPTION: "aead_aes256_gcm_rtpsize" - # NODELINK_AUDIO_RESAMPLINGQUALITY: "best" # best, medium, fastest, zero order holder, linear + # NODELINK_AUDIO_RESAMPLINGQUALITY: "best" - # Route Planner Configuration - # NODELINK_ROUTEPLANNER_STRATEGY: "RotateOnBan" # RotateOnBan, RoundRobin, LoadBalance - # NODELINK_ROUTEPLANNER_BANNEDIPCOOLDOWN: "600000" # 10 minutes - # NODELINK_ROUTEPLANNER_IPBLOCKS: "[]" + # --- Route Planner Configuration --- + # NODELINK_ROUTEPLANNER_STRATEGY: "RotateOnBan" + # NODELINK_ROUTEPLANNER_BANNEDIPCOOLDOWN: "600000" + # NODELINK_ROUTEPLANNER_IPBLOCKS: "[]" # MUST be a JSON array string - # Rate Limit Configuration + # --- Rate Limit Configuration --- # NODELINK_RATELIMIT_ENABLED: "true" # NODELINK_RATELIMIT_GLOBAL_MAXREQUESTS: "1000" - # NODELINK_RATELIMIT_GLOBAL_TIMEWINDOWMS: "60000" # 1 minute + # NODELINK_RATELIMIT_GLOBAL_TIMEWINDOWMS: "60000" # NODELINK_RATELIMIT_PERIP_MAXREQUESTS: "100" - # NODELINK_RATELIMIT_PERIP_TIMEWINDOWMS: "10000" # 10 seconds + # NODELINK_RATELIMIT_PERIP_TIMEWINDOWMS: "10000" # NODELINK_RATELIMIT_PERUSERID_MAXREQUESTS: "50" - # NODELINK_RATELIMIT_PERUSERID_TIMEWINDOWMS: "5000" # 5 seconds + # NODELINK_RATELIMIT_PERUSERID_TIMEWINDOWMS: "5000" # NODELINK_RATELIMIT_PERGUILDID_MAXREQUESTS: "20" - # NODELINK_RATELIMIT_PERGUILDID_TIMEWINDOWMS: "5000" # 5 seconds - # NODELINK_RATELIMIT_IGNOREPATHS: "[]" + # NODELINK_RATELIMIT_PERGUILDID_TIMEWINDOWMS: "5000" + # NODELINK_RATELIMIT_IGNOREPATHS: "[]" # MUST be a JSON array string - # DoS Protection Configuration + # --- DoS Protection Configuration --- # NODELINK_DOSPROTECTION_ENABLED: "true" # NODELINK_DOSPROTECTION_THRESHOLDS_BURSTREQUESTS: "50" - # NODELINK_DOSPROTECTION_THRESHOLDS_TIMEWINDOWMS: "10000" # 10 seconds + # NODELINK_DOSPROTECTION_THRESHOLDS_TIMEWINDOWMS: "10000" # NODELINK_DOSPROTECTION_MITIGATION_DELAYMS: "500" - # 
NODELINK_DOSPROTECTION_MITIGATION_BLOCKDURATIONMS: "300000" # 5 minutes + # NODELINK_DOSPROTECTION_MITIGATION_BLOCKDURATIONMS: "300000" - # Metrics Configuration + # --- Metrics Configuration --- # NODELINK_METRICS_ENABLED: "true" - # NODELINK_METRICS_AUTHORIZATION_TYPE: "Bearer" # Bearer or Basic. - # NODELINK_METRICS_AUTHORIZATION_PASSWORD: "" # If empty, uses server.password + # NODELINK_METRICS_AUTHORIZATION_TYPE: "Bearer" + # NODELINK_METRICS_AUTHORIZATION_PASSWORD: "" + + # --- Audio Mix Configuration --- + # NODELINK_MIX_ENABLED: "true" + # NODELINK_MIX_DEFAULTVOLUME: "0.8" + # NODELINK_MIX_MAXLAYERSMIX: "5" + # NODELINK_MIX_AUTOCLEANUP: "true" - # Uncomment and configure these volumes if you want to persist data or use local files # volumes: - # - ./local-music:/app/local-music # Mount a local music directory for 'local' source - # - ./logs:/app/logs # Mount logs directory if logging.file.enabled is true \ No newline at end of file + # - ./local-music:/app/local-music + # - ./logs:/app/logs + # - ./.cache:/app/.cache + restart: unless-stopped diff --git a/package.json b/package.json index a9b2a86..4703ba8 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "nodelink", - "version": "3.4.0", + "version": "3.5.0", "scripts": { "build": "node scripts/build.js", "start": "node --dns-result-order=ipv4first --openssl-legacy-provider src/index.js", @@ -16,14 +16,14 @@ "@performanc/pwsl-server": "github:performanc/internals#PWSL-server", "@performanc/voice": "github:PerformanC/voice", "@toddynnn/symphonia-decoder": "1.0.6", + "@toddynnn/voice-opus": "^1.0.1", "mp4box": "^2.3.0", - "myzod": "^1.12.1", - "toddy-mediaplex": "^2.0.0" + "myzod": "^1.12.1" }, "devDependencies": { "@biomejs/biome": "^2.3.10", - "@commitlint/cli": "20.2.0", - "@commitlint/config-conventional": "20.2.0", + "@commitlint/cli": "20.3.0", + "@commitlint/config-conventional": "20.3.0", "dotenv": "^17.2.3", "husky": "9.1.7" }, diff --git a/plugins/@1lucas1apk/tunnel-cloudflared/index.js b/plugins/@1lucas1apk/tunnel-cloudflared/index.js index 6d20642..c85f125 100644 --- a/plugins/@1lucas1apk/tunnel-cloudflared/index.js +++ b/plugins/@1lucas1apk/tunnel-cloudflared/index.js @@ -29,19 +29,40 @@ export default async function(nodelink, config, context) { await install(bin) } + logger(`Starting tunnel on port ${port}...`) + const tunnel = spawn( bin, ["tunnel", "run", "--token", token, "--url", `http://127.0.0.1:${port}`], - { stdio: "inherit", env: process.env } + { stdio: ["ignore", "pipe", "pipe"], env: process.env } ) - const stopAll = () => { - try { tunnel.kill("SIGTERM") } catch {} - } + tunnel.stdout.on('data', (data) => { + const msg = data.toString().trim() + if (msg) logger(msg, 'debug') + }) + + tunnel.stderr.on('data', (data) => { + const msg = data.toString().trim() + if (msg.includes('Registered tunnel connection')) { + logger('Tunnel connection established successfully.') + } + }) + + tunnel.on('error', (err) => { + logger(`Failed to start cloudflared: ${err.message}`, 'error') + }) - process.on("SIGINT", stopAll) - process.on("SIGTERM", stopAll) - process.on("beforeExit", stopAll) + tunnel.on('close', (code) => { + if (code !== null && code !== 0 && code !== 1) { + logger(`Cloudflared exited with code ${code}`, 'warn') + } + }) - logger(`Tunnel started on port ${port}`) + nodelink.once('shutdown', () => { + if (tunnel && !tunnel.killed) { + logger('Closing tunnel...') + tunnel.kill("SIGKILL") + } + }) } diff --git a/src/api/decodeTrack.js b/src/api/decodeTrack.js index f28c8ef..aeec215 100644 
--- a/src/api/decodeTrack.js +++ b/src/api/decodeTrack.js @@ -1,16 +1,11 @@ import myzod from 'myzod' -import { - decodeTrack, - logger, - sendResponse, - sendErrorResponse -} from '../utils.js' +import { decodeTrack, logger, sendErrorResponse } from '../utils.js' const decodeTrackSchema = myzod.object({ encodedTrack: myzod.string() }) -function handler(nodelink, req, res, sendResponse, parsedUrl) { +function handler(_nodelink, req, res, sendResponse, parsedUrl) { const result = decodeTrackSchema.try({ encodedTrack: parsedUrl.searchParams.get('encodedTrack') }) @@ -34,6 +29,14 @@ function handler(nodelink, req, res, sendResponse, parsedUrl) { try { logger('debug', 'Tracks', `Decoding track: ${encodedTrack}`) const decodedTrack = decodeTrack(encodedTrack) + if (decodedTrack.details) { + decodedTrack.pluginInfo = { + ...decodedTrack.pluginInfo, + details: decodedTrack.details + } + + delete decodedTrack.details + } sendResponse(req, res, decodedTrack, 200) } catch (err) { logger('error', 'Tracks', `Failed to decode track ${encodedTrack}:`, err) diff --git a/src/api/decodeTracks.js b/src/api/decodeTracks.js index 478247d..f81aa4f 100644 --- a/src/api/decodeTracks.js +++ b/src/api/decodeTracks.js @@ -1,14 +1,9 @@ import myzod from 'myzod' -import { - decodeTrack, - logger, - sendResponse, - sendErrorResponse -} from '../utils.js' +import { decodeTrack, logger, sendErrorResponse } from '../utils.js' const decodeTracksSchema = myzod.array(myzod.string()).min(1) -function handler(nodelink, req, res, sendResponse, parsedUrl) { +function handler(_nodelink, req, res, sendResponse, parsedUrl) { const result = decodeTracksSchema.try(req.body) if (result instanceof myzod.ValidationError) { diff --git a/src/api/encodeTrack.js b/src/api/encodeTrack.js index b8b3efe..5f71990 100644 --- a/src/api/encodeTrack.js +++ b/src/api/encodeTrack.js @@ -1,16 +1,11 @@ import myzod from 'myzod' -import { - encodeTrack, - logger, - sendResponse, - sendErrorResponse -} from '../utils.js' +import { encodeTrack, logger, sendErrorResponse } from '../utils.js' const encodeTrackSchema = myzod.object({ track: myzod.string() }) -function handler(nodelink, req, res, sendResponse, parsedUrl) { +function handler(_nodelink, req, res, sendResponse, parsedUrl) { const result = encodeTrackSchema.try({ track: parsedUrl.searchParams.get('track') }) diff --git a/src/api/encodedTracks.js b/src/api/encodedTracks.js index 8dd64ff..7d18070 100644 --- a/src/api/encodedTracks.js +++ b/src/api/encodedTracks.js @@ -1,10 +1,5 @@ import myzod from 'myzod' -import { - encodeTrack, - logger, - sendResponse, - sendErrorResponse -} from '../utils.js' +import { encodeTrack, logger, sendErrorResponse } from '../utils.js' const encodedTracksSchema = myzod .array( @@ -17,7 +12,7 @@ const encodedTracksSchema = myzod ) .min(1) -function handler(nodelink, req, res, sendResponse, parsedUrl) { +function handler(_nodelink, req, res, sendResponse, parsedUrl) { const result = encodedTracksSchema.try(req.body) if (result instanceof myzod.ValidationError) { diff --git a/src/api/index.js b/src/api/index.js index 7c69565..64fa953 100644 --- a/src/api/index.js +++ b/src/api/index.js @@ -4,9 +4,9 @@ import { fileURLToPath } from 'node:url' import { PATH_VERSION } from '../constants.js' import { logger, + sendErrorResponse, sendResponse, - verifyMethod, - sendErrorResponse + verifyMethod } from '../utils.js' let apiRegistry @@ -24,12 +24,10 @@ async function loadRoutes() { let routeModules = [] if (apiRegistry) { - routeModules = Object.entries(apiRegistry).map( - 
([file, mod]) => ({ - file, - module: mod.default || mod - }) - ) + routeModules = Object.entries(apiRegistry).map(([file, mod]) => ({ + file, + module: mod.default || mod + })) } if (routeModules.length === 0) { @@ -91,7 +89,7 @@ async function requestHandler(nodelink, req, res) { if (middlewares && Array.isArray(middlewares)) { for (const middleware of middlewares) { const result = await middleware(nodelink, req, res, parsedUrl) - if (result === true) return + if (result === true) return } } @@ -102,7 +100,7 @@ async function requestHandler(nodelink, req, res) { const clientAddress = `${isInternal ? '[Internal]' : '[External]'} (${remoteAddress}:${req.socket.remotePort})` const originalEnd = res.end - res.end = function(...args) { + res.end = (...args) => { const duration = Date.now() - startTime nodelink.statsManager.recordHttpRequestDuration( parsedUrl.pathname, @@ -128,25 +126,31 @@ async function requestHandler(nodelink, req, res) { } const authConfig = metricsConfig.authorization || {} - let authType = authConfig.type; - if(!['Bearer', 'Basic'].includes(authType)) { - logger('warn',`Config: metrics authorization.type SHOULD BE one of 'Bearer', 'Basic'.... Defaulting to 'Bearer'!`); - authType = 'Bearer'; + let authType = authConfig.type + if (!['Bearer', 'Basic'].includes(authType)) { + logger( + 'warn', + `Config: metrics authorization.type SHOULD BE one of 'Bearer', 'Basic'.... Defaulting to 'Bearer'!` + ) + authType = 'Bearer' } - - const metricsPassword = authConfig.password || nodelink.options.server.password + + const metricsPassword = + authConfig.password || nodelink.options.server.password const authHeader = req.headers?.authorization const isValidAuth = - authHeader === metricsPassword - || (authType === 'Bearer' && authHeader === `${authType} ${metricsPassword}`) - || (authType === 'Basic' && authHeader === `${authType} ${atob(authHeader.slice(authType.length))}`) + authHeader === metricsPassword || + (authType === 'Bearer' && + authHeader === `${authType} ${metricsPassword}`) || + (authType === 'Basic' && + authHeader === `${authType} ${atob(authHeader.slice(authType.length))}`) if (!isValidAuth) { logger( 'warn', 'Metrics', - `Unauthorized metrics access attempt from ${clientAddress} - Invalid password provided` + `Unauthorized metrics access attempt from ${clientAddress} - Invalid password provided: ${authHeader || 'None'}` ) res.writeHead(401, { 'Content-Type': 'text/plain' }) res.end('Unauthorized') @@ -180,7 +184,14 @@ async function requestHandler(nodelink, req, res) { await new Promise((resolve) => setTimeout(resolve, dosCheck.delay)) } - if (!nodelink.rateLimitManager.check(req, parsedUrl)) { + const rateLimitCheck = nodelink.rateLimitManager.check(req, parsedUrl) + if (rateLimitCheck.limit !== undefined) { + res.setHeader('X-RateLimit-Limit', rateLimitCheck.limit) + res.setHeader('X-RateLimit-Remaining', rateLimitCheck.remaining) + res.setHeader('X-RateLimit-Reset', Math.ceil(rateLimitCheck.reset / 1000)) + } + + if (!rateLimitCheck.allowed) { logger( 'warn', 'RateLimit', @@ -190,6 +201,10 @@ async function requestHandler(nodelink, req, res) { parsedUrl.pathname, remoteAddress ) + + const retryAfter = Math.ceil((rateLimitCheck.reset - Date.now()) / 1000) + res.setHeader('Retry-After', retryAfter) + sendErrorResponse( req, res, @@ -203,15 +218,16 @@ async function requestHandler(nodelink, req, res) { } if (!isMetricsEndpoint) { + const authHeader = req.headers?.authorization if ( - !req.headers || - req.headers.authorization !== nodelink.options.server.password 
&& - req.headers.authorization !== `Bearer ${nodelink.options.server.password}` + !authHeader || + (authHeader !== nodelink.options.server.password && + authHeader !== `Bearer ${nodelink.options.server.password}`) ) { logger( 'warn', 'Server', - `Unauthorized connection attempt from ${clientAddress} - Invalid password provided` + `Unauthorized connection attempt from ${clientAddress} - Invalid password provided: ${authHeader || 'None'}` ) res.writeHead(401, { 'Content-Type': 'text/plain' }) @@ -288,9 +304,7 @@ async function requestHandler(nodelink, req, res) { const customRoutes = nodelink.extensions?.routes if (customRoutes && Array.isArray(customRoutes)) { - const customRoute = customRoutes.find( - (r) => r.path === parsedUrl.pathname - ) + const customRoute = customRoutes.find((r) => r.path === parsedUrl.pathname) if (customRoute) { if ( diff --git a/src/api/loadChapters.js b/src/api/loadChapters.js index ea576f2..409b66e 100644 --- a/src/api/loadChapters.js +++ b/src/api/loadChapters.js @@ -1,10 +1,5 @@ import myzod from 'myzod' -import { - decodeTrack, - logger, - sendResponse, - sendErrorResponse -} from '../utils.js' +import { decodeTrack, logger, sendErrorResponse } from '../utils.js' const loadChaptersSchema = myzod.object({ encodedTrack: myzod.string() @@ -43,7 +38,10 @@ async function handler(nodelink, req, res, sendResponse, parsedUrl) { ) } - if (decodedTrack.info.sourceName !== 'youtube' && decodedTrack.info.sourceName !== 'ytmusic') { + if ( + decodedTrack.info.sourceName !== 'youtube' && + decodedTrack.info.sourceName !== 'ytmusic' + ) { return sendResponse(req, res, [], 200) } @@ -53,12 +51,30 @@ async function handler(nodelink, req, res, sendResponse, parsedUrl) { `Request to load chapters for: ${decodedTrack.info.title}` ) + let delegated = false + if (nodelink.sourceWorkerManager) { + delegated = nodelink.sourceWorkerManager.delegate( + req, + res, + 'loadChapters', + { + decodedTrackInfo: decodedTrack.info + } + ) + } + + if (delegated) return + let chaptersData if (nodelink.workerManager) { const worker = nodelink.workerManager.getBestWorker() - chaptersData = await nodelink.workerManager.execute(worker, 'loadChapters', { - decodedTrack - }) + chaptersData = await nodelink.workerManager.execute( + worker, + 'loadChapters', + { + decodedTrackInfo: decodedTrack.info + } + ) } else { chaptersData = await nodelink.sources.getChapters(decodedTrack) } @@ -80,4 +96,4 @@ async function handler(nodelink, req, res, sendResponse, parsedUrl) { export default { handler -} +} \ No newline at end of file diff --git a/src/api/loadLyrics.js b/src/api/loadLyrics.js index f535c78..dbfb3e4 100644 --- a/src/api/loadLyrics.js +++ b/src/api/loadLyrics.js @@ -1,10 +1,5 @@ import myzod from 'myzod' -import { - decodeTrack, - logger, - sendResponse, - sendErrorResponse -} from '../utils.js' +import { decodeTrack, logger, sendErrorResponse } from '../utils.js' const loadLyricsSchema = myzod.object({ encodedTrack: myzod.string(), @@ -57,11 +52,26 @@ async function handler(nodelink, req, res, sendResponse, parsedUrl) { `Request to load lyrics for: ${decodedTrack.info.title}${language ? 
` (Lang: ${language})` : ''}` ) + let delegated = false + if (nodelink.sourceWorkerManager) { + delegated = nodelink.sourceWorkerManager.delegate( + req, + res, + 'loadLyrics', + { + decodedTrackInfo: decodedTrack.info, + language + } + ) + } + + if (delegated) return + let lyricsData if (nodelink.workerManager) { const worker = nodelink.workerManager.getBestWorker() lyricsData = await nodelink.workerManager.execute(worker, 'loadLyrics', { - decodedTrack, + decodedTrackInfo: decodedTrack.info, language }) } else { diff --git a/src/api/loadStream.js b/src/api/loadStream.js new file mode 100644 index 0000000..f71467f --- /dev/null +++ b/src/api/loadStream.js @@ -0,0 +1,242 @@ +import { pipeline } from 'node:stream' +import myzod from 'myzod' +import { createPCMStream } from '../playback/streamProcessor.js' +import { decodeTrack, logger, sendErrorResponse } from '../utils.js' + +const loadStreamSchema = myzod.object({ + encodedTrack: myzod.string(), + volume: myzod.number().min(0).max(1000).optional(), + position: myzod.number().min(0).optional(), + filters: myzod.unknown().optional() +}) + +async function handler(nodelink, req, res, _sendResponse, parsedUrl) { + if (!nodelink.options.enableLoadStreamEndpoint) { + return sendErrorResponse( + req, + res, + 404, + 'Not Found', + 'The requested route was not found.', + parsedUrl.pathname + ) + } + + let result + try { + if (req.method === 'POST') { + result = loadStreamSchema.try(req.body) + } else { + const filtersRaw = parsedUrl.searchParams.get('filters') + let filters + if (filtersRaw) { + try { + filters = JSON.parse(filtersRaw) + } catch { + filters = undefined + } + } + + result = loadStreamSchema.try({ + encodedTrack: parsedUrl.searchParams.get('encodedTrack'), + volume: parsedUrl.searchParams.get('volume') + ? Number(parsedUrl.searchParams.get('volume')) + : undefined, + position: + parsedUrl.searchParams.get('position') || + parsedUrl.searchParams.get('t') + ? 
Number( + parsedUrl.searchParams.get('position') || + parsedUrl.searchParams.get('t') + ) + : undefined, + filters + }) + } + + if (result instanceof myzod.ValidationError) { + return sendErrorResponse( + req, + res, + 400, + 'Bad Request', + result.message, + parsedUrl.pathname + ) + } + + const { encodedTrack, volume = 100, position = 0, filters = {} } = result + const decodedTrack = decodeTrack(encodedTrack.replace(/ /g, '+')) + + if (!decodedTrack) { + return sendErrorResponse( + req, + res, + 400, + 'Bad Request', + 'Invalid encoded track', + parsedUrl.pathname + ) + } + + if (nodelink.sourceWorkerManager) { + const delegated = nodelink.sourceWorkerManager.delegate( + req, + res, + 'loadStream', + { + decodedTrackInfo: decodedTrack.info, + volume, + position, + filters + }, + { + headers: { + 'Content-Type': 'audio/l16;rate=48000;channels=2', + 'Transfer-Encoding': 'chunked', + Connection: 'keep-alive' + } + } + ) + if (delegated) return + } + + if (!nodelink.sources && nodelink.workerManager) { + const delegated = nodelink.workerManager.delegateStream( + req, + res, + { + decodedTrackInfo: decodedTrack.info, + volume, + position, + filters + }, + { + headers: { + 'Content-Type': 'audio/l16;rate=48000;channels=2', + 'Transfer-Encoding': 'chunked', + Connection: 'keep-alive' + } + } + ) + if (delegated) return + return sendErrorResponse( + req, + res, + 503, + 'Service Unavailable', + 'No available workers to stream audio.', + parsedUrl.pathname + ) + } + + if (!nodelink.sources && !nodelink.workerManager) { + return sendErrorResponse( + req, + res, + 503, + 'Service Unavailable', + 'Sources manager is not available for loadStream.', + parsedUrl.pathname + ) + } + + let urlResult + if (nodelink.workerManager) { + const worker = nodelink.workerManager.getBestWorker() + urlResult = await nodelink.workerManager.execute(worker, 'getTrackUrl', { + decodedTrackInfo: decodedTrack.info + }) + } else { + urlResult = await nodelink.sources.getTrackUrl(decodedTrack.info) + } + + if (urlResult.exception) { + return sendErrorResponse( + req, + res, + 500, + 'Internal Server Error', + urlResult.exception.message, + parsedUrl.pathname + ) + } + + const additionalData = { ...urlResult.additionalData, startTime: position } + + const fetched = await nodelink.sources.getTrackStream( + urlResult.newTrack?.info || decodedTrack.info, + urlResult.url, + urlResult.protocol, + additionalData + ) + + if (fetched.exception) { + return sendErrorResponse( + req, + res, + 500, + 'Internal Server Error', + fetched.exception.message, + parsedUrl.pathname + ) + } + + const pcmStream = createPCMStream( + fetched.stream, + fetched.type || urlResult.format, + nodelink, + volume / 100, + filters + ) + + pcmStream.on('error', (err) => { + logger( + 'error', + 'LoadStream', + `Pipeline component error: ${err.message} (${err.code})` + ) + }) + + res.writeHead(200, { + 'Content-Type': 'audio/l16;rate=48000;channels=2', + 'Transfer-Encoding': 'chunked', + Connection: 'keep-alive' + }) + + pipeline(pcmStream, res, (err) => { + if (err && err.code !== 'ERR_STREAM_PREMATURE_CLOSE') { + logger( + 'error', + 'LoadStream', + `Pipeline output failed for ${decodedTrack.info.title}: ${err.message}` + ) + } + + if (!pcmStream.destroyed) pcmStream.destroy() + if (fetched.stream && !fetched.stream.destroyed) fetched.stream.destroy() + }) + + res.on('close', () => { + if (!pcmStream.destroyed) pcmStream.destroy() + if (fetched.stream && !fetched.stream.destroyed) fetched.stream.destroy() + }) + } catch (err) { + logger('error', 
'LoadStream', `Fatal handler error:`, err)
+    if (!res.writableEnded) {
+      sendErrorResponse(
+        req,
+        res,
+        500,
+        'Internal Server Error',
+        err.message,
+        parsedUrl.pathname
+      )
+    }
+  }
+}
+
+export default {
+  handler,
+  methods: ['GET', 'POST']
+}
diff --git a/src/api/loadTracks.js b/src/api/loadTracks.js
index 8d78072..79decb6 100644
--- a/src/api/loadTracks.js
+++ b/src/api/loadTracks.js
@@ -1,5 +1,5 @@
 import myzod from 'myzod'
-import { logger, sendResponse, sendErrorResponse } from '../utils.js'
+import { logger, sendErrorResponse } from '../utils.js'
 
 const loadTracksSchema = myzod.object({
   identifier: myzod.string()
@@ -27,32 +27,60 @@ async function handler(nodelink, req, res, sendResponse, parsedUrl) {
   const identifier = result.identifier
 
   logger('debug', 'Tracks', `Loading tracks with identifier: "${identifier}"`)
 
+  const re =
+    /^(?:(?<url>(?:https?|ftts):\/\/\S+)|(?<source>(?![A-Z]:\\)[A-Za-z0-9]+):(?<query>(?!\/\/).+)|(?<local>(?:\/|[A-Z]:\\|\\).+))$/i
+  const match = re.exec(identifier)
+
+  let url, source, query
+
+  if (match) {
+    url = match.groups.url
+    source = match.groups.source
+    query = match.groups.query
+
+    if (match.groups.local) {
+      source = 'local'
+      query = match.groups.local
+    }
+  } else {
+    source = Array.isArray(nodelink.options.defaultSearchSource)
+      ? nodelink.options.defaultSearchSource[0]
+      : nodelink.options.defaultSearchSource
+    query = identifier
+  }
+
   try {
+    if (nodelink.sourceWorkerManager) {
+      let task = ''
+      let payload = {}
+
+      if (url) {
+        task = 'resolve'
+        payload = { url }
+      } else if (source === 'search') {
+        task = 'unifiedSearch'
+        payload = { query }
+      } else {
+        task = 'search'
+        payload = { source, query }
+      }
+
+      const delegated = nodelink.sourceWorkerManager.delegate(
+        req,
+        res,
+        task,
+        payload
+      )
+      if (delegated) return
+    }
+
     let result
-    if (nodelink.workerManager) {
+    if (nodelink.workerManager && !nodelink.sourceWorkerManager) {
       const worker = nodelink.workerManager.getBestWorker()
       result = await nodelink.workerManager.execute(worker, 'loadTracks', {
         identifier
       })
     } else {
-      const re =
-        /^(?:(?<url>(?:https?|ftts):\/\/\S+)|(?<source>[A-Za-z0-9]+):(?<query>[^/\s].*))$/i
-      const match = re.exec(identifier)
-      if (!match) {
-        logger('warn', 'Tracks', `Invalid identifier: "${identifier}"`)
-        return sendErrorResponse(
-          req,
-          res,
-          400,
-          'invalid identifier parameter',
-          'identifier parameter is invalid',
-          parsedUrl.pathname,
-          true
-        )
-      }
-
-      const { url, source, query } = match.groups
-
       if (url) {
         result = await nodelink.sources.resolve(url)
       } else if (source === 'search') {
@@ -61,6 +89,7 @@ async function handler(nodelink, req, res, sendResponse, parsedUrl) {
         result = await nodelink.sources.search(source, query)
       }
     }
+
     return sendResponse(req, res, result, 200)
   } catch (err) {
     logger(
diff --git a/src/api/metrics.js b/src/api/metrics.js
index 45feea7..4e5ab15 100644
--- a/src/api/metrics.js
+++ b/src/api/metrics.js
@@ -1,26 +1,25 @@
 /**
- * 
- * @param {import('../index').NodelinkServer} nodelink
- * @param {*} req
- * @param {*} res
- * @returns 
+ *
+ * @param {import('../index').NodelinkServer} nodelink
+ * @param {*} req
+ * @param {*} res
+ * @returns
  */
-async function handler(nodelink, req, res) {
-  const register = nodelink.statsManager.promRegister;
+async function handler(nodelink, _req, res) {
+  const register = nodelink.statsManager.promRegister
-
-  if (!register) {
-    res.writeHead(503, { 'Content-Type': 'text/plain' })
-    res.end('Metrics are disabled')
-    return true
-  }
-
-  res.writeHead(200, { 'Content-Type': register.contentType })
-
-  res.end(await 
register.metrics()) + if (!register) { + res.writeHead(503, { 'Content-Type': 'text/plain' }) + res.end('Metrics are disabled') return true } - - export default { - handler - } - \ No newline at end of file + + res.writeHead(200, { 'Content-Type': register.contentType }) + + res.end(await register.metrics()) + return true +} + +export default { + handler +} diff --git a/src/api/routeplanner.js b/src/api/routeplanner.js index 9b12343..5eef4be 100644 --- a/src/api/routeplanner.js +++ b/src/api/routeplanner.js @@ -1,5 +1,5 @@ import myzod from 'myzod' -import { sendResponse, sendErrorResponse } from '../utils.js' +import { sendErrorResponse, sendResponse } from '../utils.js' function getStatus(nodelink, req, res) { const routePlanner = nodelink.routePlanner @@ -64,7 +64,7 @@ function freeAddress(nodelink, req, res) { res.end() } -function freeAll(nodelink, req, res) { +function freeAll(nodelink, _req, res) { nodelink.routePlanner.freeAll() res.writeHead(204) res.end() @@ -82,7 +82,7 @@ const routes = { } } -function handler(nodelink, req, res, sendResponse, parsedUrl) { +function handler(nodelink, req, res, _sendResponse, parsedUrl) { const route = routes[parsedUrl.pathname] if (route) { const methodHandler = route[req.method] diff --git a/src/api/sessions.id.js b/src/api/sessions.id.js index 53bd0a8..c253c5f 100644 --- a/src/api/sessions.id.js +++ b/src/api/sessions.id.js @@ -1,10 +1,5 @@ import myzod from 'myzod' -import { - decodeTrack, - logger, - sendResponse, - sendErrorResponse -} from '../utils.js' +import { logger, sendErrorResponse } from '../utils.js' const sessionPatchSchema = myzod .object({ diff --git a/src/api/sessions.id.players.id.mix.id.js b/src/api/sessions.id.players.id.mix.id.js index e8e3a24..900f83b 100644 --- a/src/api/sessions.id.players.id.mix.id.js +++ b/src/api/sessions.id.players.id.mix.id.js @@ -18,7 +18,7 @@ const pathSchema = myzod.object({ mixId: myzod.string() }) -async function handler(nodelink, req, res, sendResponse, parsedUrl) { +async function handler(nodelink, req, res, _sendResponse, parsedUrl) { const method = req.method const pathParts = parsedUrl.pathname.split('/') const sessionId = pathParts[3] @@ -67,11 +67,7 @@ async function handleUpdateMix(req, res, sessionId, guildId, mixId, nodelink) { return sendErrorResponse(req, res, 500, 'Player manager not initialized') } - const updated = await session.players.updateMix( - guildId, - mixId, - body.volume - ) + const updated = await session.players.updateMix(guildId, mixId, body.volume) if (!updated) { return sendErrorResponse(req, res, 404, 'Mix not found') diff --git a/src/api/sessions.id.players.id.mix.js b/src/api/sessions.id.players.id.mix.js index 9ec1450..12b56cb 100644 --- a/src/api/sessions.id.players.id.mix.js +++ b/src/api/sessions.id.players.id.mix.js @@ -1,5 +1,5 @@ import myzod from 'myzod' -import { decodeTrack, logger, sendErrorResponse, sendResponse } from '../utils.js' +import { decodeTrack, logger, sendErrorResponse } from '../utils.js' const mixTrackSchema = myzod .object({ @@ -52,7 +52,14 @@ async function handler(nodelink, req, res, sendResponse, parsedUrl) { return sendErrorResponse(req, res, 405, 'Method Not Allowed') } -async function handleCreateMix(req, res, sessionId, guildId, nodelink, sendResponse) { +async function handleCreateMix( + req, + res, + sessionId, + guildId, + nodelink, + sendResponse +) { try { let body = req.body if (typeof body === 'string') { @@ -74,8 +81,13 @@ async function handleCreateMix(req, res, sessionId, guildId, nodelink, sendRespo return 
sendErrorResponse(req, res, 500, 'Player manager not initialized') } - const mixConfig = nodelink.options?.mix ?? { enabled: true, defaultVolume: 0.8, maxLayersMix: 5, autoCleanup: true } - + const mixConfig = nodelink.options?.mix ?? { + enabled: true, + defaultVolume: 0.8, + maxLayersMix: 5, + autoCleanup: true + } + if (!mixConfig.enabled) { return sendErrorResponse(req, res, 403, 'Mix feature is disabled') } @@ -93,29 +105,27 @@ async function handleCreateMix(req, res, sessionId, guildId, nodelink, sendRespo } } else { return sendErrorResponse( - req, res, + req, + res, 400, 'Track must have either encoded or identifier' ) } - const result = await session.players.addMix( - guildId, - trackData, - body.volume - ) + const result = await session.players.addMix(guildId, trackData, body.volume) - logger( - 'debug', - 'MixAPI', - `Created mix ${result.id} for guild ${guildId}` - ) + logger('debug', 'MixAPI', `Created mix ${result.id} for guild ${guildId}`) - return sendResponse(req, res, { - id: result.id, - track: result.track, - volume: result.volume - }, 201) + return sendResponse( + req, + res, + { + id: result.id, + track: result.track, + volume: result.volume + }, + 201 + ) } catch (error) { if (error instanceof myzod.ValidationError) { return sendErrorResponse(req, res, 400, error.message) @@ -125,7 +135,14 @@ async function handleCreateMix(req, res, sessionId, guildId, nodelink, sendRespo } } -async function handleGetMixes(req, res, sessionId, guildId, nodelink, sendResponse) { +async function handleGetMixes( + req, + res, + sessionId, + guildId, + nodelink, + sendResponse +) { try { const session = nodelink.sessions.get(sessionId) if (!session) { diff --git a/src/api/sessions.id.players.js b/src/api/sessions.id.players.js index 6ad1473..a620d1a 100644 --- a/src/api/sessions.id.players.js +++ b/src/api/sessions.id.players.js @@ -8,7 +8,8 @@ const voiceStateSchema = myzod .object({ token: myzod.string(), endpoint: myzod.string(), - sessionId: myzod.string() + sessionId: myzod.string(), + channelId: myzod.string().optional() }) .allowUnknownKeys() @@ -190,7 +191,12 @@ async function handler(nodelink, req, res, sendResponse, parsedUrl) { await session.players.create(guildId) if (payload.voice) { - const { endpoint, token, sessionId: voiceSessionId } = payload.voice + const { + endpoint, + token, + sessionId: voiceSessionId, + channelId + } = payload.voice const currentPlayer = session.players.get(guildId) if ( currentPlayer && @@ -215,7 +221,7 @@ async function handler(nodelink, req, res, sendResponse, parsedUrl) { let trackToPlay = null let stopPlayer = false - let userData = payload.track?.userData + const userData = payload.track?.userData const trackPayload = payload.track const legacyEncodedTrack = payload.encodedTrack @@ -255,7 +261,8 @@ async function handler(nodelink, req, res, sendResponse, parsedUrl) { trackToPlay = { encoded: trackPayload.encoded, info: decodedTrack.info, - audioTrackId: trackPayload.language || trackPayload.audioTrackId || null + audioTrackId: + trackPayload.language || trackPayload.audioTrackId || null } } } else if (trackPayload.identifier) { @@ -287,7 +294,8 @@ async function handler(nodelink, req, res, sendResponse, parsedUrl) { trackToPlay = { encoded: loadResult.data.encoded, info: loadResult.data.info, - audioTrackId: trackPayload.language || trackPayload.audioTrackId || null + audioTrackId: + trackPayload.language || trackPayload.audioTrackId || null } } else { const message = @@ -328,7 +336,7 @@ async function handler(nodelink, req, res, 
sendResponse, parsedUrl) { if (stopPlayer) { const player = session.players.get(guildId) - if (player && player.isUpdatingTrack) { + if (player?.isUpdatingTrack) { logger( 'debug', 'PlayerUpdate', diff --git a/src/api/trackstream.js b/src/api/trackstream.js index a3580ff..d78cc14 100644 --- a/src/api/trackstream.js +++ b/src/api/trackstream.js @@ -1,10 +1,5 @@ import myzod from 'myzod' -import { - decodeTrack, - logger, - sendErrorResponse, - sendResponse -} from '../utils.js' +import { decodeTrack, logger, sendErrorResponse } from '../utils.js' const trackStreamSchema = myzod.object({ encodedTrack: myzod.string() diff --git a/src/api/version.js b/src/api/version.js index dfa7baa..f617246 100644 --- a/src/api/version.js +++ b/src/api/version.js @@ -1,4 +1,4 @@ -function handler(nodelink, req, res) { +function handler(nodelink, _req, res) { res.writeHead(200, { 'Content-Type': 'text/plain' }) res.end(`${nodelink.version}`) return true diff --git a/src/api/workers.js b/src/api/workers.js new file mode 100644 index 0000000..be56762 --- /dev/null +++ b/src/api/workers.js @@ -0,0 +1,171 @@ +import { sendErrorResponse, sendResponse } from '../utils.js' + +const LOOPBACKS = new Set(['127.0.0.1', '::1', '::ffff:127.0.0.1']) + +function getEndpointConfig(nodelink) { + const endpoint = nodelink.options?.cluster?.endpoint || {} + const code = + typeof endpoint.code === 'string' && endpoint.code.length > 0 + ? endpoint.code + : 'CAPYBARA' + + return { + patchEnabled: endpoint.patchEnabled === true, + allowExternalPatch: endpoint.allowExternalPatch === true, + code + } +} + +function normalizeNumber(value) { + if (typeof value === 'number' && Number.isInteger(value)) return value + if (typeof value === 'string' && value.trim() !== '') { + const parsed = Number(value) + if (Number.isInteger(parsed)) return parsed + } + return null +} + +function resolveWorkerId(manager, payload) { + const clusterId = normalizeNumber(payload.clusterId) + if (clusterId !== null && manager.workersById.has(clusterId)) { + return clusterId + } + + const uniqueId = normalizeNumber(payload.id) + if (uniqueId !== null) { + for (const [id, workerUniqueId] of manager.workerUniqueId.entries()) { + if (workerUniqueId === uniqueId) return id + } + } + + const pid = normalizeNumber(payload.pid) + if (pid !== null) { + const worker = manager.workers.find((entry) => entry?.process?.pid === pid) + if (worker) return worker.id + } + + return null +} + +function handleGet(nodelink, req, res) { + const manager = nodelink.workerManager + if (!manager) return sendResponse(req, res, [], 200) + + const metrics = manager.getWorkerMetrics() + const workers = Object.entries(metrics).map(([id, data]) => ({ + id: Number(id), + ...data + })) + + return sendResponse(req, res, workers, 200) +} + +function handlePatch(nodelink, req, res, parsedUrl) { + const manager = nodelink.workerManager + if (!manager) { + return sendErrorResponse( + req, + res, + 409, + 'Conflict', + 'Cluster workers are not enabled.', + parsedUrl.pathname + ) + } + + const endpointConfig = getEndpointConfig(nodelink) + if (!endpointConfig.patchEnabled) { + return sendErrorResponse( + req, + res, + 403, + 'Forbidden', + 'Workers patch endpoint is disabled.', + parsedUrl.pathname + ) + } + + const remoteAddress = req.socket?.remoteAddress || '' + if (!endpointConfig.allowExternalPatch && !LOOPBACKS.has(remoteAddress)) { + return sendErrorResponse( + req, + res, + 403, + 'Forbidden', + 'External access to the workers patch endpoint is blocked.', + parsedUrl.pathname + ) + } + + 
const payload = req.body && typeof req.body === 'object' ? req.body : {} + if (payload.code !== endpointConfig.code) { + return sendErrorResponse( + req, + res, + 403, + 'Forbidden', + 'Invalid workers patch code.', + parsedUrl.pathname + ) + } + + const workerId = resolveWorkerId(manager, payload) + if (!workerId) { + return sendErrorResponse( + req, + res, + 400, + 'Bad Request', + 'Worker identifier is required.', + parsedUrl.pathname + ) + } + + const worker = manager.workersById.get(workerId) + if (!worker) { + return sendErrorResponse( + req, + res, + 404, + 'Not Found', + 'Worker not found.', + parsedUrl.pathname + ) + } + + const uniqueId = manager.workerUniqueId.get(workerId) || workerId + const pid = worker.process?.pid || null + + manager.removeWorker(workerId) + + return sendResponse( + req, + res, + { + killed: true, + id: uniqueId, + clusterId: workerId, + pid + }, + 200 + ) +} + +function handler(nodelink, req, res, _sendResponse, parsedUrl) { + if (req.method === 'GET') return handleGet(nodelink, req, res) + if (req.method === 'PATCH') return handlePatch(nodelink, req, res, parsedUrl) + + return sendErrorResponse( + req, + res, + 405, + 'Method Not Allowed', + 'Method must be GET or PATCH.', + parsedUrl.pathname + ) +} + +export default { + handler, + methods: ['GET', 'PATCH'] +} diff --git a/src/api/youtube.config.js b/src/api/youtube.config.js index 324579e..a4f3799 100644 --- a/src/api/youtube.config.js +++ b/src/api/youtube.config.js @@ -1,11 +1,13 @@ import myzod from 'myzod' import OAuth from '../sources/youtube/OAuth.js' -import { logger, sendResponse, sendErrorResponse } from '../utils.js' +import { logger, sendErrorResponse } from '../utils.js' -const configSchema = myzod.object({ - refreshToken: myzod.string().min(1).optional(), - visitorData: myzod.string().min(1).optional() -}).allowUnknownKeys() +const configSchema = myzod + .object({ + refreshToken: myzod.string().min(1).optional(), + visitorData: myzod.string().min(1).optional() + }) + .allowUnknownKeys() function maskString(str, visibleChars = 5) { if (!str) return null @@ -19,10 +21,13 @@ async function handler(nodelink, req, res, sendResponse, parsedUrl) { let currentVisitorData = null if (nodelink.workerManager) { - currentRefreshToken = nodelink.workerManager.liveYoutubeConfig.refreshToken + currentRefreshToken = + nodelink.workerManager.liveYoutubeConfig.refreshToken currentVisitorData = nodelink.workerManager.liveYoutubeConfig.visitorData - - if (!currentRefreshToken) currentRefreshToken = nodelink.options.sources.youtube?.clients?.settings?.TV?.refreshToken + + if (!currentRefreshToken) + currentRefreshToken = + nodelink.options.sources.youtube?.clients?.settings?.TV?.refreshToken if (!currentVisitorData) currentVisitorData = null } else { const youtube = nodelink.sources?.sources?.get('youtube') @@ -33,23 +38,30 @@ async function handler(nodelink, req, res, sendResponse, parsedUrl) { } let isValid = null - if (parsedUrl.searchParams.get('validate') === 'true' && currentRefreshToken) { + if ( + parsedUrl.searchParams.get('validate') === 'true' && + currentRefreshToken + ) { try { const validator = new OAuth(nodelink) validator.refreshToken = currentRefreshToken validator.accessToken = null validator.tokenExpiry = 0 - + const token = await validator.getAccessToken() isValid = !!token - } catch (e) { + } catch (_e) { isValid = false } } const response = { - refreshToken: currentRefreshToken ? maskString(currentRefreshToken, 7) : null, - visitorData: currentVisitorData ? 
maskString(currentVisitorData, 10) : null, + refreshToken: currentRefreshToken + ? maskString(currentRefreshToken, 7) + : null, + visitorData: currentVisitorData + ? maskString(currentVisitorData, 10) + : null, isConfigured: !!currentRefreshToken, isValid } @@ -85,7 +97,11 @@ async function handler(nodelink, req, res, sendResponse, parsedUrl) { } if (refreshToken) { - logger('info', 'API', 'Sandboxing new YouTube refresh token for validation.') + logger( + 'info', + 'API', + 'Sandboxing new YouTube refresh token for validation.' + ) try { const sandboxOAuth = new OAuth(nodelink) sandboxOAuth.refreshToken = refreshToken @@ -95,11 +111,21 @@ async function handler(nodelink, req, res, sendResponse, parsedUrl) { const accessToken = await sandboxOAuth.getAccessToken() if (!accessToken) { - throw new Error('Google rejected the refresh token (Invalid Grant or similar).') + throw new Error( + 'Google rejected the refresh token (Invalid Grant or similar).' + ) } - logger('info', 'API', 'YouTube refresh token validated successfully in sandbox.') + logger( + 'info', + 'API', + 'YouTube refresh token validated successfully in sandbox.' + ) } catch (error) { - logger('warn', 'API', `YouTube token validation failed: ${error.message}`) + logger( + 'warn', + 'API', + `YouTube token validation failed: ${error.message}` + ) return sendErrorResponse( req, res, @@ -120,24 +146,29 @@ async function handler(nodelink, req, res, sendResponse, parsedUrl) { logger('info', 'API', 'Master LiveConfig updated for future workers.') logger('info', 'API', 'Propagating YouTube config to cluster workers.') - + const promises = nodelink.workerManager.workers - .filter(w => w.isConnected()) - .map(worker => - nodelink.workerManager.execute(worker, 'updateYoutubeConfig', payload) + .filter((w) => w.isConnected()) + .map((worker) => + nodelink.workerManager + .execute(worker, 'updateYoutubeConfig', payload) .then(() => 1) - .catch(err => { - logger('error', 'API', `Failed to update worker ${worker.id}: ${err.message}`) + .catch((err) => { + logger( + 'error', + 'API', + `Failed to update worker ${worker.id}: ${err.message}` + ) return 0 }) ) - + const results = await Promise.all(promises) updatedCount = results.reduce((a, b) => a + b, 0) } else { logger('info', 'API', 'Updating local YouTube source.') const youtube = nodelink.sources?.sources?.get('youtube') - + if (youtube) { if (refreshToken) { if (youtube.oauth) { @@ -163,13 +194,18 @@ async function handler(nodelink, req, res, sendResponse, parsedUrl) { { message: 'YouTube configuration updated successfully.', workersUpdated: updatedCount, - fieldsUpdated: Object.keys(payload).filter(k => payload[k] !== undefined) + fieldsUpdated: Object.keys(payload).filter( + (k) => payload[k] !== undefined + ) }, 200 ) - } catch (err) { - logger('error', 'API', `Critical error during config propagation: ${err.message}`) + logger( + 'error', + 'API', + `Critical error during config propagation: ${err.message}` + ) return sendErrorResponse( req, res, diff --git a/src/api/youtube.oauth.js b/src/api/youtube.oauth.js index 9d065f2..beda088 100644 --- a/src/api/youtube.oauth.js +++ b/src/api/youtube.oauth.js @@ -1,21 +1,22 @@ import myzod from 'myzod' -import { logger, sendResponse, sendErrorResponse, makeRequest } from '../utils.js' +import { logger, makeRequest, sendErrorResponse } from '../utils.js' -const CLIENT_ID = '861556708454-d6dlm3lh05idd8npek18k6be8ba3oc68.apps.googleusercontent.com' +const CLIENT_ID = + '861556708454-d6dlm3lh05idd8npek18k6be8ba3oc68.apps.googleusercontent.com' 
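// CLIENT_ID above and CLIENT_SECRET below appear to be the publicly known
// OAuth credentials of YouTube's TV client; the handler uses them to exchange
// a user-supplied refresh token for a fresh access token. A minimal sketch of
// calling this route, assuming it is mounted at /v4/youtube/oauth and guarded
// by the usual Authorization password header:
//
//   await fetch('http://127.0.0.1:3000/v4/youtube/oauth', {
//     method: 'POST',
//     headers: {
//       Authorization: 'your-server-password', // placeholder
//       'Content-Type': 'application/json'
//     },
//     body: JSON.stringify({ refreshToken: '1//0g...' }) // placeholder token
//   })
//
// A GET with ?refreshToken=... is accepted as well (see the handler below).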
const CLIENT_SECRET = 'SboVhoG9s0rNafixCSGGKXAT' -const schema = myzod.object({ +const _schema = myzod.object({ refreshToken: myzod.string().min(1) }) -async function handler(nodelink, req, res, sendResponse, parsedUrl) { +async function handler(_nodelink, req, res, sendResponse, parsedUrl) { let refreshToken = null if (req.method === 'GET') { refreshToken = parsedUrl.searchParams.get('refreshToken') } else if (req.method === 'POST') { const body = req.body - if (body && body.refreshToken) { + if (body?.refreshToken) { refreshToken = body.refreshToken } } @@ -46,8 +47,16 @@ async function handler(nodelink, req, res, sendResponse, parsedUrl) { ) if (error || statusCode !== 200) { - const msg = error?.message || body?.error_description || 'Failed to refresh token' - return sendErrorResponse(req, res, 500, 'Internal Server Error', msg, parsedUrl.pathname) + const msg = + error?.message || body?.error_description || 'Failed to refresh token' + return sendErrorResponse( + req, + res, + 500, + 'Internal Server Error', + msg, + parsedUrl.pathname + ) } if (body.error) { diff --git a/src/constants.js b/src/constants.js index 42c97fd..2091911 100644 --- a/src/constants.js +++ b/src/constants.js @@ -42,6 +42,7 @@ export const SupportedFormats = { FLAC: 'flac', OGG_VORBIS: 'ogg-vorbis', WAV: 'wav', + FLV: 'flv', UNKNOWN: 'unknown' } @@ -69,6 +70,7 @@ export function normalizeFormat(type) { if (lowerType.includes('ogg') || lowerType.includes('vorbis')) return SupportedFormats.OGG_VORBIS if (lowerType.includes('wav')) return SupportedFormats.WAV + if (lowerType.includes('flv')) return SupportedFormats.FLV return SupportedFormats.UNKNOWN } diff --git a/src/index.js b/src/index.js index 281f8ab..1106a5a 100644 --- a/src/index.js +++ b/src/index.js @@ -2,18 +2,17 @@ import cluster from 'node:cluster' import { EventEmitter } from 'node:events' import http from 'node:http' import path from 'node:path' -import { fileURLToPath, pathToFileURL } from 'node:url' +import { pathToFileURL } from 'node:url' import WebSocketServer from '@performanc/pwsl-server' import requestHandler from './api/index.js' import connectionManager from './managers/connectionManager.js' -import lyricsManager from './managers/lyricsManager.js' +import CredentialManager from './managers/credentialManager.js' import routePlannerManager from './managers/routePlannerManager.js' import sessionManager from './managers/sessionManager.js' -import sourceManager from './managers/sourceManager.js' import statsManager from './managers/statsManager.js' -import OAuth from './sources/youtube/OAuth.js' import { + applyEnvOverrides, checkForUpdates, cleanupHttpAgents, cleanupLogger, @@ -24,15 +23,17 @@ import { logger, parseClient, validateProperty, - verifyDiscordID, - applyEnvOverrides + verifyDiscordID } from './utils.js' import 'dotenv/config' import { GatewayEvents } from './constants.js' import DosProtectionManager from './managers/dosProtectionManager.js' import PlayerManager from './managers/playerManager.js' -import RateLimitManager from './managers/rateLimitManager.js' import PluginManager from './managers/pluginManager.js' +import RateLimitManager from './managers/rateLimitManager.js' +import SourceWorkerManager from './managers/sourceWorkerManager.js' +import { parseVoiceFrameHeader } from './voice/voiceFrames.js' +import { createVoiceRelay } from './voice/voiceRelay.js' let config @@ -66,18 +67,18 @@ try { } // Apply environment variable overrides after config is loaded -applyEnvOverrides(config); +applyEnvOverrides(config) const 
clusterEnabled = process.env.CLUSTER_ENABLED?.toLowerCase() === 'true' || (typeof config.cluster?.enabled === 'boolean' && config.cluster.enabled) || false -let configuredWorkers = 0 +let _configuredWorkers = 0 if (process.env.CLUSTER_WORKERS) - configuredWorkers = Number(process.env.CLUSTER_WORKERS) + _configuredWorkers = Number(process.env.CLUSTER_WORKERS) else if (typeof config.cluster?.workers === 'number') - configuredWorkers = config.cluster.workers + _configuredWorkers = config.cluster.workers initLogger(config) @@ -101,12 +102,25 @@ class BunSocketWrapper extends EventEmitter { constructor(ws) { super() this.ws = ws - this.remoteAddress = ws.remoteAddress - this.readyState = ws.readyState + this.remoteAddress = ws?.data?.remoteAddress } send(data) { - return this.ws.send(data) > 0 + try { + const r = this.ws.send(data) + return r !== 0 + } catch { + return false + } + } + + ping(data) { + try { + this.ws.ping?.(data) + return true + } catch { + return false + } } close(code, reason) { @@ -130,31 +144,38 @@ let registry = null if (process.embedder === 'nodejs') { try { registry = await import('./registry.js') - } catch (e) {} + } catch (_e) {} } -class NodelinkServer { +class NodelinkServer extends EventEmitter { constructor(options, PlayerManagerClass, isClusterPrimary = false) { + super() if (!options || Object.keys(options).length === 0) throw new Error('Configuration file not found or empty') this.options = options this.logger = logger this.server = null this.socket = null + + this._usingBunServer = Boolean(isBun && options?.server?.useBunServer) + this.sessions = new sessionManager(this, PlayerManagerClass) - if (!isClusterPrimary) { - this.sources = new sourceManager(this) - this.lyrics = new lyricsManager(this) - } else { - this.sources = null - this.lyrics = null - } + this.sources = null + this.lyrics = null + + this._sourceInitPromise = this._initSources(isClusterPrimary, options) + this.routePlanner = new routePlannerManager(this) + this.credentialManager = new CredentialManager(this) this.connectionManager = new connectionManager(this) this.statsManager = new statsManager(this) this.rateLimitManager = new RateLimitManager(this) this.dosProtectionManager = new DosProtectionManager(this) this.pluginManager = new PluginManager(this) + this.sourceWorkerManager = + isClusterPrimary && options.cluster?.specializedSourceWorker?.enabled + ? 
new SourceWorkerManager(this) + : null this.registry = registry this.version = getVersion() this.gitInfo = getGitInfo() @@ -162,7 +183,7 @@ class NodelinkServer { players: 0, playingPlayers: 0 } - + this.extensions = { sources: new Map(), filters: new Map(), @@ -173,11 +194,21 @@ class NodelinkServer { audioInterceptors: [], playerInterceptors: [] } - + + this.voiceSockets = new Map() + this.voiceRelay = createVoiceRelay({ + enabled: options.voiceReceive?.enabled, + format: options.voiceReceive?.format, + sendFrame: (frame) => this.handleVoiceFrame(frame), + logger + }) + this._globalUpdater = null + this._statsUpdater = null this.supportedSourcesCache = null + this._heartbeatInterval = null - if (isBun) { + if (this._usingBunServer) { this.socket = new EventEmitter() } else { this.socket = new WebSocketServer({ noServer: true }) @@ -191,6 +222,89 @@ class NodelinkServer { ) } + async _initSources(isClusterPrimary, _options) { + if (!isClusterPrimary) { + const [{ default: sourceMan }, { default: lyricsMan }] = + await Promise.all([ + import('./managers/sourceManager.js'), + import('./managers/lyricsManager.js') + ]) + this.sources = new sourceMan(this) + this.lyrics = new lyricsMan(this) + } + } + + _startHeartbeat() { + if (this._heartbeatInterval) return + + this._heartbeatInterval = setInterval(() => { + for (const session of this.sessions.activeSessions.values()) { + if (session.socket && !session.isPaused) { + try { + if (typeof session.socket.sendFrame === 'function') { + session.socket.sendFrame(Buffer.alloc(0), { + len: 0, + fin: true, + opcode: 0x09 + }) + } else if (typeof session.socket.ping === 'function') { + session.socket.ping() + } + } catch (_e) { + logger( + 'debug', + 'Server', + `Failed to send heartbeat to session ${session.id}` + ) + } + } + } + }, 45000) + } + + _stopHeartbeat() { + if (this._heartbeatInterval) { + clearInterval(this._heartbeatInterval) + this._heartbeatInterval = null + } + } + + handleVoiceFrame(frame) { + const header = parseVoiceFrameHeader(frame) + if (!header?.guildId) return + + const sockets = this.voiceSockets.get(header.guildId) + if (!sockets || sockets.size === 0) return + + for (const socket of sockets) { + try { + socket.send(frame) + } catch {} + } + } + + registerVoiceSocket(guildId, socket) { + if (!guildId || !socket) return + + let sockets = this.voiceSockets.get(guildId) + if (!sockets) { + sockets = new Set() + this.voiceSockets.set(guildId, sockets) + } + + sockets.add(socket) + + const cleanup = () => { + const set = this.voiceSockets.get(guildId) + if (!set) return + set.delete(socket) + if (set.size === 0) this.voiceSockets.delete(guildId) + } + + socket.on('close', cleanup) + socket.on('error', cleanup) + } + async getSourcesFromWorker() { if (!this.workerManager) { return [] @@ -205,14 +319,27 @@ class NodelinkServer { } _validateConfig() { + const validateNonNegativeInt = (value, path) => + validateProperty( + value, + path, + 'integer >= 0', + (v) => Number.isInteger(v) && v >= 0 + ) + + const validatePositiveInt = (value, path) => + validateProperty( + value, + path, + 'integer > 0', + (v) => Number.isInteger(v) && v > 0 + ) + validateProperty( this.options.server.port, 'server.port', 'integer between 1 and 65535', - (value) => - Number.isInteger(value) && - value >= 1 && - value <= 65535 + (value) => Number.isInteger(value) && value >= 1 && value <= 65535 ) validateProperty( @@ -226,39 +353,28 @@ class NodelinkServer { this.options.playerUpdateInterval, 'playerUpdateInterval', 'integer between 250 and 60000 
(milliseconds)', - (value) => - Number.isInteger(value) && - value >= 250 && - value <= 60000 + (value) => Number.isInteger(value) && value >= 250 && value <= 60000 ) validateProperty( this.options.maxSearchResults, 'maxSearchResults', 'integer between 1 and 100', - (value) => - Number.isInteger(value) && - value >= 1 && - value <= 100 + (value) => Number.isInteger(value) && value >= 1 && value <= 100 ) validateProperty( this.options.maxAlbumPlaylistLength, 'maxAlbumPlaylistLength', 'integer between 1 and 500', - (value) => - Number.isInteger(value) && - value >= 1 && - value <= 500 + (value) => Number.isInteger(value) && value >= 1 && value <= 500 ) validateProperty( this.options.trackStuckThresholdMs, 'trackStuckThresholdMs', 'integer >= 1000 (milliseconds)', - (value) => - Number.isInteger(value) && - value >= 1000 + (value) => Number.isInteger(value) && value >= 1000 ) validateProperty( @@ -266,18 +382,10 @@ class NodelinkServer { 'zombieThresholdMs', `integer > trackStuckThresholdMs (${this.options.trackStuckThresholdMs})`, (value) => - Number.isInteger(value) && - value > this.options.trackStuckThresholdMs + Number.isInteger(value) && value > this.options.trackStuckThresholdMs ) - validateProperty( - this.options.cluster.workers, - 'cluster.workers', - 'integer >= 0', - (value) => - Number.isInteger(value) && - value >= 0 - ) + validateNonNegativeInt(this.options.cluster.workers, 'cluster.workers') validateProperty( this.options.cluster.minWorkers, @@ -291,14 +399,18 @@ class NodelinkServer { (this.options.cluster.workers === 0 || value <= this.options.cluster.workers) ) - + validateProperty( this.options.defaultSearchSource, 'defaultSearchSource', - 'key of an enabled source in config.sources', - (v) => - typeof v === 'string' && - Boolean(this.options.sources?.[v]?.enabled) + 'key or array of keys of enabled sources in config.sources', + (v) => { + const sources = Array.isArray(v) ? 
v : [v] + return sources.every( + (s) => + typeof s === 'string' && Boolean(this.options.sources?.[s]?.enabled) + ) + } ) validateProperty( @@ -307,14 +419,14 @@ class NodelinkServer { "one of ['high', 'medium', 'low', 'lowest']", (v) => ['high', 'medium', 'low', 'lowest'].includes(v) ) - + validateProperty( this.options.audio.resamplingQuality, 'audio.resamplingQuality', "one of ['best', 'medium', 'fastest', 'zero', 'linear']", (v) => ['best', 'medium', 'fastest', 'zero', 'linear'].includes(v) ) - + validateProperty( this.options.routePlanner?.strategy, 'routePlanner.strategy', @@ -324,64 +436,149 @@ class NodelinkServer { ['RotateOnBan', 'RoundRobin', 'LoadBalance'].includes(v) ) - validateProperty( - this.options.routePlanner?.bannedIpCooldown, - 'routePlanner.bannedIpCooldown', - 'integer > 0 (milliseconds)', - (v) => Number.isInteger(v) && v > 0 - ) - + if (this.options.routePlanner?.bannedIpCooldown !== undefined) { + validatePositiveInt( + this.options.routePlanner.bannedIpCooldown, + 'routePlanner.bannedIpCooldown' + ) + } - const rateLimitSections = [ - 'global', - 'perIp', - 'perUserId', - 'perGuildId' - ] + const rateLimitSections = ['global', 'perIp', 'perUserId', 'perGuildId'] - if (this.options.rateLimit?.enabled !== false) { + if (this.options.rateLimit?.enabled !== false) { for (let i = 0; i < rateLimitSections.length; i++) { const section = rateLimitSections[i] const config = this.options.rateLimit?.[section] - + if (!config) continue - - validateProperty( + + validatePositiveInt( config.maxRequests, - `rateLimit.${section}.maxRequests`, - 'integer > 0', - (value) => - Number.isInteger(value) && - value > 0 + `rateLimit.${section}.maxRequests` ) - validateProperty( + validatePositiveInt( config.timeWindowMs, - `rateLimit.${section}.timeWindowMs`, - 'integer > 0 (milliseconds)', - (value) => - Number.isInteger(value) && - value > 0 + `rateLimit.${section}.timeWindowMs` ) if (i === 0) continue - + const parentSection = rateLimitSections[i - 1] const parentConfig = this.options.rateLimit?.[parentSection] - + if (!parentConfig) continue - + validateProperty( config.maxRequests, `rateLimit.${section}.maxRequests`, `integer <= rateLimit.${parentSection}.maxRequests (${parentConfig.maxRequests})`, (value) => Number.isInteger(value) && - value > 0 && - value <= parentConfig.maxRequests + value > 0 && + value <= parentConfig.maxRequests + ) + } + } + + const spotify = this.options.sources?.spotify + const applemusic = this.options.sources?.applemusic + const tidal = this.options.sources?.tidal + const jiosaavn = this.options.sources?.jiosaavn + + if (spotify?.enabled) { + validateNonNegativeInt( + spotify.playlistLoadLimit, + 'sources.spotify.playlistLoadLimit' + ) + + validateNonNegativeInt( + spotify.albumLoadLimit, + 'sources.spotify.albumLoadLimit' + ) + + validatePositiveInt( + spotify.playlistPageLoadConcurrency, + 'sources.spotify.playlistPageLoadConcurrency' + ) + + validatePositiveInt( + spotify.albumPageLoadConcurrency, + 'sources.spotify.albumPageLoadConcurrency' + ) + + const credsComplete = + Boolean(spotify.clientId) === Boolean(spotify.clientSecret) + + validateProperty( + credsComplete, + 'sources.spotify.credentials', + 'clientId and clientSecret must be set together', + (v) => v === true + ) + } + + if (applemusic?.enabled) { + validateNonNegativeInt( + applemusic.playlistLoadLimit, + 'sources.applemusic.playlistLoadLimit' + ) + + validateNonNegativeInt( + applemusic.albumLoadLimit, + 'sources.applemusic.albumLoadLimit' + ) + + validatePositiveInt( + 
applemusic.playlistPageLoadConcurrency, + 'sources.applemusic.playlistPageLoadConcurrency' + ) + + validatePositiveInt( + applemusic.albumPageLoadConcurrency, + 'sources.applemusic.albumPageLoadConcurrency' + ) + } + + if (tidal?.enabled) { + validateNonNegativeInt( + tidal.playlistLoadLimit, + 'sources.tidal.playlistLoadLimit' + ) + + validatePositiveInt( + tidal.playlistPageLoadConcurrency, + 'sources.tidal.playlistPageLoadConcurrency' + ) + + if (tidal.token !== undefined) { + validateProperty( + tidal.token, + 'sources.tidal.token', + 'string (non-whitespace if provided)', + (v) => typeof v === 'string' && (v === '' || v.trim().length > 0) ) } } + + if (jiosaavn?.enabled) { + validateNonNegativeInt( + jiosaavn.playlistLoadLimit, + 'sources.jiosaavn.playlistLoadLimit' + ) + + validateNonNegativeInt( + jiosaavn.artistLoadLimit, + 'sources.jiosaavn.artistLoadLimit' + ) + + validateProperty( + jiosaavn.playlistLoadLimit, + 'sources.jiosaavn.playlistLoadLimit', + `integer >= artistLoadLimit (${jiosaavn.artistLoadLimit})`, + (v) => v >= jiosaavn.artistLoadLimit + ) + } } _setupSocketEvents() { @@ -406,7 +603,12 @@ class NodelinkServer { } for (const interceptor of interceptors) { - const handled = await interceptor(this, socket, parsedData, clientInfo) + const handled = await interceptor( + this, + socket, + parsedData, + clientInfo + ) if (handled === true) return } } @@ -428,9 +630,40 @@ class NodelinkServer { logger( 'info', 'Server', - `\x1b[36m${clientInfo.name}\x1b[0m${clientInfo.version ? `/\x1b[32mv${clientInfo.version}\x1b[0m` : ''} resumed session with ID: ${oldSessionId}` + `\x1b[36m${clientInfo.name}\x1b[0m${ + clientInfo.version + ? `/\x1b[32mv${clientInfo.version}\x1b[0m` + : '' + } resumed session with ID: ${oldSessionId}` ) this.statsManager.incrementSessionResume(clientInfo.name, true) + + socket.on('close', (code, reason) => { + if (!this.sessions.has(oldSessionId)) return + + const session = this.sessions.get(oldSessionId) + if (!session) return + + logger( + 'info', + 'Server', + `\x1b[36m${clientInfo.name}\x1b[0m/\x1b[32mv${ + clientInfo.version + }\x1b[0m disconnected with code ${code} and reason: ${ + reason || 'without reason' + }` + ) + + if (session.resuming) { + this.sessions.pause(oldSessionId) + } else { + this.sessions.shutdown(oldSessionId) + } + + const sessionCount = this.sessions.activeSessions?.size || 0 + this.statsManager.setWebsocketConnections(sessionCount) + }) + socket.send( JSON.stringify({ op: 'ready', @@ -462,6 +695,9 @@ class NodelinkServer { playerInfo._sendUpdate() } } + + const sessionCount = this.sessions.activeSessions?.size || 0 + this.statsManager.setWebsocketConnections(sessionCount) } } else { const sessionId = this.sessions.create(request, socket, clientInfo) @@ -478,7 +714,11 @@ class NodelinkServer { logger( 'info', 'Server', - `\x1b[36m${clientInfo.name}\x1b[0m${clientInfo.version ? `/\x1b[32mv${clientInfo.version}\x1b[0m` : ''} disconnected with code ${code} and reason: ${ + `\x1b[36m${clientInfo.name}\x1b[0m${ + clientInfo.version + ? 
`/\x1b[32mv${clientInfo.version}\x1b[0m` + : '' + } disconnected with code ${code} and reason: ${ reason || 'without reason' }` ) @@ -509,22 +749,13 @@ class NodelinkServer { const port = this.options.server.port const host = this.options.server.host || '0.0.0.0' const password = this.options.server.password - const useBun = this.options.server.useBunServer || false + const self = this - if (!useBun) { - logger( - 'warn', - 'Server', - 'Bun.serve usage is disabled in config, using standard Node.js HTTP server instead.' - ) - return - } logger( 'warn', 'Server', - `Running with Bun.serve, remember this is experimental!` + 'Running with Bun.serve, remember this is experimental!' ) - const self = this this.server = Bun.serve({ port, @@ -533,11 +764,11 @@ class NodelinkServer { async fetch(req, server) { const url = new URL(req.url) - const path = url.pathname.endsWith('/') + const pathname = url.pathname.endsWith('/') ? url.pathname.slice(0, -1) : url.pathname - if (path === '/v4/websocket') { + if (pathname === '/v4/websocket') { const remoteAddress = server.requestIP(req)?.address || 'unknown' const clientAddress = `[External] (${remoteAddress})` @@ -550,7 +781,7 @@ class NodelinkServer { logger( 'warn', 'Server', - `Unauthorized connection attempt from ${clientAddress} - Invalid Password` + `Unauthorized connection attempt from ${clientAddress} - Invalid password provided: ${auth || 'None'}` ) return new Response('Invalid password provided.', { status: 401, @@ -579,6 +810,17 @@ class NodelinkServer { } const clientInfo = parseClient(clientName) + if (!clientInfo) { + logger( + 'warn', + 'Server', + `Invalid client-name from ${clientAddress}` + ) + return new Response('Invalid or missing Client-Name header.', { + status: 400, + statusText: 'Bad Request' + }) + } const success = server.upgrade(req, { data: { @@ -590,10 +832,7 @@ class NodelinkServer { } }) - if (success) { - return undefined - } - + if (success) return undefined return new Response('WebSocket upgrade failed', { status: 400 }) } @@ -655,6 +894,8 @@ class NodelinkServer { }, websocket: { + sendPings: true, + open(ws) { const wrapper = new BunSocketWrapper(ws) ws.data.wrapper = wrapper @@ -670,26 +911,44 @@ class NodelinkServer { logger( 'info', 'Server', - `\x1b[36m${clientInfo.name}\x1b[0m${clientInfo.version ? `/\x1b[32mv${clientInfo.version}\x1b[0m` : ''} connected from [External] (${ws.data.remoteAddress}) | \x1b[33mURL:\x1b[0m ${ws.data.url}` + `\x1b[36m${clientInfo.name}\x1b[0m${ + clientInfo.version ? 
`/\x1b[32mv${clientInfo.version}\x1b[0m` : '' + } connected from [External] (${ws.data.remoteAddress}) | \x1b[33mURL:\x1b[0m ${ws.data.url}` ) + let eventName = '/v4/websocket' + let guildId = null + try { + const url = new URL(ws.data.url) + const voiceMatch = url.pathname.match( + /^\/v4\/websocket\/voice\/([A-Za-z0-9]+)\/?$/ + ) + if (voiceMatch) { + if (!self.options.voiceReceive?.enabled) { + try { + wrapper.close(1008, 'Voice receive disabled') + } catch {} + return + } + eventName = '/v4/websocket/voice' + guildId = voiceMatch[1] + } + } catch {} + self.socket.emit( - '/v4/websocket', + eventName, wrapper, reqShim, clientInfo, - sessionId + sessionId, + guildId ) }, message(ws, message) { - if (ws.data.wrapper) { - ws.data.wrapper._handleMessage(message) - } + ws.data.wrapper?._handleMessage(message) }, close(ws, code, reason) { - if (ws.data.wrapper) { - ws.data.wrapper._handleClose(code, reason) - } + ws.data.wrapper?._handleClose(code, reason) } } }) @@ -702,7 +961,7 @@ class NodelinkServer { } _createServer() { - if (isBun) { + if (this._usingBunServer) { this._createBunServer() return } @@ -711,6 +970,9 @@ class NodelinkServer { requestHandler(this, req, res) ) + this.server.keepAliveTimeout = 65000 + this.server.headersTimeout = 66000 + this.server.on('upgrade', (request, socket, head) => { const { remoteAddress, remotePort } = request.socket const isInternal = @@ -719,7 +981,9 @@ class NodelinkServer { const clientAddress = `${isInternal ? '[Internal]' : '[External]'} (${remoteAddress}:${remotePort})` const rejectUpgrade = (status, statusText, body) => { - socket.write(`HTTP/1.1 ${status} ${statusText}\r\nContent-Type: text/plain\r\nContent-Length: ${body.length}\r\n\r\n${body}`) + socket.write( + `HTTP/1.1 ${status} ${statusText}\r\nContent-Type: text/plain\r\nContent-Length: ${body.length}\r\n\r\n${body}` + ) socket.destroy() } @@ -739,7 +1003,7 @@ class NodelinkServer { logger( 'warn', 'Server', - `Unauthorized connection attempt from ${clientAddress} - Invalid password provided` + `Unauthorized connection attempt from ${clientAddress} - Invalid password provided: ${headers.authorization || 'None'}` ) return rejectUpgrade(401, 'Unauthorized', 'Invalid password provided.') } @@ -750,7 +1014,11 @@ class NodelinkServer { 'Server', `Unauthorized connection attempt from ${clientAddress} - Invalid client-name provided` ) - return rejectUpgrade(400, 'Bad Request', 'Invalid or missing Client-Name header.') + return rejectUpgrade( + 400, + 'Bad Request', + 'Invalid or missing Client-Name header.' + ) } let sessionId = headers['session-id'] @@ -768,7 +1036,11 @@ class NodelinkServer { request.url, `http://${request.headers.host}` ) - if (pathname === '/v4/websocket') { + const voiceMatch = pathname.match( + /^\/v4\/websocket\/voice\/([A-Za-z0-9]+)\/?$/ + ) + + if (pathname === '/v4/websocket' || voiceMatch) { if (!headers['user-id']) { logger( 'warn', @@ -785,30 +1057,88 @@ class NodelinkServer { ) return rejectUpgrade(400, 'Bad Request', 'Invalid User-Id header.') } + + if (voiceMatch && !this.options.voiceReceive?.enabled) { + return rejectUpgrade( + 404, + 'Not Found', + 'Voice websocket endpoint is disabled.' + ) + } + request.headers = headers logger( 'info', 'Server', - `\x1b[36m${clientInfo.name}\x1b[0m${clientInfo.version ? `/\x1b[32mv${clientInfo.version}\x1b[0m` : ''} connected from ${clientAddress} | \x1b[33mURL:\x1b[0m ${request.url}` + `\x1b[36m${clientInfo.name}\x1b[0m${ + clientInfo.version ? 
`/\x1b[32mv${clientInfo.version}\x1b[0m` : '' + } connected from ${clientAddress} | \x1b[33mURL:\x1b[0m ${request.url}` ) - this.socket.handleUpgrade(request, socket, head, {}, (ws) => - this.socket.emit('/v4/websocket', ws, request, clientInfo, sessionId) - ) + const eventName = voiceMatch ? '/v4/websocket/voice' : '/v4/websocket' + const guildId = voiceMatch ? voiceMatch[1] : null + + if (isBun && !this._usingBunServer) { + this.socket.handleUpgrade(request, socket, head, (ws) => { + this.socket.emit( + eventName, + ws, + request, + clientInfo, + sessionId, + guildId + ) + }) + } else { + this.socket.handleUpgrade(request, socket, head, {}, (ws) => + this.socket.emit( + eventName, + ws, + request, + clientInfo, + sessionId, + guildId + ) + ) + } } else { logger( 'warn', 'Server', `Unauthorized connection attempt from ${clientAddress} - Invalid path provided` ) - return rejectUpgrade(404, 'Not Found', 'Invalid path for WebSocket upgrade.') + return rejectUpgrade( + 404, + 'Not Found', + 'Invalid path for WebSocket upgrade.' + ) } }) + + this.socket.on( + '/v4/websocket/voice', + (socket, request, _clientInfo, _sessionId, guildId) => { + if (!this.options.voiceReceive?.enabled) { + try { + socket.close(1008, 'Voice receive disabled') + } catch {} + return + } + + logger( + 'info', + 'Voice', + `Voice websocket connected from ${request.socket?.remoteAddress || 'unknown'} | guild ${guildId}` + ) + + this.registerVoiceSocket(guildId, socket) + } + ) } _listen() { - if (isBun) return + if (!this.server || typeof this.server.listen !== 'function') return const port = this.options.server.port const host = this.options.server.host || '0.0.0.0' @@ -847,18 +1177,56 @@ class NodelinkServer { ) }) } + _startGlobalUpdater() { if (this._globalUpdater) return const updateInterval = Math.max( 1, this.options?.playerUpdateInterval ?? 5000 ) + const statsSendInterval = Math.max( + 1, + this.options?.statsUpdateInterval ?? 30000 + ) + const metricsInterval = this.options?.metrics?.enabled + ? 5000 + : statsSendInterval const zombieThreshold = this.options?.zombieThresholdMs ?? 60000 this._globalUpdater = setInterval(() => { + for (const session of this.sessions.values()) { + if (!session.players) continue + for (const player of session.players.players.values()) { + if (player?.track && !player.isPaused && player.connection) { + if ( + player._lastStreamDataTime > 0 && + Date.now() - player._lastStreamDataTime >= zombieThreshold + ) { + logger( + 'warn', + 'Player', + `Player for guild ${player.guildId} detected as zombie (no stream data).` + ) + player.emitEvent(GatewayEvents.TRACK_STUCK, { + guildId: player.guildId, + track: player.track, + reason: 'no_stream_data', + thresholdMs: zombieThreshold + }) + } + player._sendUpdate() + } + } + } + }, updateInterval) + + let lastStatsSendTime = 0 + this._statsUpdater = setInterval(() => { + const now = Date.now() let localPlayers = 0 let localPlayingPlayers = 0 let voiceConnections = 0 + for (const session of this.sessions.values()) { if (!session.players) continue for (const player of session.players.players.values()) { @@ -883,68 +1251,93 @@ class NodelinkServer { } }) } else if (!clusterEnabled) { - // In single-process mode, update the server's own statistics this.statistics.players = localPlayers this.statistics.playingPlayers = localPlayingPlayers } const stats = getStats(this) - const workerMetrics = this.workerManager ? this.workerManager.getWorkerMetrics() : null + const workerMetrics = this.workerManager + ? 
this.workerManager.getWorkerMetrics() + : null this.statsManager.updateStatsMetrics(stats, workerMetrics) - const statsPayload = JSON.stringify({ op: 'stats', ...stats }) - for (const session of this.sessions.values()) { - if (session.socket) { - session.socket.send(statsPayload) - } + if (now - lastStatsSendTime >= statsSendInterval) { + lastStatsSendTime = now + const statsPayload = JSON.stringify({ op: 'stats', ...stats }) - for (const player of session.players.players.values()) { - if (player?.track && !player.isPaused && player.connection) { - if ( - player._lastStreamDataTime > 0 && - Date.now() - player._lastStreamDataTime >= zombieThreshold - ) { - logger( - 'warn', - 'Player', - `Player for guild ${player.guildId} detected as zombie (no stream data).` - ) - player.emitEvent(GatewayEvents.TRACK_STUCK, { - guildId: player.guildId, - track: player.track, - reason: 'no_stream_data', - thresholdMs: zombieThreshold - }) - } - player._sendUpdate() + for (const session of this.sessions.values()) { + if (session.socket) { + session.socket.send(statsPayload) } } } - }, updateInterval) + }, metricsInterval) } + _stopGlobalPlayerUpdater() { if (this._globalUpdater) { clearInterval(this._globalUpdater) this._globalUpdater = null } + if (this._statsUpdater) { + clearInterval(this._statsUpdater) + this._statsUpdater = null + } } - _cleanupWebSocketServer() { - if (isBun && this.server) { - this.server.stop() - logger('info', 'WebSocket', 'Bun server stopped successfully') + async _cleanupWebSocketServer() { + if (this._usingBunServer && this.server) { + try { + logger('info', 'WebSocket', 'Stopping Bun server...') + await this.server.stop(true) + this.server.unref() + logger('info', 'WebSocket', 'Bun server stopped successfully') + } catch (e) { + logger( + 'error', + 'WebSocket', + `Error stopping Bun server: ${e?.message ?? 
e}` + ) + } return } if (this.socket) { try { - this.socket.close() - logger('info', 'WebSocket', 'WebSocket server closed successfully') + let closedCount = 0 + + for (const session of this.sessions.activeSessions.values()) { + if (session.socket) { + try { + session.socket.close(1000, 'Server shutdown') + closedCount++ + } catch (_e) { + try { + session.socket.destroy() + } catch (_destroyErr) { + logger( + 'debug', + 'WebSocket', + `Failed to close/destroy socket for session ${session.id}` + ) + } + } + } + } + + this.sessions.activeSessions.clear() + this.sessions.resumableSessions.clear() + + logger( + 'info', + 'WebSocket', + `Closed ${closedCount} WebSocket connection(s) successfully` + ) } catch (error) { logger( 'error', 'WebSocket', - `Error closing WebSocket server: ${error.message}` + `Error closing WebSocket connections: ${error.message}` ) } } @@ -985,14 +1378,27 @@ class NodelinkServer { for (const [sessionId, guildsInSession] of sessionsToNotify.entries()) { const session = this.sessions.get(sessionId) if (session?.socket) { + const affected = Array.from(guildsInSession) session.socket.send( JSON.stringify({ op: 'event', type: 'WorkerFailedEvent', - affectedGuilds: Array.from(guildsInSession), - message: `Players for guilds ${Array.from(guildsInSession).join(', ')} lost due to worker failure.` + affectedGuilds: affected, + message: `Players for guilds ${affected.join(', ')} lost due to worker failure.` }) ) + for (const guildId of affected) { + session.socket.send( + JSON.stringify({ + op: 'event', + type: GatewayEvents.WEBSOCKET_CLOSED, + guildId, + code: 5001, + reason: 'worker_failed', + byRemote: false + }) + ) + } } } } @@ -1001,45 +1407,30 @@ class NodelinkServer { async start(startOptions = {}) { this._validateConfig() + await this.credentialManager.load() await this.statsManager.initialize() + + // Ensure sources are initialized before proceeding + if (this._sourceInitPromise) await this._sourceInitPromise + await this.pluginManager.load('master') - - if (!startOptions.isClusterPrimary) { - await this.pluginManager.load('worker') - } - if (this.options.sources.youtube?.getOAuthToken) { - logger( - 'info', - 'OAuth', - 'Starting YouTube OAuth token acquisition process...' - ) - try { - await OAuth.acquireRefreshToken() - logger( - 'info', - 'OAuth', - 'YouTube OAuth token acquisition completed. Please update your config.js with the refresh token and set sources.youtube.getOAuthToken to false.' 
- ) - process.exit(0) - } catch (error) { - logger( - 'error', - 'OAuth', - `YouTube OAuth token acquisition failed: ${error.message}` - ) - process.exit(1) - } + if (this.sourceWorkerManager) { + await this.sourceWorkerManager.start() } + const specEnabled = this.options.cluster?.specializedSourceWorker?.enabled + if (!startOptions.isClusterPrimary) { - await this.sources.loadFolder() + await this.pluginManager.load('worker') + } + if (this.sources && (!startOptions.isClusterPrimary || !specEnabled)) { + await this.sources.loadFolder() await this.lyrics.loadFolder() } this._setupSocketEvents() - this._createServer() if (startOptions.isClusterWorker) { @@ -1054,7 +1445,7 @@ class NodelinkServer { try { try { handle.pause?.() - } catch (e) {} + } catch (_e) {} this.server.emit('connection', handle) } catch (err) { logger( @@ -1064,11 +1455,11 @@ class NodelinkServer { ) try { handle.destroy?.() - } catch (e) {} + } catch (_e) {} } }) } else { - if (!isBun) this._listen() + this._listen() } if (startOptions.isClusterPrimary) { @@ -1076,37 +1467,57 @@ class NodelinkServer { } else { this._startGlobalUpdater() } + + if (!startOptions.isClusterPrimary || clusterEnabled) { + this._startHeartbeat() + } + this.connectionManager.start() return this } _startMasterMetricsUpdater() { if (this._globalUpdater) return - const updateInterval = Math.max( + const statsSendInterval = Math.max( 1, - this.options?.playerUpdateInterval ?? 5000 + this.options?.statsUpdateInterval ?? 30000 ) + const metricsInterval = this.options?.metrics?.enabled + ? 5000 + : statsSendInterval + + let lastStatsSendTime = 0 this._globalUpdater = setInterval(() => { + const now = Date.now() const stats = getStats(this) - const workerMetrics = this.workerManager ? this.workerManager.getWorkerMetrics() : null + const workerMetrics = this.workerManager + ? this.workerManager.getWorkerMetrics() + : null this.statsManager.updateStatsMetrics(stats, workerMetrics) - const statsPayload = JSON.stringify({ op: 'stats', ...stats }) - for (const session of this.sessions.values()) { - if (session.socket) { - session.socket.send(statsPayload) - } - } - const sessionCount = this.sessions.activeSessions?.size || 0 this.statsManager.setWebsocketConnections(sessionCount) - }, updateInterval) + + if (now - lastStatsSendTime >= statsSendInterval) { + lastStatsSendTime = now + const statsPayload = JSON.stringify({ op: 'stats', ...stats }) + for (const session of this.sessions.values()) { + if (session.socket) { + session.socket.send(statsPayload) + } + } + } + }, metricsInterval) } registerSource(name, source) { if (!this.sources) { - logger('warn', 'Server', 'Cannot register source in this context (sources manager not available).') + logger( + 'warn', + 'Server', + 'Cannot register source in this context (sources manager not available).' 
+ ) return } this.sources.sources.set(name, source) @@ -1139,7 +1550,8 @@ class NodelinkServer { } registerAudioInterceptor(interceptor) { - if (!this.extensions.audioInterceptors) this.extensions.audioInterceptors = [] + if (!this.extensions.audioInterceptors) + this.extensions.audioInterceptors = [] this.extensions.audioInterceptors.push(interceptor) logger('info', 'Server', 'Registered custom audio interceptor') } @@ -1153,6 +1565,24 @@ class NodelinkServer { import WorkerManager from './managers/workerManager.js' if (clusterEnabled && cluster.isPrimary) { + if (config.sources?.youtube?.getOAuthToken) { + const mockNodelink = { options: config } + const validator = new OAuth(mockNodelink) + await validator.validateCurrentTokens() + + try { + await OAuth.acquireRefreshToken() + process.exit(0) + } catch (error) { + logger( + 'error', + 'OAuth', + `YouTube OAuth token acquisition failed: ${error.message}` + ) + process.exit(1) + } + } + const workerManager = new WorkerManager(config) const serverInstancePromise = (async () => { @@ -1162,14 +1592,53 @@ if (clusterEnabled && cluster.isPrimary) { await nserver.start({ isClusterPrimary: true }) global.nodelink = nserver - process.on('beforeExit', () => { + let isShuttingDown = false + const shutdown = async () => { + if (isShuttingDown) return + isShuttingDown = true + + if (nserver.workerManager) nserver.workerManager.isDestroying = true + nserver.emit('shutdown') + + process.stdout.write( + '\n \x1b[32m💚 Thank you for using NodeLink!\x1b[0m\n' + ) + process.stdout.write( + ' \x1b[37mIf you have ideas, suggestions or want to report bugs, join us on Discord:\x1b[0m\n' + ) + process.stdout.write( + ' \x1b[1m\x1b[34m➜\x1b[0m \x1b[36mhttps://discord.gg/fzjksWS65v\x1b[0m\n\n' + ) + + logger( + 'info', + 'Server', + 'Shutdown signal received. Cleaning up resources...' + ) + + nserver._stopHeartbeat() + + await nserver.credentialManager.forceSave() + workerManager.destroy() - nserver._cleanupWebSocketServer() + + await nserver._cleanupWebSocketServer() + + if (nserver.server?.listening) { + await new Promise((resolve) => nserver.server.close(resolve)) + logger('info', 'Server', 'HTTP server closed.') + } + cleanupHttpAgents() - cleanupLogger() nserver.rateLimitManager.destroy() nserver.dosProtectionManager.destroy() - }) + cleanupLogger() + + process.exit(0) + } + + process.once('SIGINT', shutdown) + process.once('SIGTERM', shutdown) return nserver })() @@ -1189,13 +1658,48 @@ if (clusterEnabled && cluster.isPrimary) { `Single-process server running (PID ${process.pid})` ) - process.on('beforeExit', () => { - nserver._cleanupWebSocketServer() + let isShuttingDown = false + const shutdown = async () => { + if (isShuttingDown) return + isShuttingDown = true + + logger( + 'info', + 'Server', + 'Shutdown signal received. Cleaning up resources...' 
+ ) + + nserver._stopHeartbeat() + + await nserver.credentialManager.forceSave() + + await nserver._cleanupWebSocketServer() + + if (nserver.server?.listening) { + await new Promise((resolve) => nserver.server.close(resolve)) + logger('info', 'Server', 'HTTP server closed.') + } + cleanupHttpAgents() - cleanupLogger() nserver.rateLimitManager.destroy() nserver.dosProtectionManager.destroy() - }) + cleanupLogger() + + process.stdout.write( + '\n \x1b[32m💚 Thank you for using NodeLink!\x1b[0m\n' + ) + process.stdout.write( + ' \x1b[37mIf you have ideas, suggestions or want to report bugs, join us on Discord:\x1b[0m\n' + ) + process.stdout.write( + ' \x1b[1m\x1b[34m➜\x1b[0m \x1b[36mhttps://discord.gg/fzjksWS65v\x1b[0m\n\n' + ) + + process.exit(0) + } + + process.once('SIGINT', shutdown) + process.once('SIGTERM', shutdown) return nserver })() diff --git a/src/lyrics/bilibili.js b/src/lyrics/bilibili.js new file mode 100644 index 0000000..4cffc82 --- /dev/null +++ b/src/lyrics/bilibili.js @@ -0,0 +1,168 @@ +import crypto from 'node:crypto' +import { logger, makeRequest } from '../utils.js' + +const MIXIN_KEY_ENC_TAB = [ + 46, 47, 18, 2, 53, 8, 23, 32, 15, 50, 10, 31, 58, 3, 45, 35, 27, 43, 5, 49, + 33, 9, 42, 19, 29, 28, 14, 39, 12, 38, 41, 13, 37, 48, 7, 16, 24, 55, 40, 61, + 26, 17, 0, 1, 60, 51, 30, 4, 22, 25, 54, 21, 56, 59, 6, 63, 57, 62, 11, 36, + 20, 34, 44, 52 +] + +export default class BilibiliLyrics { + constructor(nodelink) { + this.nodelink = nodelink + this.wbiKeys = null + this.wbiKeysExpiry = 0 + } + + async setup() { + return true + } + + async _getWbiKeys() { + if (this.wbiKeys && Date.now() < this.wbiKeysExpiry) { + return this.wbiKeys + } + + const cachedKeys = this.nodelink.credentialManager.get('bilibili_wbi_keys') + if (cachedKeys) { + this.wbiKeys = cachedKeys + this.wbiKeysExpiry = Date.now() + 1000 * 60 * 60 + return this.wbiKeys + } + + const { body, error } = await makeRequest( + 'https://api.bilibili.com/x/web-interface/nav', + { + method: 'GET' + } + ) + + if (error || !body?.data?.wbi_img) { + throw new Error('Failed to fetch WBI keys') + } + + const { img_url, sub_url } = body.data.wbi_img + const imgKey = img_url.slice( + img_url.lastIndexOf('/') + 1, + img_url.lastIndexOf('.') + ) + const subKey = sub_url.slice( + sub_url.lastIndexOf('/') + 1, + sub_url.lastIndexOf('.') + ) + + const rawKey = imgKey + subKey + let mixinKey = '' + for (const index of MIXIN_KEY_ENC_TAB) { + if (rawKey[index]) mixinKey += rawKey[index] + } + + this.wbiKeys = mixinKey.slice(0, 32) + this.wbiKeysExpiry = Date.now() + 1000 * 60 * 60 + this.nodelink.credentialManager.set( + 'bilibili_wbi_keys', + this.wbiKeys, + 1000 * 60 * 60 + ) + + return this.wbiKeys + } + + _signWbi(params, mixinKey) { + const currTime = Math.round(Date.now() / 1000) + const newParams = { ...params, wts: currTime } + + const query = Object.keys(newParams) + .sort() + .map((key) => { + const value = newParams[key].toString().replace(/[!'()*]/g, '') + return `${encodeURIComponent(key)}=${encodeURIComponent(value)}` + }) + .join('&') + + const w_rid = crypto + .createHash('md5') + .update(query + mixinKey) + .digest('hex') + + return `${query}&w_rid=${w_rid}` + } + + async getLyrics(track) { + const info = track.info || track + const pluginInfo = track.pluginInfo || {} + + if (info.sourceName !== 'bilibili') + return { loadType: 'empty', data: {} } + + try { + let bvid = info.identifier + let aid = pluginInfo?.aid + let cid = pluginInfo?.cid + + if (!aid || !cid) { + if (bvid.includes('?p=')) bvid = 
bvid.split('?p=')[0] + const { body } = await makeRequest( + `https://api.bilibili.com/x/web-interface/view?bvid=${bvid}`, + { method: 'GET' } + ) + if (body.code === 0) { + aid = body.data.aid + cid = body.data.cid + } + } + + if (!aid || !cid) return { loadType: 'empty', data: {} } + + const mixinKey = await this._getWbiKeys() + const query = this._signWbi( + { + bvid: bvid, + cid: cid + }, + mixinKey + ) + + const { body } = await makeRequest( + `https://api.bilibili.com/x/player/wbi/v2?${query}`, + { method: 'GET' } + ) + + if (body.code !== 0 || !body.data.subtitle?.subtitles) { + return { loadType: 'empty', data: {} } + } + + const subtitles = body.data.subtitle.subtitles + if (subtitles.length === 0) return { loadType: 'empty', data: {} } + + const subUrl = subtitles[0].subtitle_url + if (!subUrl) return { loadType: 'empty', data: {} } + + const { body: subData } = await makeRequest( + subUrl.startsWith('//') ? `https:${subUrl}` : subUrl, + { method: 'GET' } + ) + + if (!subData?.body) return { loadType: 'empty', data: {} } + + const lines = subData.body.map((line) => ({ + time: Math.floor(line.from * 1000), + duration: Math.floor((line.to - line.from) * 1000), + text: line.content + })) + + return { + loadType: 'lyrics', + data: { + name: 'Bilibili CC', + synced: true, + lines: lines + } + } + } catch (e) { + logger('error', 'Lyrics', `Bilibili lyrics failed: ${e.message}`) + return { loadType: 'empty', data: {} } + } + } +} diff --git a/src/lyrics/lrclib.js b/src/lyrics/lrclib.js index c74b100..b513ad7 100644 --- a/src/lyrics/lrclib.js +++ b/src/lyrics/lrclib.js @@ -8,8 +8,7 @@ const CLEAN_PATTERNS = [ ] // this clears titles like [Official] or (Official), etc... this improves the accuary of the lyrics -const FEAT_PATTERN = - /\s*[\(\[]\s*(?:ft\.?|feat\.?|featuring)\s+[^\)\]]+[\)\]]/gi +const FEAT_PATTERN = /\s*[([]\s*(?:ft\.?|feat\.?|featuring)\s+[^)\]]+[)\]]/gi const SEPARATORS = [' - ', ' – ', ' — '] diff --git a/src/lyrics/musixmatch.js b/src/lyrics/musixmatch.js index daf9bcc..687fddc 100644 --- a/src/lyrics/musixmatch.js +++ b/src/lyrics/musixmatch.js @@ -1,9 +1,9 @@ -import { readFile, unlink, writeFile } from 'node:fs/promises' +import crypto from 'node:crypto' +import { unlink } from 'node:fs/promises' import os from 'node:os' import path from 'node:path' -import crypto from 'node:crypto' -import { logger, http1makeRequest } from '../utils.js' +import { http1makeRequest, logger } from '../utils.js' const APP_ID = 'web-desktop-app-v1.0' const TOKEN_TTL = 55000 @@ -25,8 +25,7 @@ const CLEAN_PATTERNS = [ /VEVO$/i ] -const FEAT_PATTERN = - /\s*[\(\[]\s*(?:ft\.?|feat\.?|featuring)\s+[^\)\]]+[\)\]]/gi +const FEAT_PATTERN = /\s*[([]\s*(?:ft\.?|feat\.?|featuring)\s+[^)\]]+[)\]]/gi const SEPARATORS = [' - ', ' – ', ' — '] @@ -90,8 +89,16 @@ export default class MusixmatchLyrics { ) if (!this.useManualToken) { - this.tokenData = await this._readToken() - if (this.tokenData) logger('info', 'Lyrics', 'Loaded existing token') + const cachedToken = + this.nodelink.credentialManager.get('musixmatch_token') + if (cachedToken) { + this.tokenData = cachedToken + logger( + 'info', + 'Lyrics', + 'Loaded Musixmatch token from CredentialManager' + ) + } } // Start cache cleanup interval @@ -148,31 +155,6 @@ export default class MusixmatchLyrics { : Array.from(this.cookies, ([k, v]) => `${k}=${v}`).join('; ') } - async _readToken() { - try { - const data = await readFile(this.tokenFile, 'utf-8') - const parsed = JSON.parse(data) - if ( - parsed?.value && - typeof parsed.expires === 'number' && 
- parsed.expires > Date.now() - ) { - return parsed - } - } catch {} - return null - } - - async _saveToken(token, expires) { - try { - await writeFile( - this.tokenFile, - JSON.stringify({ value: token, expires }), - 'utf-8' - ) - } catch {} - } - async _fetchToken() { const url = _buildUrl(ENDPOINTS.TOKEN, { app_id: APP_ID }) const { statusCode, headers, body } = await http1makeRequest(url, { @@ -248,7 +230,11 @@ export default class MusixmatchLyrics { const token = await this._fetchToken() const expires = Date.now() + TOKEN_TTL this.tokenData = { value: token, expires } - await this._saveToken(token, expires) + this.nodelink.credentialManager.set( + 'musixmatch_token', + this.tokenData, + TOKEN_TTL + ) return token } catch (err) { const isCaptcha = err.message?.toLowerCase().includes('captcha') @@ -260,7 +246,11 @@ export default class MusixmatchLyrics { const token = await this._fetchToken() const expires = Date.now() + TOKEN_TTL this.tokenData = { value: token, expires } - await this._saveToken(token, expires) + this.nodelink.credentialManager.set( + 'musixmatch_token', + this.tokenData, + TOKEN_TTL + ) return token } diff --git a/src/lyrics/youtube.js b/src/lyrics/youtube.js index 99e311d..409989a 100644 --- a/src/lyrics/youtube.js +++ b/src/lyrics/youtube.js @@ -49,7 +49,7 @@ export default class YouTubeLyrics { captionTracks.find((c) => c.kind !== 'asr') || captionTracks[0] - if (defaultTrack && defaultTrack.isTranslatable) { + if (defaultTrack?.isTranslatable) { trackLang = { ...defaultTrack, languageCode: language, diff --git a/src/managers/connectionManager.js b/src/managers/connectionManager.js index 9f79d29..238b42d 100644 --- a/src/managers/connectionManager.js +++ b/src/managers/connectionManager.js @@ -1,7 +1,12 @@ import { GatewayEvents } from '../constants.js' import { http1makeRequest, logger } from '../utils.js' -const TEST_FILE_URL = 'http://cachefly.cachefly.net/10mb.test' +const TEST_URLS = [ + 'http://cachefly.cachefly.net/10mb.test', + 'http://speedtest.tele2.net/10MB.zip', + 'http://ping.online.net/10Mo.dat', + 'http://proof.ovh.net/files/10Mb.dat' +] export default class ConnectionManager { constructor(nodelink) { @@ -35,128 +40,107 @@ export default class ConnectionManager { async checkConnection() { if (this.isChecking) { - logger( - 'debug', - 'ConnectionManager', - 'Connection check already in progress.' - ) return } this.isChecking = true - const startTime = Date.now() - let downloadedBytes = 0 - - try { - const { stream, error, statusCode } = await http1makeRequest( - TEST_FILE_URL, - { - method: 'GET', - streamOnly: true, - timeout: this.config.timeout || 10000 - }, - this.nodelink - ) - - if (error || statusCode !== 200) { - throw new Error( - `Failed to download test file: ${error?.message || `Status code ${statusCode}`}` - ) - } - - stream.on('data', (chunk) => { - downloadedBytes += chunk.length - }) - - stream.on('end', () => { - this.isChecking = false - const endTime = Date.now() - const durationSeconds = (endTime - startTime) / 1000 - - if (durationSeconds === 0) return - - const speedBps = downloadedBytes / durationSeconds - const speedKbps = (speedBps * 8) / 1024 - const speedMbps = speedKbps / 1024 - - let newStatus = 'good' - if (speedMbps < (this.config.thresholds?.bad ?? 1)) { - newStatus = 'bad' - } else if (speedMbps < (this.config.thresholds?.average ?? 
5)) { - newStatus = 'average' - } - - this.metrics = { - speed: { - bps: speedBps, - kbps: speedKbps, - mbps: Number.parseFloat(speedMbps.toFixed(2)) + for (const url of TEST_URLS) { + const startTime = Date.now() + let downloadedBytes = 0 + + try { + const { stream, error, statusCode } = await http1makeRequest( + url, + { + method: 'GET', + streamOnly: true, + timeout: this.config.timeout || 10000 }, - downloadedBytes, - durationSeconds: Number.parseFloat(durationSeconds.toFixed(2)), - timestamp: Date.now() - } + this.nodelink + ) - const shouldLog = this.config.logAllChecks || newStatus !== this.status - if (shouldLog) { - if (newStatus === 'bad') { - logger( - 'warn', - 'Network', - `Your internet connection is very slow (${speedMbps.toFixed(2)} Mbps).` - ) - logger( - 'warn', - 'Network', - 'This will cause performance issues and poor stream quality.' - ) - logger( - 'warn', - 'Network', - 'Try switching to a different network or deploying the server to a cloud instance with high-speed internet.' - ) - } else { - logger( - 'network', - 'ConnectionManager', - `Connection speed: ${this.metrics.speed.mbps} Mbps (${newStatus})` - ) - } + if (error || statusCode !== 200) { + continue } - if (newStatus !== this.status) { - this.status = newStatus - this.broadcastStatus() - } - }) + await new Promise((resolve, reject) => { + stream.on('data', (chunk) => { + downloadedBytes += chunk.length + }) + + stream.on('end', () => { + const endTime = Date.now() + const durationSeconds = (endTime - startTime) / 1000 + + if (durationSeconds === 0) { + resolve() + return + } + + const speedBps = downloadedBytes / durationSeconds + const speedKbps = (speedBps * 8) / 1024 + const speedMbps = speedKbps / 1024 + + let newStatus = 'good' + if (speedMbps < (this.config.thresholds?.bad ?? 1)) { + newStatus = 'bad' + } else if (speedMbps < (this.config.thresholds?.average ?? 
5)) { + newStatus = 'average' + } + + this.metrics = { + speed: { + bps: speedBps, + kbps: speedKbps, + mbps: Number.parseFloat(speedMbps.toFixed(2)) + }, + downloadedBytes, + durationSeconds: Number.parseFloat(durationSeconds.toFixed(2)), + timestamp: Date.now() + } + + const shouldLog = + this.config.logAllChecks || newStatus !== this.status + if (shouldLog) { + if (newStatus === 'bad') { + logger( + 'warn', + 'Network', + `Your internet connection is very slow (${speedMbps.toFixed(2)} Mbps).` + ) + } else { + logger( + 'network', + 'ConnectionManager', + `Connection speed: ${this.metrics.speed.mbps} Mbps (${newStatus})` + ) + } + } + + if (newStatus !== this.status) { + this.status = newStatus + this.broadcastStatus() + } + resolve() + }) + + stream.on('error', reject) + }) - stream.on('error', (err) => { this.isChecking = false - const errorMessage = `Stream error during download: ${err.message}` - logger( - 'error', - 'ConnectionManager', - `Connection check failed: ${errorMessage}` - ) - if (this.status !== 'disconnected') { - this.status = 'disconnected' - this.metrics = { error: errorMessage, timestamp: Date.now() } - this.broadcastStatus() - } - }) - } catch (e) { - this.isChecking = false - logger( - 'error', - 'ConnectionManager', - `Connection check failed: ${e.message}` - ) - if (this.status !== 'disconnected') { - this.status = 'disconnected' - this.metrics = { error: e.message, timestamp: Date.now() } - this.broadcastStatus() + return + } catch (_e) {} + } + + this.isChecking = false + if (this.status !== 'disconnected') { + this.status = 'disconnected' + this.metrics = { + error: 'All connection tests failed', + timestamp: Date.now() } + this.broadcastStatus() } } @@ -170,9 +154,11 @@ export default class ConnectionManager { const payloadStr = JSON.stringify(payload) - for (const session of this.nodelink.sessions.values()) { - if (session.socket) { - session.socket.send(payloadStr) + if (this.nodelink.sessions?.values) { + for (const session of this.nodelink.sessions.values()) { + if (session.socket) { + session.socket.send(payloadStr) + } } } } diff --git a/src/managers/credentialManager.js b/src/managers/credentialManager.js new file mode 100644 index 0000000..abfdb70 --- /dev/null +++ b/src/managers/credentialManager.js @@ -0,0 +1,122 @@ +import crypto from 'node:crypto' +import fs from 'node:fs/promises' +import { logger } from '../utils.js' + +export default class CredentialManager { + constructor(nodelink) { + this.nodelink = nodelink + this.key = crypto.scryptSync( + nodelink.options.server.password, + 'nodelink-salt', + 32 + ) + this.filePath = './.cache/credentials.bin' + this.credentials = new Map() + this._saveTimeout = null + } + + async load() { + try { + const data = await fs.readFile(this.filePath) + if (data.length < 32) return + + const iv = data.subarray(0, 16) + const tag = data.subarray(16, 32) + const encrypted = data.subarray(32) + + const decipher = crypto.createDecipheriv('aes-256-gcm', this.key, iv) + decipher.setAuthTag(tag) + + const decrypted = + decipher.update(encrypted, 'binary', 'utf8') + decipher.final('utf8') + const obj = JSON.parse(decrypted) + + this.credentials = new Map(Object.entries(obj)) + logger('debug', 'Credentials', 'Loaded encrypted credentials from disk.') + } catch (e) { + if (e.code !== 'ENOENT') { + logger( + 'error', + 'Credentials', + `Failed to decrypt credentials: ${e.message}` + ) + } + this.credentials = new Map() + } + } + + async save() { + if (this._saveTimeout) return + + this._saveTimeout = setTimeout(async () => 
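  // save() is debounced: repeated set() calls within one second collapse into a
  // single write, while forceSave() below flushes immediately. The on-disk layout
  // produced here and parsed by load() is:
  //   [16-byte IV][16-byte AES-GCM auth tag][ciphertext]
  // with the key derived from server.password via scryptSync and a fixed salt,
  // so changing the server password invalidates the cached credentials.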
{ + this._saveTimeout = null + try { + const plainText = JSON.stringify(Object.fromEntries(this.credentials)) + const iv = crypto.randomBytes(16) + const cipher = crypto.createCipheriv('aes-256-gcm', this.key, iv) + + const encrypted = Buffer.concat([ + cipher.update(plainText, 'utf8'), + cipher.final() + ]) + const tag = cipher.getAuthTag() + + await fs.mkdir('./.cache', { recursive: true }) + await fs.writeFile(this.filePath, Buffer.concat([iv, tag, encrypted])) + } catch (e) { + logger( + 'error', + 'Credentials', + `Failed to save credentials: ${e.message}` + ) + } + }, 1000) + } + + async forceSave() { + if (this._saveTimeout) { + clearTimeout(this._saveTimeout) + this._saveTimeout = null + } + + try { + const plainText = JSON.stringify(Object.fromEntries(this.credentials)) + const iv = crypto.randomBytes(16) + const cipher = crypto.createCipheriv('aes-256-gcm', this.key, iv) + + const encrypted = Buffer.concat([ + cipher.update(plainText, 'utf8'), + cipher.final() + ]) + const tag = cipher.getAuthTag() + + await fs.mkdir('./.cache', { recursive: true }) + await fs.writeFile(this.filePath, Buffer.concat([iv, tag, encrypted])) + logger('debug', 'Credentials', 'Force saved credentials to disk.') + } catch (e) { + logger( + 'error', + 'Credentials', + `Failed to force save credentials: ${e.message}` + ) + } + } + + get(key) { + const entry = this.credentials.get(key) + if (!entry) return null + if (entry.expiresAt && Date.now() > entry.expiresAt) { + this.credentials.delete(key) + return null + } + return entry.value + } + + set(key, value, ttlMs = 0) { + this.credentials.set(key, { + value, + expiresAt: ttlMs ? Date.now() + ttlMs : null + }) + this.save() + } +} diff --git a/src/managers/dosProtectionManager.js b/src/managers/dosProtectionManager.js index 968e184..ef7719e 100644 --- a/src/managers/dosProtectionManager.js +++ b/src/managers/dosProtectionManager.js @@ -35,14 +35,17 @@ export default class DosProtectionManager { const now = Date.now() if (this.config.ignore) { - if (this.config.ignore.ips?.includes(remoteAddress)) return { allowed: true } + if (this.config.ignore.ips?.includes(remoteAddress)) + return { allowed: true } const userId = req.headers['user-id'] - if (userId && this.config.ignore.userIds?.includes(userId)) return { allowed: true } + if (userId && this.config.ignore.userIds?.includes(userId)) + return { allowed: true } const guildIdMatch = req.url?.match(/\/players\/(\d+)/) const guildId = guildIdMatch ? 
guildIdMatch[1] : null - if (guildId && this.config.ignore.guildIds?.includes(guildId)) return { allowed: true } + if (guildId && this.config.ignore.guildIds?.includes(guildId)) + return { allowed: true } } if (!this.ipRequestCounts.has(remoteAddress)) { diff --git a/src/managers/lyricsManager.js b/src/managers/lyricsManager.js index 4b34111..0af94a7 100644 --- a/src/managers/lyricsManager.js +++ b/src/managers/lyricsManager.js @@ -128,14 +128,12 @@ export default class LyricsManager { } } - const sourceName = reliableTrackData.data.info.sourceName + const trackInfo = reliableTrackData.data?.info || reliableTrackData.data + const sourceName = trackInfo?.sourceName const lyricsSource = this.lyricsSources.get(sourceName) if (lyricsSource) { - const lyrics = await lyricsSource.getLyrics( - reliableTrackData.data.info, - language - ) + const lyrics = await lyricsSource.getLyrics(trackInfo, language) if (lyrics && lyrics.loadType !== 'empty') { return lyrics } @@ -146,12 +144,9 @@ export default class LyricsManager { logger( 'debug', 'Lyrics', - `Trying lyrics source ${name} for ${reliableTrackData.data.info.title}.` - ) - const lyrics = await source.getLyrics( - reliableTrackData.data.info, - language + `Trying lyrics source ${name} for ${trackInfo?.title || 'Unknown Title'}.` ) + const lyrics = await source.getLyrics(trackInfo, language) if (lyrics && lyrics.loadType !== 'empty') { return lyrics } @@ -161,7 +156,7 @@ export default class LyricsManager { logger( 'debug', 'Lyrics', - `No lyrics found for ${reliableTrackData.data.info.title}` + `No lyrics found for ${trackInfo?.title || 'Unknown Track'}` ) return { loadType: 'empty', data: {} } } diff --git a/src/managers/playerManager.js b/src/managers/playerManager.js index adb9c1a..fcca247 100644 --- a/src/managers/playerManager.js +++ b/src/managers/playerManager.js @@ -19,7 +19,11 @@ export default class PlayerManager { return { handled: true, result } } } catch (e) { - logger('error', 'PlayerManager', `Interceptor error for ${action}: ${e.message}`) + logger( + 'error', + 'PlayerManager', + `Interceptor error for ${action}: ${e.message}` + ) } } return null @@ -43,26 +47,37 @@ export default class PlayerManager { if (!worker) { throw new Error('No workers available to create a player.') } - this.nodelink.workerManager.assignGuildToWorker(playerKey, worker) - logger( - 'debug', - 'PlayerManager', - `Creating player for guild ${guildId} (session: ${this.sessionId}) on worker ${worker.id}` - ) - await this.nodelink.workerManager.execute(worker, 'createPlayer', { - sessionId: this.sessionId, - guildId, - userId: session.userId, - voice - }) - - this.players.set(playerKey, { - guildId, - userId: session.userId, - sessionId: this.sessionId - }) - return this.players.get(playerKey) + let created = false + try { + logger( + 'debug', + 'PlayerManager', + `Creating player for guild ${guildId} (session: ${this.sessionId}) on worker ${worker.id}` + ) + await this.nodelink.workerManager.execute(worker, 'createPlayer', { + sessionId: this.sessionId, + guildId, + userId: session.userId, + voice + }) + + this.nodelink.workerManager.assignGuildToWorker(playerKey, worker) + created = true + + this.players.set(playerKey, { + guildId, + userId: session.userId, + sessionId: this.sessionId + }) + return this.players.get(playerKey) + } catch (e) { + if (!created) { + this.nodelink.workerManager.unassignGuild(playerKey) + throw new Error('The player could not be created.', e) + } + throw e + } } const { Player } = await import('../playback/player.js') logger( @@ 
-124,7 +139,11 @@ export default class PlayerManager { } async play(guildId, trackPayload) { - const interception = await this._runInterceptors('play', guildId, trackPayload) + const interception = await this._runInterceptors( + 'play', + guildId, + trackPayload + ) if (interception?.handled) return interception.result const session = this.nodelink.sessions.get(this.sessionId) @@ -144,7 +163,7 @@ export default class PlayerManager { args: [trackPayload] } ) - if (result && result.playerNotFound) { + if (result?.playerNotFound) { throw new Error('Player not found.') } return result @@ -175,7 +194,7 @@ export default class PlayerManager { args: [] } ) - if (result && result.playerNotFound) { + if (result?.playerNotFound) { throw new Error('Player not found.') } return result @@ -186,7 +205,11 @@ export default class PlayerManager { } async pause(guildId, shouldPause) { - const interception = await this._runInterceptors('pause', guildId, shouldPause) + const interception = await this._runInterceptors( + 'pause', + guildId, + shouldPause + ) if (interception?.handled) return interception.result const session = this.nodelink.sessions.get(this.sessionId) @@ -206,7 +229,7 @@ export default class PlayerManager { args: [shouldPause] } ) - if (result && result.playerNotFound) { + if (result?.playerNotFound) { throw new Error('Player not found.') } return result @@ -217,7 +240,12 @@ export default class PlayerManager { } async seek(guildId, position, endTime) { - const interception = await this._runInterceptors('seek', guildId, position, endTime) + const interception = await this._runInterceptors( + 'seek', + guildId, + position, + endTime + ) if (interception?.handled) return interception.result const session = this.nodelink.sessions.get(this.sessionId) @@ -237,7 +265,7 @@ export default class PlayerManager { args: [position, endTime] } ) - if (result && result.playerNotFound) { + if (result?.playerNotFound) { throw new Error('Player not found.') } return result @@ -268,7 +296,7 @@ export default class PlayerManager { args: [level] } ) - if (result && result.playerNotFound) { + if (result?.playerNotFound) { throw new Error('Player not found.') } return result @@ -279,7 +307,11 @@ export default class PlayerManager { } async setFilters(guildId, filtersPayload) { - const interception = await this._runInterceptors('setFilters', guildId, filtersPayload) + const interception = await this._runInterceptors( + 'setFilters', + guildId, + filtersPayload + ) if (interception?.handled) return interception.result const session = this.nodelink.sessions.get(this.sessionId) @@ -299,7 +331,7 @@ export default class PlayerManager { args: [filtersPayload] } ) - if (result && result.playerNotFound) { + if (result?.playerNotFound) { throw new Error('Player not found.') } return result @@ -310,7 +342,11 @@ export default class PlayerManager { } async updateVoice(guildId, voicePayload) { - const interception = await this._runInterceptors('updateVoice', guildId, voicePayload) + const interception = await this._runInterceptors( + 'updateVoice', + guildId, + voicePayload + ) if (interception?.handled) return interception.result const session = this.nodelink.sessions.get(this.sessionId) @@ -330,7 +366,7 @@ export default class PlayerManager { args: [voicePayload] } ) - if (result && result.playerNotFound) { + if (result?.playerNotFound) { throw new Error('Player not found.') } return result @@ -358,7 +394,7 @@ export default class PlayerManager { args: [] } ) - if (result && result.playerNotFound) { + if 
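      // Every proxied player call follows the same contract: registered interceptors
      // may short-circuit the action (interception.handled), and a worker response of
      // { playerNotFound: true } is converted into a thrown "Player not found." error
      // instead of being returned to the caller.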
(result?.playerNotFound) { throw new Error('Player not found.') } return result @@ -386,7 +422,7 @@ export default class PlayerManager { args: [trackPayload, volume] } ) - if (result && result.playerNotFound) { + if (result?.playerNotFound) { throw new Error('Player not found.') } return result @@ -414,7 +450,7 @@ export default class PlayerManager { args: [mixId] } ) - if (result && result.playerNotFound) { + if (result?.playerNotFound) { throw new Error('Player not found.') } return result @@ -442,7 +478,7 @@ export default class PlayerManager { args: [mixId, volume] } ) - if (result && result.playerNotFound) { + if (result?.playerNotFound) { throw new Error('Player not found.') } return result @@ -470,7 +506,7 @@ export default class PlayerManager { args: [] } ) - if (result && result.playerNotFound) { + if (result?.playerNotFound) { throw new Error('Player not found.') } return result diff --git a/src/managers/pluginManager.js b/src/managers/pluginManager.js index fd17254..5e1dedb 100644 --- a/src/managers/pluginManager.js +++ b/src/managers/pluginManager.js @@ -16,7 +16,11 @@ export default class PluginManager { } async load(contextType) { - logger('info', 'PluginManager', `Initializing plugins in ${contextType} context...`) + logger( + 'info', + 'PluginManager', + `Initializing plugins in ${contextType} context...` + ) try { await fs.access(this.pluginsDir) @@ -35,7 +39,7 @@ export default class PluginManager { async _findPackageJson(startPath) { let currentDir = path.dirname(startPath) - + while (currentDir !== path.parse(currentDir).root) { const pkgPath = path.join(currentDir, 'package.json') try { @@ -63,7 +67,7 @@ export default class PluginManager { try { let entryPoint = null - let pluginMeta = { + const pluginMeta = { name, version: '0.0.0', author: 'Unknown', @@ -73,16 +77,18 @@ export default class PluginManager { if (source === 'local') { const resolvedPath = path.resolve(this.pluginsDir, localPath || name) const stat = await fs.stat(resolvedPath) - + if (stat.isDirectory()) { const pkgPath = path.join(resolvedPath, 'package.json') try { const pkgData = await fs.readFile(pkgPath, 'utf-8') const pkg = JSON.parse(pkgData) - + if (pkg.version) pluginMeta.version = pkg.version - if (pkg.author) pluginMeta.author = typeof pkg.author === 'object' ? pkg.author.name : pkg.author - if (pkg.homepage || (pkg.repository && pkg.repository.url)) { + if (pkg.author) + pluginMeta.author = + typeof pkg.author === 'object' ? pkg.author.name : pkg.author + if (pkg.homepage || pkg.repository?.url) { pluginMeta.topic = pkg.homepage || pkg.repository.url } @@ -101,17 +107,23 @@ export default class PluginManager { try { const pkgName = packageName || name entryPoint = require.resolve(pkgName) - + const pkg = await this._findPackageJson(entryPoint) if (pkg) { if (pkg.version) pluginMeta.version = pkg.version - if (pkg.author) pluginMeta.author = typeof pkg.author === 'object' ? pkg.author.name : pkg.author - if (pkg.homepage || (pkg.repository && pkg.repository.url)) { + if (pkg.author) + pluginMeta.author = + typeof pkg.author === 'object' ? 
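              // package.json allows "author" to be either a shorthand string
              // ("Name <email> (url)") or an object ({ name, email, url }); both
              // branches of this ternary normalize it to a display name.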
pkg.author.name : pkg.author + if (pkg.homepage || pkg.repository?.url) { pluginMeta.topic = pkg.homepage || pkg.repository.url } } - } catch (e) { - logger('warn', 'PluginManager', `NPM package '${packageName || name}' not found.`) + } catch (_e) { + logger( + 'warn', + 'PluginManager', + `NPM package '${packageName || name}' not found.` + ) return } } @@ -122,7 +134,9 @@ export default class PluginManager { const pluginModule = await import(fileUrl) if (typeof pluginModule.default !== 'function') { - throw new Error(`Plugin '${name}' entry point must export a default function.`) + throw new Error( + `Plugin '${name}' entry point must export a default function.` + ) } this.loadedPlugins.set(name, { @@ -137,14 +151,19 @@ export default class PluginManager { const author = `\x1b[36m${pluginMeta.author}\x1b[0m` const pluginName = `\x1b[1m\x1b[32m${name}\x1b[0m` const version = `\x1b[33mv${pluginMeta.version}\x1b[0m` - const topic = pluginMeta.topic ? ` | \x1b[34mTopic:\x1b[0m ${pluginMeta.topic}` : '' + const topic = pluginMeta.topic + ? ` | \x1b[34mTopic:\x1b[0m ${pluginMeta.topic}` + : '' const creditString = `[${author}] ${pluginName} ${version}${topic}` - - logger('info', 'PluginManager', `Loaded: ${creditString}`) + logger('info', 'PluginManager', `Loaded: ${creditString}`) } catch (error) { - logger('error', 'PluginManager', `Failed to load plugin '${name}': ${error.message}`) + logger( + 'error', + 'PluginManager', + `Failed to load plugin '${name}': ${error.message}` + ) } } @@ -160,7 +179,11 @@ export default class PluginManager { try { await pluginModule.default(this.nodelink, specificConfig, context) } catch (err) { - logger('error', 'PluginManager', `Error executing plugin '${name}' in '${contextType}' context: ${err.message}`) + logger( + 'error', + 'PluginManager', + `Error executing plugin '${name}' in '${contextType}' context: ${err.message}` + ) } } -} \ No newline at end of file +} diff --git a/src/managers/rateLimitManager.js b/src/managers/rateLimitManager.js index abe240d..9ef0434 100644 --- a/src/managers/rateLimitManager.js +++ b/src/managers/rateLimitManager.js @@ -29,17 +29,28 @@ export default class RateLimitManager { (reqTime) => now - reqTime < timeWindowMs ) + const remaining = Math.max(0, maxRequests - entry.requests.length) + const reset = + entry.requests.length > 0 + ? 
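    // Sliding-window check: entry.requests keeps only timestamps younger than
    // timeWindowMs, so "remaining" is the unused quota and "reset" is the moment the
    // oldest timestamp ages out (or one full window from now when the window is empty).
    // Returning { allowed, limit, remaining, reset } instead of a bare boolean lets
    // callers expose the usual rate-limit metadata, e.g. as X-RateLimit-* response
    // headers (an assumption about how the caller consumes it).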
entry.requests[0] + timeWindowMs + : now + timeWindowMs + if (entry.requests.length >= maxRequests) { - return false + return { allowed: false, limit: maxRequests, remaining: 0, reset } } entry.requests.push(now) - return true + return { + allowed: true, + limit: maxRequests, + remaining: remaining - 1, + reset + } } check(req, parsedUrl) { if (!this.config.enabled) { - return true + return { allowed: true } } if ( @@ -47,7 +58,7 @@ export default class RateLimitManager { parsedUrl.pathname.startsWith(path) ) ) { - return true + return { allowed: true } } const remoteAddress = req.socket.remoteAddress @@ -57,74 +68,75 @@ export default class RateLimitManager { : null if (this.config.ignore) { - if (this.config.ignore.ips?.includes(remoteAddress)) return true - if (userId && this.config.ignore.userIds?.includes(userId)) return true - if (guildId && this.config.ignore.guildIds?.includes(guildId)) return true + if (this.config.ignore.ips?.includes(remoteAddress)) + return { allowed: true } + if (userId && this.config.ignore.userIds?.includes(userId)) + return { allowed: true } + if (guildId && this.config.ignore.guildIds?.includes(guildId)) + return { allowed: true } } - if ( - !this._checkAndIncrement( - 'global', - 'all', - this.config.global.maxRequests, - this.config.global.timeWindowMs - ) - ) { + const globalCheck = this._checkAndIncrement( + 'global', + 'all', + this.config.global.maxRequests, + this.config.global.timeWindowMs + ) + if (!globalCheck.allowed) { logger( 'warn', 'RateLimit', `Global rate limit exceeded for ${remoteAddress}` ) - return false + return globalCheck } - if ( - !this._checkAndIncrement( - 'ip', - remoteAddress, - this.config.perIp.maxRequests, - this.config.perIp.timeWindowMs - ) - ) { + const ipCheck = this._checkAndIncrement( + 'ip', + remoteAddress, + this.config.perIp.maxRequests, + this.config.perIp.timeWindowMs + ) + if (!ipCheck.allowed) { logger('warn', 'RateLimit', `IP rate limit exceeded for ${remoteAddress}`) - return false + return ipCheck } - if ( - userId && - !this._checkAndIncrement( + if (userId) { + const userCheck = this._checkAndIncrement( 'userId', userId, this.config.perUserId.maxRequests, this.config.perUserId.timeWindowMs ) - ) { - logger( - 'warn', - 'RateLimit', - `User-Id rate limit exceeded for ${userId} (IP: ${remoteAddress})` - ) - return false + if (!userCheck.allowed) { + logger( + 'warn', + 'RateLimit', + `User-Id rate limit exceeded for ${userId} (IP: ${remoteAddress})` + ) + return userCheck + } } - if ( - guildId && - !this._checkAndIncrement( + if (guildId) { + const guildCheck = this._checkAndIncrement( 'guildId', guildId, this.config.perGuildId.maxRequests, this.config.perGuildId.timeWindowMs ) - ) { - logger( - 'warn', - 'RateLimit', - `Guild-Id rate limit exceeded for ${guildId} (IP: ${remoteAddress}, User: ${userId})` - ) - return false + if (!guildCheck.allowed) { + logger( + 'warn', + 'RateLimit', + `Guild-Id rate limit exceeded for ${guildId} (IP: ${remoteAddress}, User: ${userId})` + ) + return guildCheck + } } - return true + return { allowed: true } } _cleanup() { @@ -140,6 +152,10 @@ export default class RateLimitManager { } } + clear() { + this.store.clear() + } + destroy() { clearInterval(this.cleanupInterval) this.store.clear() diff --git a/src/managers/routePlannerManager.js b/src/managers/routePlannerManager.js index 7614657..7f5b616 100644 --- a/src/managers/routePlannerManager.js +++ b/src/managers/routePlannerManager.js @@ -4,127 +4,194 @@ export default class RoutePlannerManager { constructor(nodelink) { 
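    // Instead of materializing every address in a CIDR block up front (the old
    // _generateIpsFromCidr approach), each block is now stored as a BigInt network
    // address plus a size, e.g. 192.168.0.0/24 -> { networkInt: 3232235520n, size: 256n }.
    // That keeps IPv6 ranges (2^64+ addresses) tractable and lets banIP() escalate to
    // banning a whole block after repeated failures.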
this.nodelink = nodelink this.config = nodelink.options.routePlanner - this.ipBlocks = [] + this.blocks = [] this.bannedIps = new Map() - this.lastUsedIndex = -1 + this.bannedBlocks = new Map() + this.lastUsedBlockIndex = -1 if (this.config?.ipBlocks?.length > 0) { this._loadIpBlocks() } } - _ipToInt(ip) { - return ( - ip.split('.').reduce((acc, oct) => (acc << 8) + parseInt(oct, 10), 0) >>> - 0 - ) - } - - _intToIp(int) { - return [ - (int >>> 24) & 0xff, - (int >>> 16) & 0xff, - (int >>> 8) & 0xff, - int & 0xff - ].join('.') + _ipToBigInt(ip) { + if (ip.includes(':')) { + const parts = ip.split(':') + const fullParts = [] + for (let i = 0; i < parts.length; i++) { + if (parts[i] === '') { + const missing = 8 - (parts.length - 1) + fullParts.push(...Array(missing).fill('0000')) + } else { + fullParts.push(parts[i].padStart(4, '0')) + } + } + return BigInt(`0x${fullParts.join('')}`) + } else { + return BigInt( + ip.split('.').reduce((acc, oct) => (acc << 8n) + BigInt(oct), 0n) + ) + } } - _generateIpsFromCidr(cidr) { - const [baseIp, maskLength] = cidr.split('/') - if (!baseIp || !maskLength) throw new Error(`Invalid CIDR: ${cidr}`) - - const mask = ~(2 ** (32 - parseInt(maskLength)) - 1) >>> 0 - const baseInt = this._ipToInt(baseIp) & mask - const numberOfIps = 2 ** (32 - parseInt(maskLength)) - const ips = [] - - for (let i = 0; i < numberOfIps; i++) { - ips.push(this._intToIp(baseInt + i)) + _bigIntToIp(bigint, isIpv6) { + if (isIpv6) { + const hex = bigint.toString(16).padStart(32, '0') + const parts = [] + for (let i = 0; i < 8; i++) { + parts.push(hex.substring(i * 4, i * 4 + 4)) + } + return parts.join(':').replace(/\b0{1,3}/g, '') + } else { + const parts = [] + for (let i = 0; i < 4; i++) { + parts.unshift(Number(bigint & 255n)) + bigint >>= 8n + } + return parts.join('.') } - return ips } _loadIpBlocks() { - for (const block of this.config.ipBlocks) { + for (const blockConfig of this.config.ipBlocks) { try { - const ips = this._generateIpsFromCidr(block.cidr) - this.ipBlocks.push(...ips) + const [baseIp, maskLengthStr] = blockConfig.cidr.split('/') + const maskLength = parseInt(maskLengthStr, 10) + const isIpv6 = baseIp.includes(':') + const totalBits = isIpv6 ? 
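        // Network math, generalized over IPv4 (32 bits) and IPv6 (128 bits):
        //   mask       = ((1n << maskLength) - 1n) << (totalBits - maskLength)
        //   networkInt = baseInt & mask
        //   size       = 1n << (totalBits - maskLength)
        // e.g. 2001:db8::/32 yields size = 2^96 addresses, which is why offsets are
        // tracked as BigInt rather than Number.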
128n : 32n + + const baseInt = this._ipToBigInt(baseIp) + const mask = + ((1n << BigInt(maskLength)) - 1n) << (totalBits - BigInt(maskLength)) + const networkInt = baseInt & mask + const size = 1n << (totalBits - BigInt(maskLength)) + + this.blocks.push({ + cidr: blockConfig.cidr, + networkInt, + size, + lastUsedOffset: -1n, + isIpv6 + }) } catch (e) { logger( 'error', 'RoutePlanner', - `Failed to parse IP block ${block.cidr}: ${e.message}` + `Failed to parse block ${blockConfig.cidr}: ${e.message}` ) } } logger( 'info', 'RoutePlanner', - `Loaded ${this.ipBlocks.length} IPs from ${this.config.ipBlocks.length} blocks.` + `Initialized with ${this.blocks.length} IP blocks.` ) } getIP() { - if (this.ipBlocks.length === 0) return null - - const strategy = this.config.strategy || 'RoundRobin' + if (this.blocks.length === 0) return null + const strategy = this.config.strategy || 'RotateOnBan' switch (strategy) { case 'RoundRobin': - return this._getRoundRobinIp() case 'RotateOnBan': - return this._getRotateOnBanIp() + return this._getNextIp() case 'LoadBalance': return this._getRandomIp() default: - return this._getRoundRobinIp() + return this._getNextIp() } } - _getRoundRobinIp() { - if (this.ipBlocks.length === 0) return null - this.lastUsedIndex = (this.lastUsedIndex + 1) % this.ipBlocks.length - return this.ipBlocks[this.lastUsedIndex] - } - - _getRotateOnBanIp() { - if (this.ipBlocks.length === 0) return null - + _getNextIp() { const now = Date.now() - for (let i = 0; i < this.ipBlocks.length; i++) { - this.lastUsedIndex = (this.lastUsedIndex + 1) % this.ipBlocks.length - const ip = this.ipBlocks[this.lastUsedIndex] - const bannedUntil = this.bannedIps.get(ip) - - if (!bannedUntil || now > bannedUntil) { - return ip + const _startBlockIdx = this.lastUsedBlockIndex + + for (let i = 0; i < this.blocks.length; i++) { + this.lastUsedBlockIndex = + (this.lastUsedBlockIndex + 1) % this.blocks.length + const block = this.blocks[this.lastUsedBlockIndex] + + if ( + this.bannedBlocks.has(block.cidr) && + now < this.bannedBlocks.get(block.cidr) + ) + continue + + for (let attempt = 0; attempt < 10; attempt++) { + block.lastUsedOffset = (block.lastUsedOffset + 1n) % block.size + const ipInt = block.networkInt + block.lastUsedOffset + const ip = this._bigIntToIp(ipInt, block.isIpv6) + + if (!this.bannedIps.has(ip) || now > this.bannedIps.get(ip)) { + return ip + } } } - logger('warn', 'RoutePlanner', 'All IPs are currently banned.') return null } _getRandomIp() { const now = Date.now() - const availableIps = this.ipBlocks.filter((ip) => { - const bannedUntil = this.bannedIps.get(ip) - return !bannedUntil || now > bannedUntil - }) + const availableBlocks = this.blocks.filter( + (b) => + !this.bannedBlocks.has(b.cidr) || now > this.bannedBlocks.get(b.cidr) + ) - if (availableIps.length === 0) { - logger('warn', 'RoutePlanner', 'All IPs are currently banned.') - return null - } + if (availableBlocks.length === 0) return null + + const block = + availableBlocks[Math.floor(Math.random() * availableBlocks.length)] - const ip = availableIps[Math.floor(Math.random() * availableIps.length)] - return ip + const randomOffset = BigInt( + Math.floor( + Math.random() * + Number( + block.size > BigInt(Number.MAX_SAFE_INTEGER) + ? 
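          // block.size can exceed Number.MAX_SAFE_INTEGER (an IPv6 /64 alone spans
          // 2^64 addresses), so the random offset is drawn from at most
          // MAX_SAFE_INTEGER addresses before converting back to BigInt; LoadBalance
          // therefore only samples within that capped prefix of very large blocks.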
Number.MAX_SAFE_INTEGER + : block.size + ) + ) + ) + const ipInt = block.networkInt + randomOffset + return this._bigIntToIp(ipInt, block.isIpv6) } banIP(ip) { if (!ip) return const cooldown = this.config.bannedIpCooldown || 600000 - this.bannedIps.set(ip, Date.now() + cooldown) + const now = Date.now() + this.bannedIps.set(ip, now + cooldown) + + // Check if we should ban the whole block (if many IPs are failing) + const block = this.blocks.find((b) => { + const ipInt = this._ipToBigInt(ip) + return ipInt >= b.networkInt && ipInt < b.networkInt + b.size + }) + + if (block) { + let failedInBlock = 0 + for (const bannedIp of this.bannedIps.keys()) { + const bIpInt = this._ipToBigInt(bannedIp) + if ( + bIpInt >= block.networkInt && + bIpInt < block.networkInt + block.size + ) { + failedInBlock++ + } + } + + if (failedInBlock >= 5) { + this.bannedBlocks.set(block.cidr, now + cooldown * 2) + logger( + 'warn', + 'RoutePlanner', + `Banning Block: ${block.cidr} due to multiple failures.` + ) + } + } + logger('warn', 'RoutePlanner', `Banning IP: ${ip} for ${cooldown}ms`) } @@ -137,6 +204,7 @@ export default class RoutePlannerManager { freeAll() { this.bannedIps.clear() - logger('info', 'RoutePlanner', 'Freed all banned IPs.') + this.bannedBlocks.clear() + logger('info', 'RoutePlanner', 'Freed all banned IPs and blocks.') } } diff --git a/src/managers/sessionManager.js b/src/managers/sessionManager.js index d3a3173..8251564 100644 --- a/src/managers/sessionManager.js +++ b/src/managers/sessionManager.js @@ -93,6 +93,11 @@ export default class SessionManager { async destroy(session) { if (!session) return + if (session.timeoutFuture) { + clearTimeout(session.timeoutFuture) + session.timeoutFuture = null + } + logger( 'debug', 'SessionManager', @@ -118,7 +123,7 @@ export default class SessionManager { } } - session.socket?.destroy() + session.socket?.destroy?.() } async shutdown(sessionId) { diff --git a/src/managers/sourceManager.js b/src/managers/sourceManager.js index 16cab5f..57307e9 100644 --- a/src/managers/sourceManager.js +++ b/src/managers/sourceManager.js @@ -13,7 +13,8 @@ export default class SourcesManager { constructor(nodelink) { this.nodelink = nodelink this.sources = new Map() - this.searchTermMap = new Map() + this.sourceMap = new Map() + this.searchAliasMap = new Map() this.patternMap = [] } @@ -23,158 +24,93 @@ export default class SourcesManager { const sourcesDir = path.join(__dirname, '../sources') this.sources.clear() - this.searchTermMap.clear() + this.sourceMap.clear() + this.searchAliasMap.clear() this.patternMap = [] - if (sourceRegistry && Object.keys(sourceRegistry).length > 0) { - await Promise.all( - Object.entries(sourceRegistry).map(async ([name, mod]) => { - const isYouTube = name === 'youtube' || name.includes('YouTube.js') - const enabled = isYouTube - ? this.nodelink.options.sources.youtube?.enabled - : !!this.nodelink.options.sources[name]?.enabled + const processSource = async (name, mod) => { + const isYouTube = name === 'youtube' || name.includes('YouTube.js') + const sourceKey = isYouTube ? 'youtube' : name - if (!enabled) return + const enabled = isYouTube + ? this.nodelink.options.sources.youtube?.enabled + : !!this.nodelink.options.sources[sourceKey]?.enabled - const Mod = mod.default || mod - const instance = new Mod(this.nodelink) + if (!enabled) return - if (await instance.setup()) { - const sourceKey = isYouTube ? 
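      // Registration populates three lookups: sourceMap (canonical key plus any
      // additionalsSourceName aliases -> instance), searchAliasMap (searchTerms and
      // recommendationTerm prefixes -> instance), and patternMap (URL regexes with
      // priorities, sorted descending so higher-priority sources win in resolve()).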
'youtube' : name - this.sources.set(sourceKey, instance) + const Mod = mod.default || mod + const instance = new Mod(this.nodelink) - if (isYouTube) this.sources.set('ytmusic', instance) - - if (Array.isArray(instance.searchTerms)) { - for (const term of instance.searchTerms) { - this.searchTermMap.set(term, sourceKey) - } - } + if (await instance.setup()) { + this.sources.set(sourceKey, instance) + this.sourceMap.set(sourceKey, instance) - if (Array.isArray(instance.patterns)) { - for (const regex of instance.patterns) { - if (regex instanceof RegExp) { - this.patternMap.push({ - regex, - sourceName: sourceKey, - priority: instance.priority || 0 - }) - } - } - } - logger('info', 'Sources', `Loaded source: ${sourceKey}`) + if (Array.isArray(instance.additionalsSourceName)) { + for (const addName of instance.additionalsSourceName) { + this.sourceMap.set(addName, instance) } - }) - ) - this.patternMap.sort((a, b) => b.priority - a.priority) - return - } - - try { - await fs.access(sourcesDir) - const files = await fs.readdir(sourcesDir) - const jsFiles = files.filter((f) => f.endsWith('.js')) - const toLoad = jsFiles.filter((f) => { - const name = path.basename(f, '.js') - return ( - name !== 'youtube' && !!this.nodelink.options.sources[name]?.enabled - ) - }) - - if (this.nodelink.options.sources.youtube?.enabled) { - const name = 'youtube' - const filePath = path.join(sourcesDir, 'youtube', 'YouTube.js') - const fileUrl = new URL(`file://${filePath.replace(/\\/g, '/')}`) - const Mod = (await import(fileUrl)).default - - const instance = new Mod(this.nodelink) - if (await instance.setup()) { - this.sources.set(name, instance) + } - this.sources.set('ytmusic', instance) + if (Array.isArray(instance.searchTerms)) { + for (const term of instance.searchTerms) { + this.searchAliasMap.set(term, instance) + } + } - if (Array.isArray(instance.searchTerms)) { - for (const term of instance.searchTerms) { - this.searchTermMap.set(term, name) - } + if (Array.isArray(instance.recommendationTerm)) { + for (const term of instance.recommendationTerm) { + this.searchAliasMap.set(term, instance) } + } - if (Array.isArray(instance.patterns)) { - for (const regex of instance.patterns) { - if (regex instanceof RegExp) { - this.patternMap.push({ - regex, - sourceName: name, - priority: instance.priority || 0 - }) - } + if (Array.isArray(instance.patterns)) { + for (const regex of instance.patterns) { + if (regex instanceof RegExp) { + this.patternMap.push({ + regex, + sourceName: sourceKey, + priority: instance.priority || 0 + }) } } - logger( - 'info', - 'Sources', - `Loaded source: ${name} ${instance.searchTerms?.length ? 
`(terms: ${instance.searchTerms.join(', ')})` : ''}` - ) - } else { - logger( - 'error', - 'Sources', - `Failed setup source: ${name}; source not available for use` - ) } + logger('info', 'Sources', `Loaded source: ${sourceKey}`) } + } + if (sourceRegistry && Object.keys(sourceRegistry).length > 0) { await Promise.all( - toLoad.map(async (file) => { - const name = path.basename(file, '.js') - const filePath = path.join(sourcesDir, file) - const fileUrl = new URL(`file://${filePath.replace(/\\/g, '/')}`) - const Mod = (await import(fileUrl)).default - - const instance = new Mod(this.nodelink) - if (await instance.setup()) { - this.sources.set(name, instance) - } else { - logger( - 'error', - 'Sources', - `Failed setup source: ${name}; source not available for use` - ) - return - } - - if (Array.isArray(instance.searchTerms)) { - for (const term of instance.searchTerms) { - this.searchTermMap.set(term, name) - } - } - - if (Array.isArray(instance.patterns)) { - for (const regex of instance.patterns) { - if (regex instanceof RegExp) { - this.patternMap.push({ - regex, - sourceName: name, - priority: instance.priority || 0 - }) - } - } - } - logger( - 'info', - 'Sources', - `Loaded source: ${name} ${instance.searchTerms?.length ? `(terms: ${instance.searchTerms.join(', ')})` : ''}` - ) - }) + Object.entries(sourceRegistry).map(([name, mod]) => + processSource(name, mod) + ) ) - } catch (e) { - logger('error', 'Sources', `Sources directory not found or error loading sources: ${sourcesDir} - ${e.message}`) + } else { + try { + await fs.access(sourcesDir) + const files = await fs.readdir(sourcesDir, { recursive: true }) + const jsFiles = files.filter( + (f) => f.endsWith('.js') && !f.includes('clients/') + ) + + await Promise.all( + jsFiles.map(async (file) => { + const name = path.basename(file, '.js').toLowerCase() + const filePath = path.join(sourcesDir, file) + const fileUrl = new URL(`file://${filePath.replace(/\\/g, '/')}`) + const mod = await import(fileUrl) + await processSource(name, mod) + }) + ) + } catch (e) { + logger('error', 'Sources', `Error loading sources: ${e.message}`) + } } + this.patternMap.sort((a, b) => b.priority - a.priority) } async _instrumentedSourceCall(sourceName, method, ...args) { - const instance = this.sources.get(sourceName) + const instance = this.sourceMap.get(sourceName) if (!instance || typeof instance[method] !== 'function') { this.nodelink.statsManager.incrementSourceFailure(sourceName || 'unknown') throw new Error( @@ -197,23 +133,69 @@ export default class SourcesManager { } async search(sourceTerm, query) { - const sourceName = this.searchTermMap.get(sourceTerm) - if (!sourceName) { - throw new Error(`Source not found for term: ${sourceTerm}`) + let instance = this.searchAliasMap.get(sourceTerm) + const sourceName = sourceTerm + + if (!instance) { + instance = this.sourceMap.get(sourceTerm) } - logger('debug', 'Sources', `Searching on ${sourceName} for: "${query}" `) - return this._instrumentedSourceCall(sourceName, 'search', query, sourceTerm) - } - async searchWithDefault(query) { - const defaultSource = this.nodelink.options.defaultSearchSource - const sourceName = this.searchTermMap.get(defaultSource) || defaultSource + if (!instance) { + throw new Error(`Source or search alias not found for: ${sourceTerm}`) + } + + let searchType = 'track' + let searchQuery = query + + if (query.includes(':')) { + const parts = query.split(':') + const possibleType = parts[0].toLowerCase() + const types = ['playlist', 'artist', 'album', 'channel', 'track'] + + if 
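      // Queries may carry an optional type prefix, e.g.
      //   search('ytsearch', 'playlist:late night drive')
      //     -> searchType 'playlist', searchQuery 'late night drive'
      // Unprefixed queries default to searchType 'track', and any further colons in
      // the query survive the slice(1).join(':') reassembly. ('ytsearch' here is only
      // an illustrative alias.)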
(types.includes(possibleType)) { + searchType = possibleType + searchQuery = parts.slice(1).join(':') + } + } + + const name = instance.constructor.name.replace('Source', '').toLowerCase() logger( 'debug', 'Sources', - `Searching on default source "${sourceName}" for: "${query}"` + `Searching on ${name} (${searchType}) for: "${searchQuery}"` + ) + return this._instrumentedSourceCall( + name, + 'search', + searchQuery, + sourceName, + searchType ) - return this._instrumentedSourceCall(sourceName, 'search', query) + } + + async searchWithDefault(query) { + const defaultSources = Array.isArray( + this.nodelink.options.defaultSearchSource + ) + ? this.nodelink.options.defaultSearchSource + : [this.nodelink.options.defaultSearchSource] + + for (const source of defaultSources) { + try { + const result = await this.search(source, query) + if (result.loadType === 'search' && result.data.length > 0) { + return result + } + } catch (e) { + logger( + 'warn', + 'Sources', + `Default source search failed for ${source}: ${e.message}` + ) + } + } + + return { loadType: 'empty', data: {} } } async unifiedSearch(query) { @@ -233,7 +215,7 @@ export default class SourcesManager { 'Sources', `A source (${sourceName}) failed during unified search: ${e.message}` ) - return { loadType: 'error', data: { message: e.message } } // Return an error object to not break allSettled + return { loadType: 'error', data: { message: e.message } } }) ) @@ -264,9 +246,14 @@ export default class SourcesManager { } async resolve(url) { - let sourceName = this.patternMap.find(({ regex }) => - regex.test(url) - )?.sourceName + let sourceName = null + + for (let i = 0; i < this.patternMap.length; i++) { + if (this.patternMap[i].regex.test(url)) { + sourceName = this.patternMap[i].sourceName + break + } + } if ( !sourceName && @@ -275,7 +262,7 @@ export default class SourcesManager { sourceName = 'http' } - if (!sourceName) { + if (!sourceName || !this.sourceMap.has(sourceName)) { logger('warn', 'Sources', `No source found for URL: ${url}`) return { loadType: 'error', @@ -296,17 +283,20 @@ export default class SourcesManager { } async getTrackUrl(track, itag) { - const instance = this.sources.get(track.sourceName) + const instance = this.sourceMap.get(track.sourceName) return await instance.getTrackUrl(track, itag) } async getTrackStream(track, url, protocol, additionalData) { - const instance = this.sources.get(track.sourceName) + const instance = this.sourceMap.get(track.sourceName) return await instance.loadStream(track, url, protocol, additionalData) } async getChapters(track) { - const instance = this.sources.get(track.info.sourceName) + const sourceName = track.info?.sourceName + if (!sourceName) return [] + + const instance = this.sourceMap.get(sourceName) if (!instance || typeof instance.getChapters !== 'function') { return [] } @@ -317,6 +307,10 @@ export default class SourcesManager { return Array.from(this.sources.values()) } + getSource(name) { + return this.sourceMap.get(name) + } + getEnabledSourceNames() { const enabledNames = [] for (const sourceName in this.nodelink.options.sources) { diff --git a/src/managers/sourceWorkerManager.js b/src/managers/sourceWorkerManager.js new file mode 100644 index 0000000..ebe3526 --- /dev/null +++ b/src/managers/sourceWorkerManager.js @@ -0,0 +1,215 @@ +import cluster from 'node:cluster' +import crypto from 'node:crypto' +import net from 'node:net' +import os from 'node:os' +import { logger } from '../utils.js' + +class SourceWorkerManager { + constructor(nodelink) { + this.nodelink 
= nodelink + this.workers = [] + this.requests = new Map() + this.workerLoads = new Map() // worker.id -> pending count + this.socketPath = + os.platform() === 'win32' + ? `\\\\.\\pipe\\nodelink-source-${crypto.randomBytes(8).toString('hex')}` + : `/tmp/nodelink-source-${crypto.randomBytes(8).toString('hex')}.sock` + this.server = null + } + + async start() { + this.server = net.createServer((socket) => { + let buffer = Buffer.alloc(0) + + socket.on('data', (chunk) => { + buffer = Buffer.concat([buffer, chunk]) + + while (buffer.length >= 6) { + const idSize = buffer.readUInt8(0) + const type = buffer.readUInt8(1) + const payloadSize = buffer.readUInt32BE(2) + const totalSize = 6 + idSize + payloadSize + + if (buffer.length < totalSize) break + + const id = buffer.toString('utf8', 6, 6 + idSize) + const payload = buffer.subarray(6 + idSize, totalSize) + buffer = buffer.subarray(totalSize) + + const request = this.requests.get(id) + if (request) { + if (type === 0) { + if (request.timeout) { + clearTimeout(request.timeout) + request.timeout = null + } + if (!request.res.headersSent) { + const headers = request.options?.headers + if (headers) { + for (const [key, value] of Object.entries(headers)) { + request.res.setHeader(key, value) + } + } else { + request.res.setHeader('Content-Type', 'application/json') + } + request.res.writeHead(request.options?.statusCode || 200) + } + request.res.write(payload) + } else if (type === 1) { + request.res.end() + this._cleanupRequest(id, request) + } else if (type === 2) { + const errorMsg = payload.toString('utf8') + if (!request.res.headersSent) { + request.res.writeHead(500, { + 'Content-Type': 'application/json' + }) + request.res.end( + JSON.stringify({ + timestamp: Date.now(), + status: 500, + error: 'Worker Error', + message: errorMsg, + path: request.req.url + }) + ) + } else { + request.res.end() + } + this._cleanupRequest(id, request) + } + } + } + }) + }) + + await new Promise((resolve, reject) => { + this.server.on('error', (err) => { + logger('error', 'SourceCluster', `Server error: ${err.message}`) + reject(err) + }) + this.server.listen(this.socketPath, () => { + logger( + 'info', + 'SourceCluster', + `Source server listening at ${this.socketPath}` + ) + resolve() + }) + }) + + const processCount = + this.nodelink.options.cluster?.specializedSourceWorker?.count || 1 + cluster.setupPrimary({ exec: './src/sourceWorker.js' }) + + for (let i = 0; i < processCount; i++) { + this._forkWorker() + } + + cluster.setupPrimary({ exec: './src/index.js' }) + + cluster.on('exit', (worker, _code, _signal) => { + if (worker.workerType !== 'source') return + + logger( + 'warn', + 'SourceCluster', + `Source worker manager ${worker.process.pid} exited. 
Respawning...` + ) + const index = this.workers.indexOf(worker) + if (index !== -1) this.workers.splice(index, 1) + this.workerLoads.delete(worker.id) + + cluster.setupPrimary({ exec: './src/sourceWorker.js' }) + this._forkWorker() + cluster.setupPrimary({ exec: './src/index.js' }) + }) + } + + _forkWorker() { + const worker = cluster.fork() + worker.workerType = 'source' + worker.on('message', (msg) => { + if (msg.type === 'ready') + logger( + 'info', + 'SourceCluster', + `Source worker manager ${msg.pid} ready` + ) + }) + this.workers.push(worker) + this.workerLoads.set(worker.id, 0) + } + + _decrementLoad(workerId) { + const load = this.workerLoads.get(workerId) || 0 + this.workerLoads.set(workerId, Math.max(0, load - 1)) + } + + _cleanupRequest(id, request) { + if (!request || request.cleaned) return + request.cleaned = true + if (request.timeout) clearTimeout(request.timeout) + this._decrementLoad(request.workerId) + this.requests.delete(id) + } + + delegate(req, res, task, payload, options = {}) { + const id = crypto.randomBytes(16).toString('hex') + + let bestWorker = null + let minLoad = Number.POSITIVE_INFINITY + + for (const worker of this.workers) { + const load = this.workerLoads.get(worker.id) || 0 + if (load < minLoad) { + minLoad = load + bestWorker = worker + } + } + + if (!bestWorker) return false + + const request = { + req, + res, + timeout: null, + workerId: bestWorker.id, + options, + cleaned: false + } + request.timeout = setTimeout(() => { + const activeRequest = this.requests.get(id) + if (activeRequest) { + res.writeHead(504, { 'Content-Type': 'application/json' }) + res.end( + JSON.stringify({ + error: 'Gateway Timeout', + message: 'Source worker timed out' + }) + ) + this._cleanupRequest(id, activeRequest) + } + }, 60000) + this.requests.set(id, request) + this.workerLoads.set(bestWorker.id, minLoad + 1) + + res.on?.('close', () => { + this._cleanupRequest(id, request) + }) + + bestWorker.send({ + type: 'sourceTask', + payload: { + id, + task, + payload, + socketPath: this.socketPath + } + }) + + return true + } +} + +export default SourceWorkerManager diff --git a/src/managers/statsManager.js b/src/managers/statsManager.js index 2dce961..60f8208 100644 --- a/src/managers/statsManager.js +++ b/src/managers/statsManager.js @@ -17,7 +17,7 @@ export default class StatsManager { events: {} // { TrackStartEvent: 10, ... 
} } } - + logger('info', 'StatsManager', 'Initialized.') } @@ -29,7 +29,7 @@ export default class StatsManager { let promClient try { promClient = await import('prom-client') - } catch (e) { + } catch (_e) { logger( 'error', 'StatsManager', @@ -206,6 +206,13 @@ export default class StatsManager { registers: [this.promRegister] }) + this.promWorkerEventLoopLag = new Gauge({ + name: 'nodelink_worker_event_loop_lag_ms', + help: 'Worker event loop lag in milliseconds', + labelNames: ['worker_id', 'worker_pid'], + registers: [this.promRegister] + }) + this.promWorkerCommandQueueLength = new Gauge({ name: 'nodelink_worker_command_queue_length', help: 'Worker command queue length', @@ -447,11 +454,15 @@ export default class StatsManager { incrementApiRequest(endpoint) { const sanitized = this._sanitizeEndpoint(endpoint) - - if (Object.keys(this.stats.api.requests).length > 500 && !this.stats.api.requests[sanitized]) { - this.stats.api.requests['others'] = (this.stats.api.requests['others'] || 0) + 1 + + if ( + Object.keys(this.stats.api.requests).length > 500 && + !this.stats.api.requests[sanitized] + ) { + this.stats.api.requests.others = (this.stats.api.requests.others || 0) + 1 } else { - this.stats.api.requests[sanitized] = (this.stats.api.requests[sanitized] || 0) + 1 + this.stats.api.requests[sanitized] = + (this.stats.api.requests[sanitized] || 0) + 1 } if (this.promApiRequests) { @@ -461,11 +472,15 @@ export default class StatsManager { incrementApiError(endpoint) { const sanitized = this._sanitizeEndpoint(endpoint) - - if (Object.keys(this.stats.api.errors).length > 500 && !this.stats.api.errors[sanitized]) { - this.stats.api.errors['others'] = (this.stats.api.errors['others'] || 0) + 1 + + if ( + Object.keys(this.stats.api.errors).length > 500 && + !this.stats.api.errors[sanitized] + ) { + this.stats.api.errors.others = (this.stats.api.errors.others || 0) + 1 } else { - this.stats.api.errors[sanitized] = (this.stats.api.errors[sanitized] || 0) + 1 + this.stats.api.errors[sanitized] = + (this.stats.api.errors[sanitized] || 0) + 1 } if (this.promApiErrors) { @@ -561,31 +576,52 @@ export default class StatsManager { this.promTotalWorkers.set(Object.keys(workerMetrics).length) } - for (const [uniqueWorkerId, workerData] of Object.entries(workerMetrics)) { + for (const [uniqueWorkerId, workerData] of Object.entries( + workerMetrics + )) { const { pid, stats, health, uptime } = workerData - const labels = { worker_id: String(uniqueWorkerId), worker_pid: String(pid) } + const labels = { + worker_id: String(uniqueWorkerId), + worker_pid: String(pid) + } this.promWorkerPlayers.set(labels, stats.players || 0) this.promWorkerPlayingPlayers.set(labels, stats.playingPlayers || 0) if (stats.memory) { this.promWorkerMemoryUsed.set(labels, stats.memory.used || 0) - this.promWorkerMemoryAllocated.set(labels, stats.memory.allocated || 0) + this.promWorkerMemoryAllocated.set( + labels, + stats.memory.allocated || 0 + ) } if (stats.cpu) { this.promWorkerCpuLoad.set(labels, stats.cpu.nodelinkLoad || 0) } + if (stats.eventLoopLag !== undefined && this.promWorkerEventLoopLag) { + this.promWorkerEventLoopLag.set(labels, stats.eventLoopLag || 0) + } + if (stats.commandQueueLength !== undefined) { - this.promWorkerCommandQueueLength.set(labels, stats.commandQueueLength || 0) + this.promWorkerCommandQueueLength.set( + labels, + stats.commandQueueLength || 0 + ) } if (stats.frameStats) { this.promWorkerFramesSent.set(labels, stats.frameStats.sent || 0) this.promWorkerFramesNulled.set(labels, 
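          // All per-worker gauges share the { worker_id, worker_pid } label pair,
          // including the newly added nodelink_worker_event_loop_lag_ms, so a
          // restarted worker (new pid) surfaces as a separate series rather than
          // overwriting its predecessor's metrics.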
stats.frameStats.nulled || 0) - this.promWorkerFramesDeficit.set(labels, stats.frameStats.deficit || 0) - this.promWorkerFramesExpected.set(labels, stats.frameStats.expected || 0) + this.promWorkerFramesDeficit.set( + labels, + stats.frameStats.deficit || 0 + ) + this.promWorkerFramesExpected.set( + labels, + stats.frameStats.expected || 0 + ) } if (uptime !== undefined) { @@ -621,7 +657,12 @@ export default class StatsManager { } recordCommandExecutionTime(commandType, workerId, durationMs) { - if (this.promCommandExecutionTime && commandType && workerId && typeof durationMs === 'number') { + if ( + this.promCommandExecutionTime && + commandType && + workerId && + typeof durationMs === 'number' + ) { this.promCommandExecutionTime.set( { command_type: commandType, worker_id: String(workerId) }, durationMs @@ -649,7 +690,7 @@ export default class StatsManager { incrementPlayerDestruction(sessionId, reason) { if (this.promPlayerDestructions && sessionId) { - const sanitizedSessionId = 'session_' + sessionId.substring(0, 4) + '...' + const sanitizedSessionId = `session_${sessionId.substring(0, 4)}...` this.promPlayerDestructions.inc({ session_id: sanitizedSessionId, reason: reason || 'unknown' @@ -664,7 +705,11 @@ export default class StatsManager { } recordTrackLoadDuration(source, durationMs) { - if (this.promTrackLoadDuration && source && typeof durationMs === 'number') { + if ( + this.promTrackLoadDuration && + source && + typeof durationMs === 'number' + ) { this.promTrackLoadDuration.set({ source }, durationMs) } } @@ -677,7 +722,7 @@ export default class StatsManager { incrementPlayerStuck(guildId, reason) { if (this.promPlayerStuck && guildId && reason) { - const sanitizedGuildId = 'guild_' + guildId.substring(0, 4) + '...' + const sanitizedGuildId = `guild_${guildId.substring(0, 4)}...` this.promPlayerStuck.inc({ guild_id: sanitizedGuildId, reason }) } } @@ -737,7 +782,13 @@ export default class StatsManager { } recordHttpRequestDuration(endpoint, method, statusCode, durationMs) { - if (this.promHttpRequestDuration && endpoint && method && statusCode && typeof durationMs === 'number') { + if ( + this.promHttpRequestDuration && + endpoint && + method && + statusCode && + typeof durationMs === 'number' + ) { const sanitized = this._sanitizeEndpoint(endpoint) this.promHttpRequestDuration.set( { endpoint: sanitized, method, status_code: String(statusCode) }, @@ -749,14 +800,18 @@ export default class StatsManager { incrementRateLimitHit(endpoint, ip) { if (this.promRateLimitHits && endpoint && ip) { const sanitized = this._sanitizeEndpoint(endpoint) - const sanitizedIp = ip.includes(':') ? '[IPv6]' : ip.split('.').slice(0, 2).join('.') + '.xxx.xxx' + const sanitizedIp = ip.includes(':') + ? '[IPv6]' + : `${ip.split('.').slice(0, 2).join('.')}.xxx.xxx` this.promRateLimitHits.inc({ endpoint: sanitized, ip: sanitizedIp }) } } incrementDosProtectionBlock(ip, reason) { if (this.promDosProtectionBlocks && ip && reason) { - const sanitizedIp = ip.includes(':') ? '[IPv6]' : ip.split('.').slice(0, 2).join('.') + '.xxx.xxx' + const sanitizedIp = ip.includes(':') + ? 
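      // Client IPs are anonymized before becoming Prometheus label values: IPv6 is
      // collapsed to the literal '[IPv6]' and IPv4 keeps only the first two octets
      // (e.g. 203.0.113.7 -> '203.0.xxx.xxx'), which bounds label cardinality and
      // avoids persisting full addresses in the metrics store.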
'[IPv6]' + : `${ip.split('.').slice(0, 2).join('.')}.xxx.xxx` this.promDosProtectionBlocks.inc({ ip: sanitizedIp, reason }) } } diff --git a/src/managers/workerManager.js b/src/managers/workerManager.js index 10debd5..a58c2b5 100644 --- a/src/managers/workerManager.js +++ b/src/managers/workerManager.js @@ -1,6 +1,8 @@ import cluster from 'node:cluster' import crypto from 'node:crypto' +import net from 'node:net' import os from 'node:os' +import v8 from 'node:v8' import { logger } from '../utils.js' @@ -13,6 +15,7 @@ export default class WorkerManager { this.workerToGuilds = new Map() this.nextStatelessWorkerIndex = 0 this.pendingRequests = new Map() + this.streamRequests = new Map() this.maxWorkers = config.cluster.workers === 0 ? os.cpus().length @@ -29,36 +32,66 @@ export default class WorkerManager { this.workerHealth = new Map() this.workerStartTime = new Map() this.workerUniqueId = new Map() + this.workerReady = new Set() this.nextWorkerId = 1 this.liveYoutubeConfig = { refreshToken: null, visitorData: null } + this.isDestroying = false this.commandTimeout = config.cluster?.commandTimeout || 45000 this.fastCommandTimeout = config.cluster?.fastCommandTimeout || 10000 this.maxRetries = config.cluster?.maxRetries || 2 this.scalingConfig = { - maxPlayersPerWorker: config.cluster.scaling?.maxPlayersPerWorker || 20, + maxPlayersPerWorker: + config.cluster.scaling?.maxPlayersPerWorker || + config.cluster.workers || + 20, targetUtilization: config.cluster.scaling?.targetUtilization || 0.7, scaleUpThreshold: config.cluster.scaling?.scaleUpThreshold || 0.75, scaleDownThreshold: config.cluster.scaling?.scaleDownThreshold || 0.3, idleWorkerTimeoutMs: config.cluster.scaling?.idleWorkerTimeoutMs || 60000, - checkIntervalMs: config.cluster.scaling?.checkIntervalMs || 5000 + checkIntervalMs: config.cluster.scaling?.checkIntervalMs || 5000, + lagPenaltyLimit: config.cluster.scaling?.lagPenaltyLimit || 60, + cpuPenaltyLimit: config.cluster.scaling?.cpuPenaltyLimit || 0.85 } + this.socketPath = + os.platform() === 'win32' + ? `\\\\.\\pipe\\nodelink-events-${crypto.randomBytes(8).toString('hex')}` + : `/tmp/nodelink-events-${crypto.randomBytes(8).toString('hex')}.sock` + this.server = null + this.commandSocketPath = + os.platform() === 'win32' + ? `\\\\.\\pipe\\nodelink-commands-${crypto.randomBytes(8).toString('hex')}` + : `/tmp/nodelink-commands-${crypto.randomBytes(8).toString('hex')}.sock` + this.commandServer = null + this.commandSockets = new Map() + logger( 'info', 'Cluster', `Primary PID ${process.pid} - WorkerManager initialized. Min: ${this.minWorkers}, Max: ${this.maxWorkers} workers` ) + this._startSocketServer() + this._startCommandSocketServer() this._ensureWorkerAvailability() this._startScalingCheck() this._startHealthCheck() cluster.on('exit', (worker, code, signal) => { - logger( - 'warn', - 'Cluster', - `Worker ${worker.process.pid} exited (code=${code}, signal=${signal})` - ) + if (worker.workerType !== 'playback') return + + const isSystemSignal = + signal === 'SIGINT' || + signal === 'SIGTERM' || + code === 130 || + code === 143 + if (this.isDestroying || isSystemSignal) { + const index = this.workers.indexOf(worker) + if (index !== -1) this.workers.splice(index, 1) + this.workersById.delete(worker.id) + return + } + this._updateWorkerFailureHistory(worker.id, code, signal) if (global.nodelink?.statsManager) { @@ -79,22 +112,22 @@ export default class WorkerManager { ) if (shouldRespawn) { - logger( - 'info', - 'Cluster', - 'Respawning worker...' 
- ) + logger('info', 'Cluster', 'Respawning worker...') + const history = this.workerFailureHistory.get(worker.id) + const delay = history ? Math.min(history.count * 1000, 30000) : 500 + setTimeout(() => { this.forkWorker() if (global.nodelink?.statsManager) { global.nodelink.statsManager.incrementWorkerRestart(worker.id) } - }, 500) + }, delay) } }) } - _shouldRespawnWorker(workerId, exitCode, affectedGuildsCount) { + _shouldRespawnWorker(workerId, _exitCode, affectedGuildsCount) { + if (this.isDestroying) return false if (this.workers.length < this.minWorkers) return true if (affectedGuildsCount > 0) return true @@ -157,22 +190,25 @@ export default class WorkerManager { `Retrying command after worker ${workerId} exit (attempt ${request.retryCount + 1})` ) - setTimeout(() => { - const newWorker = this.getBestWorker() - if (newWorker) { - this._executeCommand( - newWorker, - request.type, - request.payload, - request.resolve, - request.reject, - request.retryCount + 1, - request.isFast - ) - } else { - request.reject(new Error('No workers available for retry')) - } - }, 500 * Math.pow(2, request.retryCount)) + setTimeout( + () => { + const newWorker = this.getBestWorker() + if (newWorker) { + this._executeCommand( + newWorker, + request.type, + request.payload, + request.resolve, + request.reject, + request.retryCount + 1, + request.isFast + ) + } else { + request.reject(new Error('No workers available for retry')) + } + }, + 500 * 2 ** request.retryCount + ) } else { request.reject( new Error(`Worker ${workerId} exited before completing request`) @@ -207,58 +243,44 @@ export default class WorkerManager { _scaleWorkers() { let activeCount = 0 - let totalPlayers = 0 + let totalCost = 0 const metrics = [] for (const worker of this.workers) { if (worker.isConnected()) { activeCount++ - const load = this.workerLoad.get(worker.id) || 0 - totalPlayers += load - metrics.push({ worker, load }) + const cost = this._calculateWorkerCost(worker.id) + totalCost += cost + metrics.push({ worker, cost }) } } - const { - maxPlayersPerWorker, - scaleUpThreshold, - scaleDownThreshold, - idleWorkerTimeoutMs - } = this.scalingConfig - const clusterCapacity = activeCount * maxPlayersPerWorker - const currentUtilization = - clusterCapacity > 0 ? totalPlayers / clusterCapacity : 0 + const averageCost = activeCount > 0 ? totalCost / activeCount : 0 + const { idleWorkerTimeoutMs, maxPlayersPerWorker, scaleUpThreshold } = + this.scalingConfig if ( - currentUtilization > scaleUpThreshold && + averageCost >= maxPlayersPerWorker * scaleUpThreshold && activeCount < this.maxWorkers ) { logger( 'info', 'Cluster', - `Scaling up: Current utilization ${currentUtilization.toFixed(2)} > ${scaleUpThreshold}. Forking new worker.` + `Scaling up: Average cost ${averageCost.toFixed(2)} reached threshold ${(maxPlayersPerWorker * scaleUpThreshold).toFixed(2)} (${scaleUpThreshold * 100}%). Forking new worker.` ) this.forkWorker() return } - if ( - currentUtilization < scaleDownThreshold && - activeCount > this.minWorkers - ) { + if (averageCost < 2 && activeCount > this.minWorkers) { const now = Date.now() - for (const { worker, load } of metrics) { - if (load === 0 && activeCount > this.minWorkers) { + for (const { worker, cost } of metrics) { + if (cost === 0 && activeCount > this.minWorkers) { const idleTime = this.idleWorkers.get(worker.id) if (!idleTime) { this.idleWorkers.set(worker.id, now) - logger( - 'debug', - 'Cluster', - `Worker ${worker.id} became idle. 
Start timeout for removal.` - ) } else if (now - idleTime > idleWorkerTimeoutMs) { logger( 'info', @@ -269,23 +291,38 @@ export default class WorkerManager { activeCount-- break } - } else if (load > 0) { - if (this.idleWorkers.has(worker.id)) { - this.idleWorkers.delete(worker.id) - logger('debug', 'Cluster', `Worker ${worker.id} is no longer idle.`) - } - } - } - } else { - for (const { worker, load } of metrics) { - if (load > 0 && this.idleWorkers.has(worker.id)) { + } else if (cost > 0) { this.idleWorkers.delete(worker.id) - logger('debug', 'Cluster', `Worker ${worker.id} is no longer idle.`) } } } } + _calculateWorkerCost(workerId) { + const stats = this.workerStats.get(workerId) + if (!stats) return 0 + + const playingWeight = 1.0 + const pausedWeight = 0.01 + + const playingCount = stats.playingPlayers || 0 + const pausedCount = Math.max(0, (stats.players || 0) - playingCount) + + let cost = playingCount * playingWeight + pausedCount * pausedWeight + + if (stats.isHibernating) return cost + + if (stats.cpu?.nodelinkLoad > this.scalingConfig.cpuPenaltyLimit) { + cost += this.scalingConfig.maxPlayersPerWorker + 5 + } + + if (stats.eventLoopLag > this.scalingConfig.lagPenaltyLimit) { + cost += this.scalingConfig.maxPlayersPerWorker / 2 + } + + return cost + } + _updateWorkerFailureHistory(workerId, code, signal) { let history = this.workerFailureHistory.get(workerId) @@ -313,6 +350,335 @@ export default class WorkerManager { ) } + _startSocketServer() { + this.server = net.createServer((socket) => { + let buffer = Buffer.alloc(0) + + socket.on('data', (chunk) => { + buffer = Buffer.concat([buffer, chunk]) + + while (buffer.length >= 6) { + const idSize = buffer.readUInt8(0) + const type = buffer.readUInt8(1) + const payloadSize = buffer.readUInt32BE(2) + const totalSize = 6 + idSize + payloadSize + + if (buffer.length < totalSize) break + + const id = buffer.toString('utf8', 6, 6 + idSize) + const payload = buffer.subarray(6 + idSize, totalSize) + buffer = buffer.subarray(totalSize) + + if (type === 5) { + this._handleStreamChunk(id, payload) + continue + } + if (type === 6) { + this._handleStreamEnd(id) + continue + } + if (type === 7) { + this._handleStreamError(id, payload.toString('utf8')) + continue + } + if (type === 8) { + if (global.nodelink?.handleVoiceFrame) { + try { + global.nodelink.handleVoiceFrame(payload) + } catch {} + } + continue + } + + try { + const data = JSON.parse(payload.toString('utf8')) + if (type === 3) { + // playerEvent + if (global.nodelink) + global.nodelink.handleIPCMessage({ + type: 'playerEvent', + payload: data + }) + } else if (type === 4) { + // workerStats + const workerId = data.workerId + delete data.workerId + this.statsUpdateBatch.set(workerId, data) + if (!this.statsUpdateTimer) { + this.statsUpdateTimer = setTimeout( + () => this._flushStatsUpdates(), + 100 + ) + } + } + } catch (e) { + logger('error', 'Cluster', `Socket event parse error: ${e.message}`) + } + } + }) + }) + + this.server.on('error', (err) => { + logger('error', 'Cluster', `Event socket server error: ${err.message}`) + }) + + this.server.listen(this.socketPath, () => { + logger( + 'info', + 'Cluster', + `Event socket server listening at ${this.socketPath}` + ) + }) + } + + _startCommandSocketServer() { + this.commandServer = net.createServer((socket) => { + let buffer = Buffer.alloc(0) + + socket.on('data', (chunk) => { + buffer = Buffer.concat([buffer, chunk]) + + while (buffer.length >= 6) { + const idSize = buffer.readUInt8(0) + const type = buffer.readUInt8(1) + const 
payloadSize = buffer.readUInt32BE(2)
+          const totalSize = 6 + idSize + payloadSize
+
+          if (buffer.length < totalSize) break
+
+          const id = buffer.toString('utf8', 6, 6 + idSize)
+          const payload = buffer.subarray(6 + idSize, totalSize)
+          buffer = buffer.subarray(totalSize)
+
+          if (type === 0) {
+            try {
+              const data = JSON.parse(payload.toString('utf8'))
+              const pid = data?.pid
+              if (pid) this._registerCommandSocket(pid, socket)
+            } catch (e) {
+              logger(
+                'error',
+                'Cluster',
+                `Command socket hello parse error: ${e.message}`
+              )
+            }
+            continue
+          }
+
+          if (type === 2) {
+            let result
+            try {
+              result = v8.deserialize(payload)
+            } catch {
+              result = payload.toString('utf8')
+            }
+            this._handleCommandResponse(id, result)
+            continue
+          }
+
+          if (type === 3) {
+            let errorMsg
+            try {
+              errorMsg = v8.deserialize(payload)
+            } catch {
+              errorMsg = payload.toString('utf8')
+            }
+            this._handleCommandResponse(id, null, errorMsg)
+          }
+        }
+      })
+
+      socket.on('close', () => this._removeCommandSocket(socket))
+      socket.on('error', () => this._removeCommandSocket(socket))
+    })
+
+    this.commandServer.on('error', (err) => {
+      logger('error', 'Cluster', `Command socket server error: ${err.message}`)
+    })
+
+    this.commandServer.listen(this.commandSocketPath, () => {
+      logger(
+        'info',
+        'Cluster',
+        `Command socket server listening at ${this.commandSocketPath}`
+      )
+    })
+  }
+
+  _registerCommandSocket(pid, socket) {
+    const worker = this.workers.find((w) => w.process.pid === pid)
+    if (!worker) return
+
+    const existing = this.commandSockets.get(worker.id)
+    if (existing && existing !== socket) {
+      try {
+        existing.destroy()
+      } catch {}
+    }
+
+    socket._workerId = worker.id
+    this.commandSockets.set(worker.id, socket)
+  }
+
+  _removeCommandSocket(socket) {
+    const workerId = socket?._workerId
+    if (!workerId) return
+    if (this.commandSockets.get(workerId) === socket) {
+      this.commandSockets.delete(workerId)
+    }
+  }
+
+  _sendCommandSocketFrame(workerId, type, requestId, payloadBuf) {
+    const socket = this.commandSockets.get(workerId)
+    if (!socket || socket.destroyed) return false
+
+    const idBuf = Buffer.from(requestId, 'utf8')
+    const header = Buffer.alloc(6)
+    header.writeUInt8(idBuf.length, 0)
+    header.writeUInt8(type, 1)
+    header.writeUInt32BE(payloadBuf.length, 2)
+
+    socket.write(Buffer.concat([header, idBuf, payloadBuf]))
+    return true
+  }
+
+  _handleStreamChunk(streamId, payload) {
+    const request = this.streamRequests.get(streamId)
+    if (!request) return
+
+    if (request.timeout) {
+      clearTimeout(request.timeout)
+      request.timeout = null
+    }
+
+    if (!request.res.headersSent) {
+      const headers = request.options?.headers
+      if (headers) {
+        for (const [key, value] of Object.entries(headers)) {
+          request.res.setHeader(key, value)
+        }
+      }
+      request.res.writeHead(request.options?.statusCode || 200)
+    }
+
+    request.res.write(payload)
+  }
+
+  _handleStreamEnd(streamId) {
+    const request = this.streamRequests.get(streamId)
+    if (!request) return
+    request.res.end()
+    this._cleanupStreamRequest(streamId, false)
+  }
+
+  _handleStreamError(streamId, errorMsg) {
+    const request = this.streamRequests.get(streamId)
+    if (!request) return
+
+    if (!request.res.headersSent) {
+      request.res.writeHead(500, { 'Content-Type': 'application/json' })
+      request.res.end(
+        JSON.stringify({
+          timestamp: Date.now(),
+          status: 500,
+          error: 'Worker Error',
+          message: errorMsg,
+          path: request.req.url
+        })
+      )
+    } else {
+      request.res.end()
+    }
+
+    this._cleanupStreamRequest(streamId, false)
+  }
+
+  _cleanupStreamRequest(streamId,
sendCancel) { + const request = this.streamRequests.get(streamId) + if (!request || request.cleaned) return + request.cleaned = true + + if (request.timeout) clearTimeout(request.timeout) + this.streamRequests.delete(streamId) + + if (sendCancel) { + const worker = this.workersById.get(request.workerId) + if (worker?.isConnected()) { + this._sendStreamCommand(worker, { + type: 'cancelStream', + requestId: streamId, + payload: { streamId } + }) + } + } + } + + _failStreamsForWorker(workerId, reason = 'Worker exited') { + const streamIds = [] + for (const [streamId, request] of this.streamRequests) { + if (request.workerId !== workerId) continue + streamIds.push(streamId) + + if (!request.res.headersSent) { + request.res.writeHead(500, { 'Content-Type': 'application/json' }) + request.res.end( + JSON.stringify({ + timestamp: Date.now(), + status: 500, + error: 'Worker Error', + message: reason, + path: request.req.url + }) + ) + } else { + request.res.end() + } + } + + for (const streamId of streamIds) { + this._cleanupStreamRequest(streamId, false) + } + } + + _sendWorkerCommand(worker, type, requestId, payload) { + const message = v8.serialize({ type, payload }) + if (this._sendCommandSocketFrame(worker.id, 1, requestId, message)) { + return true + } + if (!worker?.isConnected()) return false + worker.send({ type, requestId, payload }) + return true + } + + _sendStreamCommand(worker, msg) { + if (!worker?.isConnected() && !this.commandSockets.has(worker.id)) + return false + if (this.workerReady.has(worker.id)) { + return this._sendWorkerCommand( + worker, + msg.type, + msg.requestId, + msg.payload + ) + } + + let attempts = 0 + const checkReady = setInterval(() => { + attempts++ + if (!worker.isConnected() && !this.commandSockets.has(worker.id)) { + clearInterval(checkReady) + return + } + if (this.workerReady.has(worker.id)) { + clearInterval(checkReady) + this._sendWorkerCommand(worker, msg.type, msg.requestId, msg.payload) + } else if (attempts > 50) { + clearInterval(checkReady) + } + }, 100) + return true + } + forkWorker() { if (this.workers.length >= this.maxWorkers) { logger( @@ -323,17 +689,19 @@ export default class WorkerManager { return null } - const worker = cluster.fork() + cluster.setupPrimary({ exec: './src/index.js' }) + const worker = cluster.fork({ + EVENT_SOCKET_PATH: this.socketPath, + COMMAND_SOCKET_PATH: this.commandSocketPath + }) + worker.workerType = 'playback' this.workers.push(worker) this.workersById.set(worker.id, worker) this.workerLoad.set(worker.id, 0) - this.workerStats.set(worker.id, { - players: 0, - playingPlayers: 0, - cpu: { nodelinkLoad: 0 }, - memory: { used: 0, allocated: 0 } - }) + + this.workerStats.set(worker.id, { players: 0, playingPlayers: 0 }) + this.workerToGuilds.set(worker.id, new Set()) this.workerHealth.set(worker.id, Date.now()) this.workerStartTime.set(worker.id, Date.now()) @@ -363,10 +731,14 @@ export default class WorkerManager { const worker = this.workersById.get(workerId) if (!worker) return + this._failStreamsForWorker(workerId) + this._removeCommandSocketByWorkerId(workerId) + const index = this.workers.indexOf(worker) if (index !== -1) this.workers.splice(index, 1) this.workersById.delete(workerId) + this.workerReady.delete(workerId) this.workerLoad.delete(workerId) this.workerStats.delete(workerId) this.idleWorkers.delete(workerId) @@ -423,15 +795,18 @@ export default class WorkerManager { } } + _removeCommandSocketByWorkerId(workerId) { + const socket = this.commandSockets.get(workerId) + if (!socket) return + 
this.commandSockets.delete(workerId) + try { + socket.destroy() + } catch {} + } + _handleWorkerMessage(worker, msg) { if (msg.type === 'commandResult') { - const callback = this.pendingRequests.get(msg.requestId) - if (callback) { - clearTimeout(callback.timeout) - this.pendingRequests.delete(msg.requestId) - if (msg.error) callback.reject(new Error(String(msg.error))) - else callback.resolve(msg.payload) - } + this._handleCommandResponse(msg.requestId, msg.payload, msg.error) } else if (msg.type === 'workerStats') { this.statsUpdateBatch.set(worker.id, msg.stats) @@ -444,25 +819,56 @@ export default class WorkerManager { this.workerHealth.set(worker.id, Date.now()) } else if (msg.type === 'ready') { this.workerHealth.set(worker.id, Date.now()) + this.workerReady.add(worker.id) logger( 'info', 'Cluster', `Worker ${worker.id} (PID ${worker.process.pid}) ready` ) - if (this.liveYoutubeConfig.refreshToken || this.liveYoutubeConfig.visitorData) { - logger('info', 'Cluster', `Syncing live YouTube config to new worker ${worker.id}`) - this.execute(worker, 'updateYoutubeConfig', this.liveYoutubeConfig) - .catch(err => logger('error', 'Cluster', `Failed to sync config to worker ${worker.id}: ${err.message}`)) + if ( + this.liveYoutubeConfig.refreshToken || + this.liveYoutubeConfig.visitorData + ) { + logger( + 'info', + 'Cluster', + `Syncing live YouTube config to new worker ${worker.id}` + ) + this.execute( + worker, + 'updateYoutubeConfig', + this.liveYoutubeConfig + ).catch((err) => + logger( + 'error', + 'Cluster', + `Failed to sync config to worker ${worker.id}: ${err.message}` + ) + ) } + } else if (msg.type === 'ready' && worker.onSourceReady) { + // This part might be handled by SourceWorkerManager if integrated deeper, + // but for now we keep WorkerManager clean of SourceWorker logic. } else if (global.nodelink) { global.nodelink.handleIPCMessage(msg) } } + _handleCommandResponse(requestId, payload, error) { + const callback = this.pendingRequests.get(requestId) + if (!callback) return + clearTimeout(callback.timeout) + this.pendingRequests.delete(requestId) + if (error) callback.reject(new Error(String(error))) + else callback.resolve(payload) + } + setLiveYoutubeConfig(config) { - if (config.refreshToken) this.liveYoutubeConfig.refreshToken = config.refreshToken - if (config.visitorData) this.liveYoutubeConfig.visitorData = config.visitorData + if (config.refreshToken) + this.liveYoutubeConfig.refreshToken = config.refreshToken + if (config.visitorData) + this.liveYoutubeConfig.visitorData = config.visitorData } _flushStatsUpdates() { @@ -502,18 +908,33 @@ export default class WorkerManager { } let bestWorker = null - let minLoad = Number.POSITIVE_INFINITY + let minCost = Number.POSITIVE_INFINITY for (const worker of this.workers) { if (worker.isConnected()) { - const load = this.workerLoad.get(worker.id) || 0 - if (load < minLoad) { - minLoad = load + const cost = this._calculateWorkerCost(worker.id) + if (cost < minCost) { + minCost = cost bestWorker = worker } } } + const threshold = this.scalingConfig.maxPlayersPerWorker + + if (minCost >= threshold && this.workers.length < this.maxWorkers) { + logger( + 'debug', + 'Cluster', + `Best worker is saturated (Cost: ${minCost.toFixed(2)}). 
Forking new worker.` + ) + const newWorker = this.forkWorker() + if (newWorker) { + this.assignGuildToWorker(playerKey, newWorker) + return newWorker + } + } + if (!bestWorker) { bestWorker = this.forkWorker() if (!bestWorker) { @@ -521,6 +942,33 @@ export default class WorkerManager { } } + // Warning logs if system is squeezed + if (minCost >= threshold) { + if (this.workers.length >= this.maxWorkers) { + logger( + 'warn', + 'Cluster', + '\x1b[31m! THIS SERVER IS OPERATING AT CRITICAL CAPACITY !\x1b[0m' + ) + logger( + 'warn', + 'Cluster', + '\x1b[31mIt is EXTREMELY RECOMMENDED that you scale your instance.\x1b[0m' + ) + logger( + 'warn', + 'Cluster', + '\x1b[31mIf this client serves a large volume of users or multiple bots, it is time to implement a server mesh for better performance.\x1b[0m' + ) + } else { + logger( + 'warn', + 'Cluster', + `Worker #${bestWorker.id} is operating under heavy load (squeezed) :p` + ) + } + } + this.assignGuildToWorker(playerKey, bestWorker) return bestWorker } @@ -609,7 +1057,7 @@ export default class WorkerManager { const lastHealthCheck = this.workerHealth.get(workerId) || 0 const startTime = this.workerStartTime.get(workerId) || now const uptimeSeconds = Math.floor((now - startTime) / 1000) - const isHealthy = (now - lastHealthCheck) < 30000 + const isHealthy = now - lastHealthCheck < 30000 workerMetrics[uniqueId] = { clusterId: workerId, @@ -624,6 +1072,7 @@ export default class WorkerManager { } destroy() { + this.isDestroying = true this._stopScalingCheck() this._stopHealthCheck() @@ -652,6 +1101,42 @@ export default class WorkerManager { } } + const streamIds = [] + for (const [streamId, request] of this.streamRequests) { + streamIds.push(streamId) + if (!request.res.headersSent) { + request.res.writeHead(503, { 'Content-Type': 'application/json' }) + request.res.end( + JSON.stringify({ + timestamp: Date.now(), + status: 503, + error: 'Service Unavailable', + message: 'Server shutting down.', + path: request.req.url + }) + ) + } else { + request.res.end() + } + } + + for (const streamId of streamIds) { + this._cleanupStreamRequest(streamId, false) + } + + for (const socket of this.commandSockets.values()) { + try { + socket.destroy() + } catch {} + } + this.commandSockets.clear() + + if (this.commandServer) { + try { + this.commandServer.close() + } catch {} + } + logger( 'info', 'Cluster', @@ -659,6 +1144,53 @@ export default class WorkerManager { ) } + delegateStream(req, res, payload, options = {}) { + const worker = this.getBestWorker() + if (!worker) return false + + const streamId = crypto.randomBytes(16).toString('hex') + const request = { + id: streamId, + req, + res, + workerId: worker.id, + options, + timeout: null, + cleaned: false + } + + request.timeout = setTimeout(() => { + const activeRequest = this.streamRequests.get(streamId) + if (activeRequest) { + res.writeHead(504, { 'Content-Type': 'application/json' }) + res.end( + JSON.stringify({ + error: 'Gateway Timeout', + message: 'Stream worker timed out' + }) + ) + this._cleanupStreamRequest(streamId, true) + } + }, 60000) + + this.streamRequests.set(streamId, request) + + res.on('close', () => { + this._cleanupStreamRequest(streamId, true) + }) + + this._sendStreamCommand(worker, { + type: 'loadStream', + requestId: streamId, + payload: { + ...payload, + streamId + } + }) + + return true + } + execute(worker, type, payload, options = {}) { return new Promise((resolve, reject) => { this._executeCommand( @@ -685,7 +1217,10 @@ export default class WorkerManager { 
global.nodelink.statsManager.incrementCommandTimeout(type) } - if (retryCount < this.maxRetries && worker.isConnected()) { + if ( + retryCount < this.maxRetries && + (worker.isConnected() || this.commandSockets.has(worker.id)) + ) { logger( 'warn', 'Cluster', @@ -740,7 +1275,7 @@ export default class WorkerManager { }) try { - if (!worker.isConnected()) { + if (!worker.isConnected() && !this.commandSockets.has(worker.id)) { clearTimeout(timeout) this.pendingRequests.delete(requestId) @@ -757,7 +1292,7 @@ export default class WorkerManager { isFast ) } else { - reject(new Error('No workers available')) + reject(new Error('No workers available for retry')) } } else { reject(new Error('Worker disconnected and max retries reached')) @@ -765,7 +1300,40 @@ export default class WorkerManager { return } - worker.send({ type, requestId, payload }) + if (!this.workerReady.has(worker.id)) { + logger( + 'debug', + 'Cluster', + `Waiting for worker ${worker.id} to be ready before sending ${type}` + ) + let attempts = 0 + const checkReady = setInterval(() => { + attempts++ + if ( + this.workerReady.has(worker.id) || + (!worker.isConnected() && !this.commandSockets.has(worker.id)) + ) { + clearInterval(checkReady) + if ( + this.workerReady.has(worker.id) && + (worker.isConnected() || this.commandSockets.has(worker.id)) + ) { + if (!this._sendWorkerCommand(worker, type, requestId, payload)) { + clearTimeout(timeout) + this.pendingRequests.delete(requestId) + reject(new Error('No transport available for worker command')) + } + } + } else if (attempts > 50) { + clearInterval(checkReady) + } + }, 100) + return + } + + if (!this._sendWorkerCommand(worker, type, requestId, payload)) { + throw new Error('No transport available for worker command') + } } catch (error) { clearTimeout(timeout) this.pendingRequests.delete(requestId) @@ -796,4 +1364,4 @@ export default class WorkerManager { } } } -} \ No newline at end of file +} diff --git a/src/playback/AudioMixer.js b/src/playback/AudioMixer.js index fd0a308..5fb59c1 100644 --- a/src/playback/AudioMixer.js +++ b/src/playback/AudioMixer.js @@ -1,6 +1,8 @@ -import { EventEmitter } from 'node:events' import { randomBytes } from 'node:crypto' +import { EventEmitter } from 'node:events' +import { RingBuffer } from './RingBuffer.js' +const LAYER_BUFFER_SIZE = 1024 * 1024 // 1MB per layer (~5 seconds of PCM) export class AudioMixer extends EventEmitter { constructor(config = {}) { @@ -10,45 +12,43 @@ export class AudioMixer extends EventEmitter { this.defaultVolume = config.defaultVolume || 0.8 this.autoCleanup = config.autoCleanup !== false this.enabled = config.enabled !== false + this.outputBuffer = Buffer.allocUnsafe(3840) } - mixBuffers(mainPCM, layersPCM) { if (layersPCM.size === 0 || !this.enabled) { return mainPCM } + if (this.outputBuffer.length < mainPCM.length) { + this.outputBuffer = Buffer.allocUnsafe(mainPCM.length) + } - const output = Buffer.allocUnsafe(mainPCM.length) - for (let i = 0; i < mainPCM.length; i += 2) { let mainSample = mainPCM.readInt16LE(i) - + for (const layer of layersPCM.values()) { if (i < layer.buffer.length) { const layerSample = layer.buffer.readInt16LE(i) mainSample += Math.floor(layerSample * layer.volume) } } - + mainSample = Math.max(-32768, Math.min(32767, mainSample)) - output.writeInt16LE(mainSample, i) + this.outputBuffer.writeInt16LE(mainSample, i) } - - return output - } + return this.outputBuffer.subarray(0, mainPCM.length) + } addLayer(stream, track, volume = null) { if (this.mixLayers.size >= this.maxLayers) { throw new 
Error(`Maximum mix layers (${this.maxLayers}) reached`) } - const id = randomBytes(8).toString('hex') const actualVolume = volume !== null ? volume : this.defaultVolume - const layer = { id, stream, @@ -57,128 +57,109 @@ export class AudioMixer extends EventEmitter { position: 0, startTime: Date.now(), active: true, - currentBuffer: Buffer.alloc(0), + ringBuffer: new RingBuffer(LAYER_BUFFER_SIZE), receivedBytes: 0, emptyReads: 0 } - this.mixLayers.set(id, layer) - stream.on('data', (chunk) => { if (layer.active) { layer.receivedBytes += chunk.length - layer.currentBuffer = Buffer.concat([layer.currentBuffer, chunk]) + layer.ringBuffer.write(chunk) layer.emptyReads = 0 } }) - stream.once('error', (error) => { this.emit('mixError', { id, error }) this.removeLayer(id, 'ERROR') }) - this.emit('mixStarted', { id, track, volume: layer.volume }) - return id } - readLayerChunks(chunkSize) { const layerChunks = new Map() - for (const [id, layer] of this.mixLayers.entries()) { - if (layer.currentBuffer.length === 0) { + if (layer.ringBuffer.length === 0) { layer.emptyReads++ - + if (layer.emptyReads >= 3 && layer.receivedBytes > 0) { this.removeLayer(id, 'FINISHED') } continue } - if (!layer.active) { continue } + const chunk = layer.ringBuffer.read(chunkSize) + if (!chunk) continue - const readSize = Math.min(chunkSize, layer.currentBuffer.length) - const chunk = layer.currentBuffer.subarray(0, readSize) - layer.currentBuffer = layer.currentBuffer.subarray(readSize) layer.emptyReads = 0 - + layerChunks.set(id, { buffer: chunk, volume: layer.volume }) - - layer.position += readSize + layer.position += chunk.length } - return layerChunks } - hasActiveLayers() { return this.mixLayers.size > 0 } - removeLayer(id, reason = 'REMOVED') { const layer = this.mixLayers.get(id) if (!layer) { return false } - layer.active = false - + if (layer.stream && !layer.stream.destroyed) { layer.stream.destroy() } + layer.ringBuffer.dispose() this.mixLayers.delete(id) - this.emit('mixEnded', { id, reason }) - return true } - updateLayerVolume(id, volume) { const layer = this.mixLayers.get(id) if (!layer) { return false } - layer.volume = Math.max(0, Math.min(1, volume)) return true } - getLayer(id) { const layer = this.mixLayers.get(id) if (!layer) { return null } - return { id: layer.id, track: layer.track, @@ -188,9 +169,8 @@ export class AudioMixer extends EventEmitter { } } - getLayers() { - return Array.from(this.mixLayers.values()).map(layer => ({ + return Array.from(this.mixLayers.values()).map((layer) => ({ id: layer.id, track: layer.track, volume: layer.volume, @@ -199,15 +179,13 @@ export class AudioMixer extends EventEmitter { })) } - clearLayers(reason = 'CLEARED') { const ids = Array.from(this.mixLayers.keys()) - + for (const id of ids) { this.removeLayer(id, reason) } - return ids.length } } diff --git a/src/playback/BufferPool.js b/src/playback/BufferPool.js new file mode 100644 index 0000000..e926e29 --- /dev/null +++ b/src/playback/BufferPool.js @@ -0,0 +1,70 @@ +import { logger } from '../utils.js' + +const MAX_POOL_SIZE_BYTES = 50 * 1024 * 1024 +const CLEANUP_INTERVAL = 60000 + +class BufferPool { + constructor() { + this.pools = new Map() + this.totalBytes = 0 + + this.cleanupInterval = setInterval(() => this._cleanup(), CLEANUP_INTERVAL) + this.cleanupInterval.unref() + } + + _getAlignedSize(size) { + if (size <= 1024) return 1024 + let n = size - 1 + n |= n >> 1 + n |= n >> 2 + n |= n >> 4 + n |= n >> 8 + n |= n >> 16 + return n + 1 + } + + acquire(size) { + const alignedSize = 
this._getAlignedSize(size)
+    const pool = this.pools.get(alignedSize)
+    if (pool && pool.length > 0) {
+      const buffer = pool.pop()
+      this.totalBytes -= alignedSize
+      return buffer
+    }
+    return Buffer.allocUnsafe(alignedSize)
+  }
+
+  release(buffer) {
+    if (!Buffer.isBuffer(buffer)) return
+
+    const size = buffer.length
+
+    if (size < 1024 || size > 10 * 1024 * 1024) return
+
+    if (this.totalBytes + size > MAX_POOL_SIZE_BYTES) {
+      return
+    }
+
+    if (!this.pools.has(size)) {
+      this.pools.set(size, [])
+    }
+
+    this.pools.get(size).push(buffer)
+    this.totalBytes += size
+  }
+
+  clear() {
+    this.pools.clear()
+    this.totalBytes = 0
+  }
+
+  _cleanup() {
+    if (this.totalBytes > MAX_POOL_SIZE_BYTES) {
+      this.pools.clear()
+      this.totalBytes = 0
+      logger('debug', 'BufferPool', 'Pool cleared due to size limit.')
+    }
+  }
+}
+
+export const bufferPool = new BufferPool()
diff --git a/src/playback/RingBuffer.js b/src/playback/RingBuffer.js
new file mode 100644
index 0000000..c78998c
--- /dev/null
+++ b/src/playback/RingBuffer.js
@@ -0,0 +1,114 @@
+import { bufferPool } from './BufferPool.js'
+
+export class RingBuffer {
+  constructor(size) {
+    this.buffer = bufferPool.acquire(size)
+    this.size = size
+    this.writeOffset = 0
+    this.readOffset = 0
+    this.length = 0
+  }
+
+  dispose() {
+    if (this.buffer) {
+      bufferPool.release(this.buffer)
+      this.buffer = null
+    }
+  }
+
+  write(chunk) {
+    const bytesToWrite = chunk.length
+    const availableAtEnd = this.size - this.writeOffset
+
+    if (bytesToWrite <= availableAtEnd) {
+      chunk.copy(this.buffer, this.writeOffset)
+    } else {
+      chunk.copy(this.buffer, this.writeOffset, 0, availableAtEnd)
+      chunk.copy(this.buffer, 0, availableAtEnd)
+    }
+
+    const newLength = this.length + bytesToWrite
+    if (newLength > this.size) {
+      this.readOffset = (this.readOffset + (newLength - this.size)) % this.size
+      this.length = this.size
+    } else {
+      this.length = newLength
+    }
+    this.writeOffset = (this.writeOffset + bytesToWrite) % this.size
+  }
+
+  read(n) {
+    const bytesToReadNum = Math.min(Math.max(0, n), this.length)
+    if (bytesToReadNum === 0) return null
+    const out = Buffer.allocUnsafe(bytesToReadNum)
+
+    const availableAtEnd = this.size - this.readOffset
+    if (bytesToReadNum <= availableAtEnd) {
+      this.buffer.copy(
+        out,
+        0,
+        this.readOffset,
+        this.readOffset + bytesToReadNum
+      )
+    } else {
+      this.buffer.copy(out, 0, this.readOffset, this.size)
+      this.buffer.copy(out, availableAtEnd, 0, bytesToReadNum - availableAtEnd)
+    }
+
+    this.readOffset = (this.readOffset + bytesToReadNum) % this.size
+    this.length -= bytesToReadNum
+    return out
+  }
+
+  skip(n) {
+    const skipAmount = Math.max(0, n)
+    const bytesToSkip = Math.min(skipAmount, this.length)
+    this.readOffset = (this.readOffset + bytesToSkip) % this.size
+    this.length -= bytesToSkip
+    return bytesToSkip
+  }
+
+  peek(n) {
+    const bytesToPeekNum = Math.min(Math.max(0, n), this.length)
+    if (bytesToPeekNum === 0) return null
+    const out = Buffer.allocUnsafe(bytesToPeekNum)
+
+    const availableAtEnd = this.size - this.readOffset
+    if (bytesToPeekNum <= availableAtEnd) {
+      this.buffer.copy(
+        out,
+        0,
+        this.readOffset,
+        this.readOffset + bytesToPeekNum
+      )
+    } else {
+      this.buffer.copy(out, 0, this.readOffset, this.size)
+      this.buffer.copy(out, availableAtEnd, 0, bytesToPeekNum - availableAtEnd)
+    }
+    return out
+  }
+
+  getContiguous(n) {
+    const bytesToPeekNum = Math.min(Math.max(0, n), this.length)
+    if (bytesToPeekNum === 0) return null
+    const availableAtEnd = this.size - this.readOffset
+
+    if (bytesToPeekNum <= availableAtEnd) {
+      return this.buffer.subarray(
+        this.readOffset,
+        this.readOffset + bytesToPeekNum
+      )
+    }
+
+    const out = Buffer.allocUnsafe(bytesToPeekNum)
+    this.buffer.copy(out, 0, this.readOffset, this.size)
+    this.buffer.copy(out, availableAtEnd, 0, bytesToPeekNum - availableAtEnd)
+    return out
+  }
+
+  clear() {
+    this.writeOffset = 0
+    this.readOffset = 0
+    this.length = 0
+  }
+}
diff --git a/src/playback/VolumeTransformer.js b/src/playback/VolumeTransformer.js
index 11f94dc..201d4de 100644
--- a/src/playback/VolumeTransformer.js
+++ b/src/playback/VolumeTransformer.js
@@ -1,8 +1,13 @@
 import { Transform } from 'node:stream'
-import { clamp16Bit } from './filters/dsp/clamp16Bit.js'
 
 const FADE_FRAMES = 50 // 50 frames * 20ms/frame = 1 second fade
 
+const VOLUME_LUT = new Int32Array(151)
+for (let i = 0; i <= 150; i++) {
+  const floatMultiplier = Math.tan(i * 0.0079)
+  VOLUME_LUT[i] = Math.floor(floatMultiplier * 10000)
+}
+
 export class VolumeTransformer extends Transform {
   constructor(options = {}) {
     super({ highWaterMark: 3840, ...options })
@@ -12,12 +17,13 @@ export class VolumeTransformer extends Transform {
     this.fadeProgress = FADE_FRAMES
 
     this.integerMultiplier = 10000
+    this.lastVolumePercent = null
   }
 
   _setupMultipliers(activeVolumePercent) {
-    if (activeVolumePercent <= 150) {
-      const floatMultiplier = Math.tan(activeVolumePercent * 0.0079)
-      this.integerMultiplier = Math.floor(floatMultiplier * 10000)
+    const roundedPercent = Math.round(activeVolumePercent)
+    if (roundedPercent <= 150) {
+      this.integerMultiplier = VOLUME_LUT[Math.max(0, roundedPercent)]
     } else {
       this.integerMultiplier = Math.floor((24621 * activeVolumePercent) / 150)
     }
@@ -53,12 +59,22 @@ export class VolumeTransformer extends Transform {
       return callback()
     }
 
-    this._setupMultipliers(volumePercent)
+    if (volumePercent !== this.lastVolumePercent) {
+      this._setupMultipliers(volumePercent)
+      this.lastVolumePercent = volumePercent
+    }
+
+    const samples = new Int16Array(
+      chunk.buffer,
+      chunk.byteOffset,
+      chunk.length / 2
+    )
+    const multiplier = this.integerMultiplier
 
-    for (let i = 0; i < chunk.length; i += 2) {
-      const sample = chunk.readInt16LE(i)
-      const value = (sample * this.integerMultiplier) / 10000
-      chunk.writeInt16LE(clamp16Bit(value), i)
+    for (let i = 0; i < samples.length; i++) {
+      const value = (samples[i] * multiplier) / 10000
+      samples[i] =
+        value < -32768 ? -32768 : value > 32767 ?
32767 : Math.round(value) } this.push(chunk) diff --git a/src/playback/demuxers/Flv.js b/src/playback/demuxers/Flv.js new file mode 100644 index 0000000..ca63a05 --- /dev/null +++ b/src/playback/demuxers/Flv.js @@ -0,0 +1,71 @@ +import { Transform } from 'node:stream' +import { logger } from '../../utils.js' +import { RingBuffer } from '../RingBuffer.js' + +const STATE_HEADER = 0 +const STATE_TAG_HEADER = 1 +const STATE_TAG_BODY = 2 + +const TAG_TYPE_AUDIO = 8 +const BUFFER_SIZE = 2 * 1024 * 1024 // 2MB + +export class FlvDemuxer extends Transform { + constructor(options = {}) { + super({ ...options, readableObjectMode: true }) + this.on('error', (err) => + logger( + 'error', + 'FlvDemuxer', + `Stream error: ${err.message} (${err.code})` + ) + ) + this.ringBuffer = new RingBuffer(BUFFER_SIZE) + this.state = STATE_HEADER + this.expectedSize = 9 + this.currentTag = null + } + + _transform(chunk, _encoding, callback) { + this.ringBuffer.write(chunk) + + while (this.ringBuffer.length >= this.expectedSize) { + if (this.state === STATE_HEADER) { + const header = this.ringBuffer.peek(3) + if (header.toString('ascii') !== 'FLV') { + return callback(new Error('Invalid FLV header')) + } + this.ringBuffer.read(13) + this.state = STATE_TAG_HEADER + this.expectedSize = 11 + } else if (this.state === STATE_TAG_HEADER) { + const header = this.ringBuffer.read(11) + const type = header.readUInt8(0) + const size = header.readUIntBE(1, 3) + + this.currentTag = { type, size } + this.state = STATE_TAG_BODY + this.expectedSize = size + 4 + } else if (this.state === STATE_TAG_BODY) { + const body = this.ringBuffer.read(this.currentTag.size) + // Skip PreviousTagSize (4 bytes) + this.ringBuffer.read(4) + + if (this.currentTag.type === TAG_TYPE_AUDIO) { + this.push(body) + } + + this.state = STATE_TAG_HEADER + this.expectedSize = 11 + } + } + + callback() + } + + _destroy(err, cb) { + this.ringBuffer.dispose() + cb(err) + } +} + +export default FlvDemuxer diff --git a/src/playback/demuxers/WebmOpus.js b/src/playback/demuxers/WebmOpus.js index ecdbe6a..f3d9d7f 100644 --- a/src/playback/demuxers/WebmOpus.js +++ b/src/playback/demuxers/WebmOpus.js @@ -1,23 +1,34 @@ import { Transform } from 'node:stream' +import { logger } from '../../utils.js' +import { RingBuffer } from '../RingBuffer.js' const TOO_SHORT = Symbol('TOO_SHORT') +const INVALID_VINT = Symbol('INVALID_VINT') +const BUFFER_SIZE = 2 * 1024 * 1024 const TAGS = Object.freeze({ '1a45dfa3': true, 18538067: true, '1f43b675': true, '1654ae6b': true, + '1c53bb6b': false, + '1254c367': false, ae: true, d7: false, 83: false, a3: false, - '63a2': false + '63a2': false, + e7: false, + a0: true, + a1: false }) const OPUS_HEAD = Buffer.from([0x4f, 0x70, 0x75, 0x73, 0x48, 0x65, 0x61, 0x64]) +const MAX_TAG_SIZE = 10 * 1024 * 1024 const readVintLength = (buf, i) => { if (i < 0 || i >= buf.length) return TOO_SHORT + if (buf[i] === 0) return INVALID_VINT let n = 0 for (; n < 8; n++) if ((1 << (7 - n)) & buf[i]) break n++ @@ -26,19 +37,27 @@ const readVintLength = (buf, i) => { const readVint = (buf, start, end) => { const len = readVintLength(buf, start) - if (len === TOO_SHORT || end > buf.length) return TOO_SHORT - let mask = (1 << (8 - len)) - 1 - let value = buf[start] & mask - for (let i = start + 1; i < end; i++) value = (value << 8) | buf[i] + if (len === TOO_SHORT || len === INVALID_VINT || end > buf.length) + return TOO_SHORT + const mask = (1 << (8 - len)) - 1 + let value = BigInt(buf[start] & mask) + for (let i = start + 1; i < end; i++) value = (value << 8n) | 
BigInt(buf[i]) return value } class WebmBaseDemuxer extends Transform { constructor(options = {}) { super({ readableObjectMode: true, ...options }) - this.remainder = null - this.total = 0 - this.processed = 0 + this.on('error', (err) => + logger( + 'error', + 'WebmDemuxer', + `Stream error: ${err.message} (${err.code})` + ) + ) + this.ringBuffer = new RingBuffer(BUFFER_SIZE) + this.total = 0n + this.processed = 0n this.skipUntil = null this.currentTrack = null this.pendingTrack = {} @@ -48,44 +67,63 @@ class WebmBaseDemuxer extends Transform { _transform(chunk, _, done) { if (!chunk?.length) return done() - this.total += chunk.length - if (this.remainder) { - chunk = Buffer.concat([this.remainder, chunk]) - this.remainder = null - } + this.ringBuffer.write(chunk) + this.total += BigInt(chunk.length) + + if (this.skipUntil !== null) { + const remainingToSkip = this.skipUntil - this.processed + const bufferLen = BigInt(this.ringBuffer.length) + const toSkip = remainingToSkip < bufferLen ? remainingToSkip : bufferLen - let offset = 0 - if (this.skipUntil && this.total > this.skipUntil) { - offset = this.skipUntil - this.processed + if (toSkip > 0n) { + const skipNum = + toSkip > BigInt(Number.MAX_SAFE_INTEGER) + ? Number.MAX_SAFE_INTEGER + : Number(toSkip) + this.ringBuffer.skip(skipNum) + this.processed += BigInt(skipNum) + } + if (this.processed < this.skipUntil) return done() this.skipUntil = null - } else if (this.skipUntil) { - this.processed += chunk.length - done() - return } - let res - while (res !== TOO_SHORT) { + while (true) { + const currentData = this.ringBuffer.getContiguous(this.ringBuffer.length) + if (!currentData) break + + let res try { - res = this._readTag(chunk, offset) + res = this._readTag(currentData, 0) } catch (err) { + logger('error', 'WebmDemuxer', `Error in _readTag: ${err.message}`) done(err) return } + if (res === TOO_SHORT) break + if (res._skipUntil) { this.skipUntil = res._skipUntil + this.ringBuffer.skip(this.ringBuffer.length) + this.processed += BigInt(this.ringBuffer.length) break } - if (res.offset) offset = res.offset - else break - } - this.processed += offset - this.remainder = offset < chunk.length ? chunk.subarray(offset) : null + if (res.offset) { + const offset = BigInt(res.offset) + const skipNum = + offset > BigInt(Number.MAX_SAFE_INTEGER) + ? 
Number.MAX_SAFE_INTEGER + : Number(offset) + this.ringBuffer.skip(skipNum) + this.processed += BigInt(skipNum) + } else { + break + } + } - if (this.total > 1e9 && !this.skipUntil) { - this.total = this.processed = 0 + if (this.total > 1000000000n && !this.skipUntil) { + this.total = this.processed = 0n } done() @@ -94,43 +132,75 @@ class WebmBaseDemuxer extends Transform { _readEBMLId(chunk, offset) { const len = readVintLength(chunk, offset) if (len === TOO_SHORT) return TOO_SHORT + if (len === INVALID_VINT) return INVALID_VINT return { id: chunk.subarray(offset, offset + len), offset: offset + len } } _readTagSize(chunk, offset) { const len = readVintLength(chunk, offset) if (len === TOO_SHORT) return TOO_SHORT + if (len === INVALID_VINT) return INVALID_VINT const dataLen = readVint(chunk, offset, offset + len) - return { offset: offset + len, dataLen } + return { offset: offset + len, dataLen, vintLen: len } } _readTag(chunk, offset) { const idData = this._readEBMLId(chunk, offset) if (idData === TOO_SHORT) return TOO_SHORT + if (idData === INVALID_VINT) { + return { offset: 1 } + } const tag = idData.id.toString('hex') if (!this.ebmlFound) { - if (tag === '1a45dfa3') this.ebmlFound = true - else throw new Error('Invalid WebM: missing EBML header') + if (tag === '1a45dfa3' || tag === '1f43b675') { + logger('debug', 'WebmDemuxer', `Header found: ${tag}`) + this.ebmlFound = true + } else { + return { offset: 1 } + } } - offset = idData.offset - const sizeData = this._readTagSize(chunk, offset) + let currentOffset = idData.offset + const sizeData = this._readTagSize(chunk, currentOffset) if (sizeData === TOO_SHORT) return TOO_SHORT + if (sizeData === INVALID_VINT) { + return { offset: 1 } + } + + const { dataLen, vintLen } = sizeData + + if (tag !== '18538067' && dataLen > BigInt(MAX_TAG_SIZE)) { + const isUnknownSize = dataLen === 2n ** BigInt(7 * vintLen) - 1n + if (!isUnknownSize) { + return { offset: 1 } + } + } - const { dataLen } = sizeData - offset = sizeData.offset + currentOffset = sizeData.offset if (!(tag in TAGS)) { - if (chunk.length > offset + dataLen) return { offset: offset + dataLen } - return { offset, _skipUntil: this.processed + offset + dataLen } + const isUnknownSize = dataLen === 2n ** BigInt(7 * vintLen) - 1n + const numDataLen = Number(dataLen) + + if (isUnknownSize) { + return { offset: 1 } + } + + if (chunk.length > currentOffset + numDataLen) + return { offset: currentOffset + numDataLen } + return { + offset: currentOffset, + _skipUntil: this.processed + BigInt(currentOffset + numDataLen) + } } const hasChildren = TAGS[tag] - if (hasChildren) return { offset } + if (hasChildren) return { offset: currentOffset } - if (offset + dataLen > chunk.length) return TOO_SHORT - const data = chunk.subarray(offset, offset + dataLen) + const numDataLen = Number(dataLen) + if (currentOffset + numDataLen > chunk.length) return TOO_SHORT + const data = chunk.subarray(currentOffset, currentOffset + numDataLen) if (!this.currentTrack) { if (tag === 'ae') this.pendingTrack = {} @@ -144,20 +214,21 @@ class WebmBaseDemuxer extends Transform { } if (tag === '63a2') { - this._checkHead(data) - this.emit('head', data) + try { + this._checkHead(data) + this.emit('head', data) + } catch (_e) {} } else if (tag === 'a3') { - if (!this.currentTrack) throw new Error('No valid audio track found') - if ((data[0] & 0xf) === this.currentTrack.number) + if (this.currentTrack && (data[0] & 0xf) === this.currentTrack.number) { this.push(data.subarray(4)) + } } - return { offset: offset + dataLen 
} + return { offset: currentOffset + numDataLen } } _destroy(err, cb) { this._cleanup() - this.removeAllListeners() cb?.(err) } @@ -167,7 +238,7 @@ class WebmBaseDemuxer extends Transform { } _cleanup() { - this.remainder = null + this.ringBuffer.dispose() this.pendingTrack = {} this.currentTrack = null this.ebmlFound = false diff --git a/src/playback/filters/chorus.js b/src/playback/filters/chorus.js index e9113cc..843bd82 100644 --- a/src/playback/filters/chorus.js +++ b/src/playback/filters/chorus.js @@ -98,4 +98,14 @@ export default class Chorus { return chunk } + + clear() { + for (const delay of this.delays) { + delay.clear() + } + this.lfos[0].phase = 0 + this.lfos[1].phase = Math.PI / 2 + this.lfos[2].phase = Math.PI + this.lfos[3].phase = (3 * Math.PI) / 2 + } } diff --git a/src/playback/filters/echo.js b/src/playback/filters/echo.js index 7e698c7..3417ee9 100644 --- a/src/playback/filters/echo.js +++ b/src/playback/filters/echo.js @@ -53,4 +53,9 @@ export default class Echo { return chunk } + + clear() { + this.leftDelay.clear() + this.rightDelay.clear() + } } diff --git a/src/playback/filters/flanger.js b/src/playback/filters/flanger.js index 57bf892..cb7264a 100644 --- a/src/playback/filters/flanger.js +++ b/src/playback/filters/flanger.js @@ -1,92 +1,95 @@ -import { SAMPLE_RATE } from '../../constants.js' -import { clamp16Bit } from './dsp/clamp16Bit.js' -import DelayLine from './dsp/delay.js' -import LFO from './dsp/lfo.js' - -const MAX_DELAY_MS = 15 -const bufferSize = Math.ceil((SAMPLE_RATE * MAX_DELAY_MS) / 1000) - -export default class Flanger { - constructor() { - this.priority = 10 - - this.delayLeft = new DelayLine(bufferSize) - this.delayRight = new DelayLine(bufferSize) - - this.lfoLeft = new LFO('SINE') - this.lfoRight = new LFO('SINE') - this.lfoRight.phase = Math.PI / 4 - - this.rate = 0 - this.depth = 0 - this.delay = 5 - this.feedback = 0 - this.mix = 0.5 - - this.lastLeftOutput = 0 - this.lastRightOutput = 0 - } - - update(filters) { - const settings = filters.flanger || {} - - this.rate = Math.max(0, Math.min(settings.rate || 0, 10)) - - this.depth = Math.max(0, Math.min(settings.depth || 0, 1.0)) - - this.delay = Math.max( - 1, - Math.min(settings.delay || 5, MAX_DELAY_MS - 5) - ) - - this.feedback = Math.max( - -0.95, - Math.min(settings.feedback || 0, 0.95) - ) - - this.mix = Math.max(0, Math.min(settings.mix || 0.5, 1.0)) - - this.lfoLeft.update(this.rate, this.depth) - this.lfoRight.update(this.rate, this.depth) - } - - process(chunk) { - if (this.rate === 0 || this.depth === 0 || this.mix === 0) { - return chunk - } - - const baseDelaySamples = (this.delay * SAMPLE_RATE) / 1000 - const maxModulation = this.depth * (SAMPLE_RATE * 0.003) - - for (let i = 0; i < chunk.length; i += 4) { - const leftInput = chunk.readInt16LE(i) - const rightInput = chunk.readInt16LE(i + 2) - - const lfoValueLeft = this.lfoLeft.getValue() - const lfoValueRight = this.lfoRight.getValue() - - const delayTimeLeft = baseDelaySamples + lfoValueLeft * maxModulation - const delayTimeRight = baseDelaySamples + lfoValueRight * maxModulation - - const delayedLeft = this.delayLeft.read(delayTimeLeft) - const delayedRight = this.delayRight.read(delayTimeRight) - - const wetLeft = delayedLeft + this.lastLeftOutput * this.feedback - const wetRight = delayedRight + this.lastRightOutput * this.feedback - - const outputLeft = leftInput * (1.0 - this.mix) + wetLeft * this.mix - const outputRight = rightInput * (1.0 - this.mix) + wetRight * this.mix - - this.lastLeftOutput = wetLeft - 
this.lastRightOutput = wetRight - - this.delayLeft.write(clamp16Bit(leftInput + wetLeft * this.feedback)) - this.delayRight.write(clamp16Bit(rightInput + wetRight * this.feedback)) - - chunk.writeInt16LE(clamp16Bit(outputLeft), i) - chunk.writeInt16LE(clamp16Bit(outputRight), i + 2) - } - - return chunk - } -} +import { SAMPLE_RATE } from '../../constants.js' +import { clamp16Bit } from './dsp/clamp16Bit.js' +import DelayLine from './dsp/delay.js' +import LFO from './dsp/lfo.js' + +const MAX_DELAY_MS = 15 +const bufferSize = Math.ceil((SAMPLE_RATE * MAX_DELAY_MS) / 1000) + +export default class Flanger { + constructor() { + this.priority = 10 + + this.delayLeft = new DelayLine(bufferSize) + this.delayRight = new DelayLine(bufferSize) + + this.lfoLeft = new LFO('SINE') + this.lfoRight = new LFO('SINE') + this.lfoRight.phase = Math.PI / 4 + + this.rate = 0 + this.depth = 0 + this.delay = 5 + this.feedback = 0 + this.mix = 0.5 + + this.lastLeftOutput = 0 + this.lastRightOutput = 0 + } + + update(filters) { + const settings = filters.flanger || {} + + this.rate = Math.max(0, Math.min(settings.rate || 0, 10)) + + this.depth = Math.max(0, Math.min(settings.depth || 0, 1.0)) + + this.delay = Math.max(1, Math.min(settings.delay || 5, MAX_DELAY_MS - 5)) + + this.feedback = Math.max(-0.95, Math.min(settings.feedback || 0, 0.95)) + + this.mix = Math.max(0, Math.min(settings.mix || 0.5, 1.0)) + + this.lfoLeft.update(this.rate, this.depth) + this.lfoRight.update(this.rate, this.depth) + } + + process(chunk) { + if (this.rate === 0 || this.depth === 0 || this.mix === 0) { + return chunk + } + + const baseDelaySamples = (this.delay * SAMPLE_RATE) / 1000 + const maxModulation = this.depth * (SAMPLE_RATE * 0.003) + + for (let i = 0; i < chunk.length; i += 4) { + const leftInput = chunk.readInt16LE(i) + const rightInput = chunk.readInt16LE(i + 2) + + const lfoValueLeft = this.lfoLeft.getValue() + const lfoValueRight = this.lfoRight.getValue() + + const delayTimeLeft = baseDelaySamples + lfoValueLeft * maxModulation + const delayTimeRight = baseDelaySamples + lfoValueRight * maxModulation + + const delayedLeft = this.delayLeft.read(delayTimeLeft) + const delayedRight = this.delayRight.read(delayTimeRight) + + const wetLeft = delayedLeft + this.lastLeftOutput * this.feedback + const wetRight = delayedRight + this.lastRightOutput * this.feedback + + const outputLeft = leftInput * (1.0 - this.mix) + wetLeft * this.mix + const outputRight = rightInput * (1.0 - this.mix) + wetRight * this.mix + + this.lastLeftOutput = wetLeft + this.lastRightOutput = wetRight + + this.delayLeft.write(clamp16Bit(leftInput + wetLeft * this.feedback)) + this.delayRight.write(clamp16Bit(rightInput + wetRight * this.feedback)) + + chunk.writeInt16LE(clamp16Bit(outputLeft), i) + chunk.writeInt16LE(clamp16Bit(outputRight), i + 2) + } + + return chunk + } + + clear() { + this.delayLeft.clear() + this.delayRight.clear() + this.lastLeftOutput = 0 + this.lastRightOutput = 0 + this.lfoLeft.phase = 0 + this.lfoRight.phase = Math.PI / 4 + } +} diff --git a/src/playback/filters/karaoke.js b/src/playback/filters/karaoke.js index 9872ecd..41ffe8c 100644 --- a/src/playback/filters/karaoke.js +++ b/src/playback/filters/karaoke.js @@ -2,6 +2,8 @@ import { SAMPLE_RATE } from '../../constants.js' import { clamp16Bit } from './dsp/clamp16Bit.js' const MAX_OUTPUT_GAIN = 0.98 +const SCALE_16 = 32768 +const INV_16 = 1 / SCALE_16 export default class Karaoke { constructor() { @@ -14,6 +16,16 @@ export default class Karaoke { this.lp_b0 = this.lp_b1 = 
this.lp_b2 = this.lp_a1 = this.lp_a2 = 0 this.hp_b0 = this.hp_b1 = this.hp_b2 = this.hp_a1 = this.hp_a2 = 0 + this._prevGain = MAX_OUTPUT_GAIN + this._bufL = null + this._bufR = null + this._bufFrames = 0 + + this._resetFilterState() + this.updateCoefficients() + } + + _resetFilterState() { this.lp_left_x1 = this.lp_left_x2 = this.lp_left_y1 = this.lp_left_y2 = 0 this.lp_right_x1 = this.lp_right_x2 = @@ -26,84 +38,89 @@ export default class Karaoke { this.hp_right_y1 = this.hp_right_y2 = 0 + } - this._prevGain = MAX_OUTPUT_GAIN - this._inv32768 = 1 / 32768 + _ensureBuffers(frames) { + if (frames <= this._bufFrames) return + this._bufFrames = frames + this._bufL = new Float32Array(frames) + this._bufR = new Float32Array(frames) } updateCoefficients() { - if (this.filterBand === 0 || this.filterWidth === 0) { + const band = this.filterBand + const widthIn = this.filterWidth + + if (!band || !widthIn) { this.lp_b0 = this.hp_b0 = 1 this.lp_b1 = this.lp_b2 = this.lp_a1 = this.lp_a2 = 0 this.hp_b1 = this.hp_b2 = this.hp_a1 = this.hp_a2 = 0 return } - const fc = Math.max(1, Math.min(SAMPLE_RATE * 0.49, this.filterBand)) - const width = Math.max(1e-6, this.filterWidth) - const Q = Math.max(0.0001, fc / width) - - const preWarp = Math.tan(Math.PI * (fc / SAMPLE_RATE)) - const omegaRatio = preWarp / (1 + preWarp * preWarp) - const sinTerm = Math.min(1, Math.max(-1, omegaRatio * 2)) - const alpha = Math.abs(Math.sin(sinTerm)) / Math.max(1e-12, 2 * Q) - const cosOmega0 = Math.cos(2 * Math.PI * (fc / SAMPLE_RATE)) - - this.lp_b0 = (1 - cosOmega0) / 2 - this.lp_b1 = 1 - cosOmega0 - this.lp_b2 = (1 - cosOmega0) / 2 - let lpA0 = 1 + alpha - if (Math.abs(lpA0) < 1e-12) lpA0 = 1e-12 - this.lp_a1 = (-2 * cosOmega0) / lpA0 - this.lp_a2 = (1 - alpha) / lpA0 - this.lp_b0 /= lpA0 - this.lp_b1 /= lpA0 - this.lp_b2 /= lpA0 - - this.hp_b0 = (1 + cosOmega0) / 2 - this.hp_b1 = -(1 + cosOmega0) - this.hp_b2 = (1 + cosOmega0) / 2 - let hpA0 = 1 + alpha - if (Math.abs(hpA0) < 1e-12) hpA0 = 1e-12 - this.hp_a1 = (-2 * cosOmega0) / hpA0 - this.hp_a2 = (1 - alpha) / hpA0 - this.hp_b0 /= hpA0 - this.hp_b1 /= hpA0 - this.hp_b2 /= hpA0 + + const fc = Math.max(1, Math.min(SAMPLE_RATE * 0.49, band)) + const width = Math.max(1e-6, widthIn) + const Q = Math.max(1e-4, fc / width) + + const omega0 = (2 * Math.PI * fc) / SAMPLE_RATE + const cos0 = Math.cos(omega0) + const sin0 = Math.sin(omega0) + const alpha = sin0 / (2 * Q) + + const a0 = 1 + alpha + const invA0 = 1 / a0 + const a1 = -2 * cos0 * invA0 + const a2 = (1 - alpha) * invA0 + + const lpB0 = (1 - cos0) * 0.5 * invA0 + const lpB1 = (1 - cos0) * invA0 + const lpB2 = lpB0 + + this.lp_b0 = lpB0 + this.lp_b1 = lpB1 + this.lp_b2 = lpB2 + this.lp_a1 = a1 + this.lp_a2 = a2 + + const hpB0 = (1 + cos0) * 0.5 * invA0 + const hpB1 = -(1 + cos0) * invA0 + const hpB2 = hpB0 + + this.hp_b0 = hpB0 + this.hp_b1 = hpB1 + this.hp_b2 = hpB2 + this.hp_a1 = a1 + this.hp_a2 = a2 } update(filters) { - const { - level = 0, - monoLevel = 0, - filterBand = 0, - filterWidth = 0 - } = filters.karaoke || {} - this.level = Math.max(0, Math.min(1, level)) - this.monoLevel = Math.max(0, Math.min(1, monoLevel)) - this.filterBand = filterBand - this.filterWidth = filterWidth + const k = filters?.karaoke || {} + const level = k.level || 0 + const monoLevel = k.monoLevel || 0 + + this.level = level <= 0 ? 0 : level >= 1 ? 1 : level + this.monoLevel = monoLevel <= 0 ? 0 : monoLevel >= 1 ? 
1 : monoLevel + this.filterBand = k.filterBand || 0 + this.filterWidth = k.filterWidth || 0 + this.updateCoefficients() - this.lp_left_x1 = this.lp_left_x2 = this.lp_left_y1 = this.lp_left_y2 = 0 - this.lp_right_x1 = - this.lp_right_x2 = - this.lp_right_y1 = - this.lp_right_y2 = - 0 - this.hp_left_x1 = this.hp_left_x2 = this.hp_left_y1 = this.hp_left_y2 = 0 - this.hp_right_x1 = - this.hp_right_x2 = - this.hp_right_y1 = - this.hp_right_y2 = - 0 + this._resetFilterState() } process(chunk) { - if (this.level === 0 && this.monoLevel === 0) return chunk + const level = this.level + const monoLevel = this.monoLevel + if (!level && !monoLevel) return chunk const frames = chunk.length >> 2 - if (frames === 0) return chunk + if (!frames) return chunk + + this._ensureBuffers(frames) + const outLBuf = this._bufL + const outRBuf = this._bufR + + const doFilter = !!(level && this.filterBand && this.filterWidth) - const inv32768 = this._inv32768 const lp_b0 = this.lp_b0, lp_b1 = this.lp_b1, lp_b2 = this.lp_b2, @@ -114,6 +131,7 @@ export default class Karaoke { hp_b2 = this.hp_b2, hp_a1 = this.hp_a1, hp_a2 = this.hp_a2 + let lpLx1 = this.lp_left_x1, lpLx2 = this.lp_left_x2, lpLy1 = this.lp_left_y1, @@ -132,31 +150,19 @@ export default class Karaoke { hpRy2 = this.hp_right_y2 let originalEnergy = 0 - for (let i = 0; i < chunk.length; i += 4) { - const l = chunk.readInt16LE(i) * inv32768 - const r = chunk.readInt16LE(i + 2) * inv32768 - originalEnergy += l * l + r * r - } - const denom = frames * 2 || 1 - originalEnergy /= denom - - const processedLeft = new Float32Array(frames) - const processedRight = new Float32Array(frames) + let processedEnergy = 0 - const doFilter = - this.level > 0 && this.filterBand !== 0 && this.filterWidth !== 0 - const monoLevel = this.monoLevel - const level = this.level + for (let f = 0, bi = 0; f < frames; f++, bi += 4) { + let left = chunk.readInt16LE(bi) * INV_16 + let right = chunk.readInt16LE(bi + 2) * INV_16 - let fi = 0 - for (let i = 0; i < chunk.length; i += 4) { - let left = chunk.readInt16LE(i) * inv32768 - let right = chunk.readInt16LE(i + 2) * inv32768 + originalEnergy += left * left + right * right - if (monoLevel > 0) { + if (monoLevel) { const mid = (left + right) * 0.5 - left = left - mid * monoLevel - right = right - mid * monoLevel + const sub = mid * monoLevel + left -= sub + right -= sub } if (doFilter) { @@ -170,6 +176,7 @@ export default class Karaoke { lpLx1 = left lpLy2 = lpLy1 lpLy1 = lowLeft + const lowRight = lp_b0 * right + lp_b1 * lpRx1 + @@ -180,6 +187,7 @@ export default class Karaoke { lpRx1 = right lpRy2 = lpRy1 lpRy1 = lowRight + const highLeft = hp_b0 * left + hp_b1 * hpLx1 + @@ -190,6 +198,7 @@ export default class Karaoke { hpLx1 = left hpLy2 = hpLy1 hpLy1 = highLeft + const highRight = hp_b0 * right + hp_b1 * hpRx1 + @@ -206,46 +215,44 @@ export default class Karaoke { right = lowRight + cancelled * level } - processedLeft[fi] = left - processedRight[fi] = right - fi++ + outLBuf[f] = left + outRBuf[f] = right + processedEnergy += left * left + right * right } - let processedEnergy = 0 - for (let i = 0; i < frames; i++) { - const l = processedLeft[i] - const r = processedRight[i] - processedEnergy += l * l + r * r - } + const denom = frames * 2 + originalEnergy /= denom processedEnergy /= denom let gain = 1 - if (processedEnergy > 1e-15) + if (processedEnergy > 1e-15) { gain = Math.sqrt(Math.max(1e-12, originalEnergy) / processedEnergy) - gain = Math.min(gain, MAX_OUTPUT_GAIN) + if (gain > MAX_OUTPUT_GAIN) gain = MAX_OUTPUT_GAIN + } else { 
+ gain = MAX_OUTPUT_GAIN + } const prev = this._prevGain || MAX_OUTPUT_GAIN - const attackFactor = gain > prev ? 0.06 : 0.3 - const smoothedTarget = prev + (gain - prev) * attackFactor + const smooth = gain > prev ? 0.06 : 0.3 + const target = prev + (gain - prev) * smooth let current = prev - const step = (smoothedTarget - current) / Math.max(1, frames) + const step = (target - prev) / frames - let wi = 0 - for (let i = 0; i < chunk.length; i += 4) { + for (let f = 0, bi = 0; f < frames; f++, bi += 4) { current += step - let outL = processedLeft[wi] * current - let outR = processedRight[wi] * current + + let outL = outLBuf[f] * current + let outR = outRBuf[f] * current const peak = Math.max(Math.abs(outL), Math.abs(outR)) if (peak > 0.9999) { - const limiterScale = 0.9999 / peak - outL *= limiterScale - outR *= limiterScale + const s = 0.9999 / peak + outL *= s + outR *= s } - chunk.writeInt16LE(clamp16Bit(outL * 32768), i) - chunk.writeInt16LE(clamp16Bit(outR * 32768), i + 2) - wi++ + chunk.writeInt16LE(clamp16Bit(outL * SCALE_16), bi) + chunk.writeInt16LE(clamp16Bit(outR * SCALE_16), bi + 2) } this.lp_left_x1 = lpLx1 @@ -256,6 +263,7 @@ export default class Karaoke { this.lp_right_x2 = lpRx2 this.lp_right_y1 = lpRy1 this.lp_right_y2 = lpRy2 + this.hp_left_x1 = hpLx1 this.hp_left_x2 = hpLx2 this.hp_left_y1 = hpLy1 @@ -265,7 +273,7 @@ export default class Karaoke { this.hp_right_y1 = hpRy1 this.hp_right_y2 = hpRy2 - this._prevGain = smoothedTarget + this._prevGain = target return chunk } } diff --git a/src/playback/filters/phaser.js b/src/playback/filters/phaser.js index 255716a..9cb52cd 100644 --- a/src/playback/filters/phaser.js +++ b/src/playback/filters/phaser.js @@ -88,4 +88,15 @@ export default class Phaser { return chunk } + + clear() { + for (const filter of [...this.leftFilters, ...this.rightFilters]) { + filter.x1 = 0 + filter.y1 = 0 + } + this.lastLeftFeedback = 0 + this.lastRightFeedback = 0 + this.leftLfo.phase = 0 + this.rightLfo.phase = Math.PI / 2 + } } diff --git a/src/playback/filters/reverb.js b/src/playback/filters/reverb.js index 4423420..1318759 100644 --- a/src/playback/filters/reverb.js +++ b/src/playback/filters/reverb.js @@ -1,159 +1,172 @@ -import { SAMPLE_RATE } from '../../constants.js' -import { clamp16Bit } from './dsp/clamp16Bit.js' -import Allpass from './dsp/allpass.js' -import DelayLine from './dsp/delay.js' - -const COMB_DELAYS = [1116, 1188, 1277, 1356, 1422, 1491, 1557, 1617] -const ALLPASS_DELAYS = [556, 441, 341, 225] -const STEREO_SPREAD = 23 -const SCALE_WET = 3.0 -const SCALE_DRY = 2.0 -const SCALE_DAMP = 0.4 -const SCALE_ROOM = 0.28 -const OFFSET_ROOM = 0.7 - -class CombFilter { - constructor(size) { - this.buffer = new DelayLine(size) - this.filterStore = 0 - this.damp1 = 0 - this.damp2 = 0 - this.feedback = 0 - } - - setDamp(val) { - this.damp1 = val - this.damp2 = 1 - val - } - - setFeedback(val) { - this.feedback = val - } - - process(input) { - const output = this.buffer.read(0) - this.filterStore = output * this.damp2 + this.filterStore * this.damp1 - this.buffer.write(clamp16Bit(input + this.filterStore * this.feedback)) - return output - } - - clear() { - this.buffer.clear() - this.filterStore = 0 - } -} - -export default class Reverb { - constructor() { - this.priority = 10 - - this.combFiltersL = COMB_DELAYS.map( - (delay) => new CombFilter(Math.floor((delay * SAMPLE_RATE) / 44100)) - ) - this.combFiltersR = COMB_DELAYS.map( - (delay) => - new CombFilter( - Math.floor(((delay + STEREO_SPREAD) * SAMPLE_RATE) / 44100) - ) - ) - - 
this.allpassFiltersL = ALLPASS_DELAYS.map( - (delay) => new DelayLine(Math.floor((delay * SAMPLE_RATE) / 44100)) - ) - this.allpassFiltersR = ALLPASS_DELAYS.map( - (delay) => - new DelayLine( - Math.floor(((delay + STEREO_SPREAD) * SAMPLE_RATE) / 44100) - ) - ) - - this.allpassCoeff = 0.5 - - this.allpassStateL = ALLPASS_DELAYS.map(() => ({ x1: 0, y1: 0 })) - this.allpassStateR = ALLPASS_DELAYS.map(() => ({ x1: 0, y1: 0 })) - - this.wet = 0 - this.dry = 1.0 - this.roomSize = 0.5 - this.damping = 0.5 - this.width = 1.0 - } - - update(filters) { - const settings = filters.reverb || {} - - const mix = Math.max(0, Math.min(settings.mix || 0, 1.0)) - this.wet = mix * SCALE_WET - this.dry = (1.0 - mix) * SCALE_DRY - - this.roomSize = Math.max(0, Math.min(settings.roomSize || 0.5, 1.0)) - const roomScaled = this.roomSize * SCALE_ROOM + OFFSET_ROOM - - this.damping = Math.max(0, Math.min(settings.damping || 0.5, 1.0)) - const dampScaled = this.damping * SCALE_DAMP - - this.width = Math.max(0, Math.min(settings.width || 1.0, 1.0)) - - for (const comb of [...this.combFiltersL, ...this.combFiltersR]) { - comb.setFeedback(roomScaled) - comb.setDamp(dampScaled) - } - } - - process(chunk) { - if (this.wet === 0) { - return chunk - } - - for (let i = 0; i < chunk.length; i += 4) { - const leftInput = chunk.readInt16LE(i) - const rightInput = chunk.readInt16LE(i + 2) - - const monoInput = (leftInput + rightInput) * 0.5 - - let leftOut = 0 - let rightOut = 0 - - for (let j = 0; j < this.combFiltersL.length; j++) { - leftOut += this.combFiltersL[j].process(monoInput) - rightOut += this.combFiltersR[j].process(monoInput) - } - - for (let j = 0; j < this.allpassFiltersL.length; j++) { - leftOut = this.processAllpass( - leftOut, - this.allpassFiltersL[j], - this.allpassStateL[j] - ) - rightOut = this.processAllpass( - rightOut, - this.allpassFiltersR[j], - this.allpassStateR[j] - ) - } - - const wet1 = this.wet * (this.width * 0.5 + 0.5) - const wet2 = this.wet * ((1.0 - this.width) * 0.5) - - const finalLeft = leftInput * this.dry + leftOut * wet1 + rightOut * wet2 - const finalRight = - rightInput * this.dry + rightOut * wet1 + leftOut * wet2 - - chunk.writeInt16LE(clamp16Bit(finalLeft), i) - chunk.writeInt16LE(clamp16Bit(finalRight), i + 2) - } - - return chunk - } - - processAllpass(input, delayLine, state) { - const delayed = delayLine.read(0) - const output = - -input + delayed + this.allpassCoeff * (input - state.y1) - - delayLine.write(clamp16Bit(input)) - state.y1 = output - - return output - } -} +import { SAMPLE_RATE } from '../../constants.js' +import { clamp16Bit } from './dsp/clamp16Bit.js' +import DelayLine from './dsp/delay.js' + +const COMB_DELAYS = [1116, 1188, 1277, 1356, 1422, 1491, 1557, 1617] +const ALLPASS_DELAYS = [556, 441, 341, 225] +const STEREO_SPREAD = 23 +const SCALE_WET = 3.0 +const SCALE_DRY = 2.0 +const SCALE_DAMP = 0.4 +const SCALE_ROOM = 0.28 +const OFFSET_ROOM = 0.7 + +class CombFilter { + constructor(size) { + this.buffer = new DelayLine(size) + this.filterStore = 0 + this.damp1 = 0 + this.damp2 = 0 + this.feedback = 0 + } + + setDamp(val) { + this.damp1 = val + this.damp2 = 1 - val + } + + setFeedback(val) { + this.feedback = val + } + + process(input) { + const output = this.buffer.read(0) + this.filterStore = output * this.damp2 + this.filterStore * this.damp1 + this.buffer.write(clamp16Bit(input + this.filterStore * this.feedback)) + return output + } + + clear() { + this.buffer.clear() + this.filterStore = 0 + } +} + +export default class Reverb { + constructor() { + 
this.priority = 10 + + this.combFiltersL = COMB_DELAYS.map( + (delay) => new CombFilter(Math.floor((delay * SAMPLE_RATE) / 44100)) + ) + this.combFiltersR = COMB_DELAYS.map( + (delay) => + new CombFilter( + Math.floor(((delay + STEREO_SPREAD) * SAMPLE_RATE) / 44100) + ) + ) + + this.allpassFiltersL = ALLPASS_DELAYS.map( + (delay) => new DelayLine(Math.floor((delay * SAMPLE_RATE) / 44100)) + ) + this.allpassFiltersR = ALLPASS_DELAYS.map( + (delay) => + new DelayLine( + Math.floor(((delay + STEREO_SPREAD) * SAMPLE_RATE) / 44100) + ) + ) + + this.allpassCoeff = 0.5 + + this.allpassStateL = ALLPASS_DELAYS.map(() => ({ x1: 0, y1: 0 })) + this.allpassStateR = ALLPASS_DELAYS.map(() => ({ x1: 0, y1: 0 })) + + this.wet = 0 + this.dry = 1.0 + this.roomSize = 0.5 + this.damping = 0.5 + this.width = 1.0 + } + + update(filters) { + const settings = filters.reverb || {} + + const mix = Math.max(0, Math.min(settings.mix || 0, 1.0)) + this.wet = mix * SCALE_WET + this.dry = (1.0 - mix) * SCALE_DRY + + this.roomSize = Math.max(0, Math.min(settings.roomSize || 0.5, 1.0)) + const roomScaled = this.roomSize * SCALE_ROOM + OFFSET_ROOM + + this.damping = Math.max(0, Math.min(settings.damping || 0.5, 1.0)) + const dampScaled = this.damping * SCALE_DAMP + + this.width = Math.max(0, Math.min(settings.width || 1.0, 1.0)) + + for (const comb of [...this.combFiltersL, ...this.combFiltersR]) { + comb.setFeedback(roomScaled) + comb.setDamp(dampScaled) + } + } + + process(chunk) { + if (this.wet === 0) { + return chunk + } + + for (let i = 0; i < chunk.length; i += 4) { + const leftInput = chunk.readInt16LE(i) + const rightInput = chunk.readInt16LE(i + 2) + + const monoInput = (leftInput + rightInput) * 0.5 + + let leftOut = 0 + let rightOut = 0 + + for (let j = 0; j < this.combFiltersL.length; j++) { + leftOut += this.combFiltersL[j].process(monoInput) + rightOut += this.combFiltersR[j].process(monoInput) + } + + for (let j = 0; j < this.allpassFiltersL.length; j++) { + leftOut = this.processAllpass( + leftOut, + this.allpassFiltersL[j], + this.allpassStateL[j] + ) + rightOut = this.processAllpass( + rightOut, + this.allpassFiltersR[j], + this.allpassStateR[j] + ) + } + + const wet1 = this.wet * (this.width * 0.5 + 0.5) + const wet2 = this.wet * ((1.0 - this.width) * 0.5) + + const finalLeft = leftInput * this.dry + leftOut * wet1 + rightOut * wet2 + const finalRight = + rightInput * this.dry + rightOut * wet1 + leftOut * wet2 + + chunk.writeInt16LE(clamp16Bit(finalLeft), i) + chunk.writeInt16LE(clamp16Bit(finalRight), i + 2) + } + + return chunk + } + + processAllpass(input, delayLine, state) { + const delayed = delayLine.read(0) + const output = -input + delayed + this.allpassCoeff * (input - state.y1) + + delayLine.write(clamp16Bit(input)) + state.y1 = output + + return output + } + + clear() { + for (const comb of [...this.combFiltersL, ...this.combFiltersR]) { + comb.clear() + } + + for (const allpass of [...this.allpassFiltersL, ...this.allpassFiltersR]) { + allpass.clear() + } + + for (const state of [...this.allpassStateL, ...this.allpassStateR]) { + state.x1 = 0 + state.y1 = 0 + } + } +} diff --git a/src/playback/filtersManager.js b/src/playback/filtersManager.js index 5c90937..199fad5 100644 --- a/src/playback/filtersManager.js +++ b/src/playback/filtersManager.js @@ -14,16 +14,35 @@ import Lowpass from './filters/lowpass.js' import Phaser from './filters/phaser.js' import Reverb from './filters/reverb.js' import Rotation from './filters/rotation.js' +import Spatial from './filters/spatial.js' import 
Timescale from './filters/timescale.js' import Tremolo from './filters/tremolo.js' import Vibrato from './filters/vibrato.js' -import Spatial from './filters/spatial.js' + +const FILTER_CLASSES = { + tremolo: Tremolo, + vibrato: Vibrato, + lowpass: Lowpass, + highpass: Highpass, + rotation: Rotation, + karaoke: Karaoke, + distortion: Distortion, + channelMix: ChannelMix, + equalizer: Equalizer, + chorus: Chorus, + compressor: Compressor, + echo: Echo, + phaser: Phaser, + timescale: Timescale, + spatial: Spatial +} export class FiltersManager extends Transform { constructor(nodelink, options = {}) { super(options) this.nodelink = nodelink this.activeFilters = [] + this.filterInstances = {} this.availableFilters = { tremolo: new Tremolo(), @@ -47,7 +66,7 @@ export class FiltersManager extends Transform { if (this.nodelink.extensions?.filters) { for (const [name, filter] of this.nodelink.extensions.filters) { - this.availableFilters[name] = filter + this.filterInstances[name] = filter } } @@ -56,15 +75,23 @@ export class FiltersManager extends Transform { update(filters) { this.activeFilters = [] + const settings = filters.filters || filters - for (const filterName in this.availableFilters) { - const filter = this.availableFilters[filterName] + for (const name in settings) { + const config = settings[name] + if (!config) continue - if (filters.filters?.[filterName]) { - this.activeFilters.push(filter) + if (FILTER_CLASSES[name] && !this.filterInstances[name]) { + this.filterInstances[name] = new FILTER_CLASSES[name]() } - filter.update(filters.filters || filters) + const instance = this.filterInstances[name] + if (instance) { + this.activeFilters.push(instance) + if (typeof instance.update === 'function') { + instance.update(settings) + } + } } this.activeFilters.sort((a, b) => (a.priority || 99) - (b.priority || 99)) @@ -78,7 +105,7 @@ export class FiltersManager extends Transform { } } - _transform(chunk, encoding, callback) { + _transform(chunk, _encoding, callback) { if (this.activeFilters.length === 0) { this.push(chunk) return callback() diff --git a/src/playback/opus/Opus.js b/src/playback/opus/Opus.js index 56df486..08ef2b5 100644 --- a/src/playback/opus/Opus.js +++ b/src/playback/opus/Opus.js @@ -1,13 +1,14 @@ -import { Transform } from 'node:stream' -import { createRequire } from 'node:module' import { Buffer } from 'node:buffer' +import { createRequire } from 'node:module' +import { Transform } from 'node:stream' const require = createRequire(import.meta.url) const OPUS_CTL = { BITRATE: 4002, FEC: 4012, - PLP: 4014 + PLP: 4014, + DTX: 4016 } const RING_SIZE = 512 * 1024 @@ -16,6 +17,7 @@ let ACTIVE_LIB = null const _getLib = () => { if (ACTIVE_LIB) return ACTIVE_LIB const libs = [ + { name: '@toddynnn/voice-opus', pick: (m) => m.OpusEncoder }, { name: 'toddy-mediaplex', pick: (m) => m.OpusEncoder }, { name: '@discordjs/opus', pick: (m) => m.OpusEncoder }, { name: 'opusscript', pick: (m) => m } @@ -48,17 +50,14 @@ const _createInstance = (rate, channels, app) => { return { instance: new Encoder(rate, channels, type), lib } } -const _applyCtl = (enc, libName, id, val) => { +const _applyCtl = (enc, _libName, id, val) => { if (!enc) throw new Error('Encoder not ready.') - if (libName === 'toddy-mediaplex') { - if (id === OPUS_CTL.BITRATE) return enc.setBitrate(val) - if (id === OPUS_CTL.FEC) return enc.setOption('inband_fec', val) - if (id === OPUS_CTL.PLP) - return enc.setOption('packet_loss_expectation', val) + if (id === OPUS_CTL.BITRATE) { + return enc.setBitrate(val) } - const fn 
= enc.applyEncoderCTL || enc.encoderCTL + const fn = enc.applyEncoderCTL || enc.applyEncoderCtl || enc.encoderCTL if (typeof fn === 'function') fn.call(enc, id, val) } @@ -166,6 +165,9 @@ export class Encoder extends Transform { const val = p < 0 ? 0 : p > 100 ? 100 : Math.round(p) _applyCtl(this.enc, this.lib.name, OPUS_CTL.PLP, val) } + setDTX(enabled = false) { + _applyCtl(this.enc, this.lib.name, OPUS_CTL.DTX, enabled ? 1 : 0) + } } export class Decoder extends Transform { diff --git a/src/playback/player.js b/src/playback/player.js index c3b20d3..7cdad69 100644 --- a/src/playback/player.js +++ b/src/playback/player.js @@ -37,7 +37,12 @@ export class Player { this.position = 0 this.connStatus = 'idle' this.connection = null - this.voice = { sessionId: null, token: null, endpoint: null } + this.voice = { + sessionId: null, + token: null, + endpoint: null, + channelId: null + } this.streamInfo = null this.lastManualReconnect = 0 this.audioMixer = null @@ -138,6 +143,7 @@ export class Player { this.connection = discordVoice.joinVoiceChannel({ guildId: this.guildId, userId: this.session.userId, + channelId: this.voice.channelId || this.guildId, // dave somehow accepted guildId lol encryption: this.nodelink.options?.audio.encryption }) this.connection.on('stateChange', (_, s) => { @@ -163,6 +169,10 @@ export class Player { this._lastStreamDataTime = Date.now() }) }) + + if (this.nodelink.voiceRelay?.attach) { + this.nodelink.voiceRelay.attach(this.connection, this.guildId) + } } _onConn(state) { @@ -375,7 +385,7 @@ export class Player { reason: reason }) - if (this.audioMixer && this.audioMixer.autoCleanup) { + if (this.audioMixer?.autoCleanup) { this.audioMixer.clearLayers('MAIN_ENDED') } } @@ -1097,7 +1107,7 @@ export class Player { updateVoice(voicePayload = {}, force = false) { if (this.destroying) return - const { sessionId, token, endpoint } = voicePayload + const { sessionId, token, endpoint, channelId } = voicePayload let changed = false if (sessionId !== undefined && this.voice.sessionId !== sessionId) { @@ -1112,6 +1122,10 @@ export class Player { this.voice.endpoint = endpoint changed = true } + if (channelId !== undefined && this.voice.channelId !== channelId) { + this.voice.channelId = channelId + changed = true + } if (this.voice.sessionId && this.voice.token && this.voice.endpoint) { if (!changed && !force) { @@ -1129,6 +1143,9 @@ export class Player { `Updating voice state for guild ${this.guildId}` ) if (!this.connection) this._initConnection() + if (this.voice.channelId) { + this.connection.channelId = this.voice.channelId + } this.connection.voiceStateUpdate({ session_id: this.voice.sessionId }) this.connection.voiceServerUpdate({ token: this.voice.token, @@ -1230,7 +1247,7 @@ export class Player { } const fetched = await this.nodelink.sources.getTrackStream( - trackPayload.info, + urlData.newTrack?.info || trackPayload.info, urlData.url, urlData.protocol, urlData.additionalData diff --git a/src/playback/streamProcessor.js b/src/playback/streamProcessor.js index bf8cc1e..eccaf5c 100644 --- a/src/playback/streamProcessor.js +++ b/src/playback/streamProcessor.js @@ -1,17 +1,19 @@ import { Buffer } from 'node:buffer' -import { PassThrough, Readable, Transform, pipeline } from 'node:stream' +import { PassThrough, pipeline, Readable, Transform } from 'node:stream' import LibSampleRate from '@alexanderolsen/libsamplerate-js' import FAAD2NodeDecoder from '@ecliptia/faad2-wasm/faad2_node_decoder.js' import { SeekError, seekableStream } from '@ecliptia/seekable-stream' +import { 
SymphoniaDecoder } from '@toddynnn/symphonia-decoder' import * as MP4Box from 'mp4box' - import { normalizeFormat, SupportedFormats } from '../constants.js' +import { logger } from '../utils.js' +import FlvDemuxer from './demuxers/Flv.js' import WebmOpusDemuxer from './demuxers/WebmOpus.js' import { FiltersManager } from './filtersManager.js' import { Decoder as OpusDecoder, Encoder as OpusEncoder } from './opus/Opus.js' +import { RingBuffer } from './RingBuffer.js' import { VolumeTransformer } from './VolumeTransformer.js' -import { SymphoniaDecoder } from '@toddynnn/symphonia-decoder' const AUDIO_CONFIG = Object.freeze({ sampleRate: 48000, @@ -25,6 +27,8 @@ const BUFFER_THRESHOLDS = Object.freeze({ minCompressed: 128 * 1024 }) +const AAC_BUFFER_SIZE = 2 * 1024 * 1024 // 2MB + const AUDIO_CONSTANTS = Object.freeze({ pcmFloatFactor: 32767, maxDecodesPerTick: 5, @@ -37,9 +41,9 @@ const MPEGTS_CONFIG = Object.freeze({ aacStreamType: 0x0f }) -const DOWNMIX_COEFFICIENTS = Object.freeze({ - center: 0.7071, - surround: 0.7071, +const _DOWNMIX_COEFFICIENTS = Object.freeze({ + center: Math.SQRT1_2, + surround: Math.SQRT1_2, lfe: 0.5 }) @@ -161,6 +165,8 @@ const _isMp4Format = (type) => const _isWebmFormat = (type) => type.indexOf('webm') !== -1 +const _isFlvFormat = (type) => type.indexOf('flv') !== -1 + class BaseAudioResource { constructor() { this.pipes = [] @@ -303,7 +309,7 @@ class SymphoniaDecoderStream extends Transform { return this.decoder !== null && !this._aborted && !this.isFinished } - _transform(chunk, encoding, callback) { + _transform(chunk, _encoding, callback) { if (this._aborted || !this.decoder) return callback() this.decoder.push(chunk) @@ -523,53 +529,40 @@ class MPEGTSToAACStream extends Transform { highWaterMark: AUDIO_CONFIG.highWaterMark }) - this.buffer = EMPTY_BUFFER + this.ringBuffer = new RingBuffer(BUFFER_THRESHOLDS.maxCompressed) this.patPmtId = null this.aacPid = null - this.aacData = EMPTY_BUFFER + this.aacData = [] this.aacPidFound = false this._aborted = false } abort() { this._aborted = true - this.buffer = EMPTY_BUFFER - this.aacData = EMPTY_BUFFER + this.ringBuffer.clear() + this.aacData = [] } - _transform(chunk, encoding, callback) { + _transform(chunk, _encoding, callback) { if (this._aborted) { callback() return } try { - const data = - this.buffer.length > 0 ? 
Buffer.concat([this.buffer, chunk]) : chunk - - this.buffer = EMPTY_BUFFER - - const dataLength = data.length - let position = 0 + this.ringBuffer.write(chunk) while ( - position <= dataLength - MPEGTS_CONFIG.packetSize && + this.ringBuffer.length >= MPEGTS_CONFIG.packetSize && !this._aborted ) { - if (data[position] !== MPEGTS_CONFIG.syncByte) { - const syncIndex = data.indexOf(MPEGTS_CONFIG.syncByte, position + 1) - if (syncIndex === -1) { - position = dataLength - break - } - position = syncIndex + const head = this.ringBuffer.peek(1) + if (head[0] !== MPEGTS_CONFIG.syncByte) { + this.ringBuffer.read(1) continue } - const packet = data.subarray( - position, - position + MPEGTS_CONFIG.packetSize - ) + const packet = this.ringBuffer.read(MPEGTS_CONFIG.packetSize) const payloadUnitStartIndicator = !!(packet[1] & 0x40) const pid = ((packet[1] & 0x1f) << 8) + packet[2] @@ -579,18 +572,11 @@ class MPEGTSToAACStream extends Transform { if (adaptationFieldControl > 1) { offset = 5 + packet[4] if (offset >= MPEGTS_CONFIG.packetSize) { - position += MPEGTS_CONFIG.packetSize continue } } this._processPacket(packet, pid, payloadUnitStartIndicator, offset) - - position += MPEGTS_CONFIG.packetSize - } - - if (position < dataLength && !this._aborted) { - this.buffer = data.subarray(position) } callback() @@ -645,8 +631,8 @@ class MPEGTSToAACStream extends Transform { _processAACPacket(packet, pusi, offset) { if (pusi) { if (this.aacData.length > 0 && !this._aborted) { - this.push(this.aacData) - this.aacData = EMPTY_BUFFER + this.push(Buffer.concat(this.aacData)) + this.aacData = [] } const pesHeaderLength = packet[offset + 8] @@ -656,23 +642,23 @@ class MPEGTSToAACStream extends Transform { } if (!this._aborted) { - this.aacData = Buffer.concat([this.aacData, packet.subarray(offset)]) + this.aacData.push(packet.subarray(offset)) } } _flush(callback) { if (this.aacData.length > 0 && !this._aborted) { - this.push(this.aacData) + this.push(Buffer.concat(this.aacData)) } - this.aacData = EMPTY_BUFFER - this.buffer = EMPTY_BUFFER + this.aacData = [] + this.ringBuffer.clear() callback() } _destroy(err, callback) { this._aborted = true - this.buffer = EMPTY_BUFFER - this.aacData = EMPTY_BUFFER + this.ringBuffer.dispose() + this.aacData = [] super._destroy(err, callback) } } @@ -688,7 +674,7 @@ class AACDecoderStream extends Transform { this.isDecoderReady = false this.isConfigured = false this.pendingChunks = [] - this.buffer = Buffer.alloc(0) + this.ringBuffer = new RingBuffer(AAC_BUFFER_SIZE) this.resamplingQuality = options?.resamplingQuality || 'fastest' this.resamplerCreationPromise = null @@ -701,6 +687,13 @@ class AACDecoderStream extends Transform { .catch((err) => this.emit('error', err)) } + _destroy(err, cb) { + this.ringBuffer.dispose() + if (this.decoder) this.decoder.free?.() + if (this.resampler) this.resampler.destroy?.() + super._destroy(err, cb) + } + _downmixToStereo(interleavedPCM, channels, samplesPerChannel) { if (channels === 2) return interleavedPCM @@ -715,8 +708,8 @@ class AACDecoderStream extends Transform { return stereo } - const CENTER_MIX = 0.7071 - const SURROUND_MIX = 0.7071 + const CENTER_MIX = Math.SQRT1_2 + const SURROUND_MIX = Math.SQRT1_2 const LFE_MIX = 0.5 for (let i = 0; i < samplesPerChannel; i++) { @@ -790,7 +783,10 @@ class AACDecoderStream extends Transform { this.pendingChunks = [] } - _findADTSFrame(buffer) { + _findADTSFrame() { + const buffer = this.ringBuffer.peek(this.ringBuffer.length) + if (!buffer) return null + for (let i = 0; i < buffer.length - 7; 
i++) { const syncword = (buffer[i] << 4) | (buffer[i + 1] >> 4) if (syncword === 0xfff) { @@ -821,18 +817,18 @@ class AACDecoderStream extends Transform { this._decodeChunk(chunk, encoding, callback) } - async _decodeChunk(chunk, encoding, callback) { + async _decodeChunk(chunk, _encoding, callback) { try { - this.buffer = Buffer.concat([this.buffer, chunk]) + this.ringBuffer.write(chunk) if (!this.isConfigured) { - const frameInfo = this._findADTSFrame(this.buffer) + const frameInfo = this._findADTSFrame() if (frameInfo) { try { await this.decoder.configure(frameInfo.frame, true) this.isConfigured = true } catch (err) { - this.buffer = this.buffer.subarray(frameInfo.end) + this.ringBuffer.read(frameInfo.end) return callback(err) } } else { @@ -840,8 +836,8 @@ class AACDecoderStream extends Transform { } } - while (this.buffer.length > 0) { - const frameInfo = this._findADTSFrame(this.buffer) + while (this.ringBuffer.length > 0) { + const frameInfo = this._findADTSFrame() if (!frameInfo) break @@ -892,11 +888,11 @@ class AACDecoderStream extends Transform { this.push(Buffer.from(pcmInt16.buffer)) } } - } catch (decodeErr) { + } catch (_decodeErr) { // Skip bad frame } - this.buffer = this.buffer.subarray(frameInfo.end) + this.ringBuffer.read(frameInfo.end) } callback() @@ -906,9 +902,9 @@ class AACDecoderStream extends Transform { } _flush(callback) { - if (this.buffer.length > 0 && this.isConfigured) { + if (this.ringBuffer.length > 0 && this.isConfigured) { try { - const frameInfo = this._findADTSFrame(this.buffer) + const frameInfo = this._findADTSFrame() if (frameInfo) { const result = this.decoder.decode(frameInfo.frame) if (result?.pcm) { @@ -919,7 +915,7 @@ class AACDecoderStream extends Transform { this.push(Buffer.from(pcmInt16.buffer)) } } - } catch (err) {} + } catch (_err) {} } if (this.resampler) this.resampler.destroy?.() @@ -974,7 +970,7 @@ class MP4ToAACStream extends Transform { } } - this.mp4boxFile.onSamples = (id, user, samples) => { + this.mp4boxFile.onSamples = (_id, _user, samples) => { if (this._aborted) return try { @@ -1056,7 +1052,7 @@ class MP4ToAACStream extends Transform { } } - _transform(chunk, encoding, callback) { + _transform(chunk, _encoding, callback) { if (this._aborted || !this.mp4boxFile) { callback() return @@ -1277,7 +1273,7 @@ class FMP4ToAACStream extends Transform { return null } - _transform(chunk, encoding, callback) { + _transform(chunk, _encoding, callback) { try { if (!this.initSegmentProcessed && chunk.length > 8) { const boxType = chunk.toString('ascii', 4, 8) @@ -1297,7 +1293,7 @@ class FMP4ToAACStream extends Transform { } callback() - } catch (err) { + } catch (_err) { callback() } } @@ -1307,13 +1303,78 @@ class FMP4ToAACStream extends Transform { } } +class FLVToAACStream extends Transform { + constructor(options) { + super(options) + this.demuxer = new FlvDemuxer() + this.audioConfig = null + this._aborted = false + + this.demuxer.on('data', (audioTag) => { + if (this._aborted) return + this._processAudioTag(audioTag) + }) + + this.demuxer.on('error', (err) => { + if (!this._aborted) this.emit('error', err) + }) + } + + abort() { + this._aborted = true + this.demuxer.destroy() + } + + _processAudioTag(tag) { + const header = tag[0] + const format = (header & 0xf0) >> 4 + + if (format === 10) { + const aacPacketType = tag[1] + if (aacPacketType === 0) { + this.audioConfig = this._parseAudioSpecificConfig(tag.subarray(2)) + } else if (aacPacketType === 1 && this.audioConfig) { + const adtsHeader = _createAdtsHeader( + tag.length - 2, 
+ this.audioConfig.profile, + this.audioConfig.samplingIndex, + this.audioConfig.channelCount + ) + this.push(Buffer.concat([adtsHeader, tag.subarray(2)])) + } + } else if (format === 2) { + this.push(tag.subarray(1)) + } + } + + _parseAudioSpecificConfig(data) { + const objectType = (data[0] & 0xf8) >> 3 + const samplingIndex = ((data[0] & 0x07) << 1) | ((data[1] & 0x80) >> 7) + const channelConfig = (data[1] & 0x78) >> 3 + + return { + profile: objectType, + samplingIndex, + channelCount: channelConfig + } + } + + _transform(chunk, encoding, callback) { + this.demuxer.write(chunk, encoding, callback) + } + + _flush(callback) { + this.demuxer.end(callback) + } +} + class MixerTransform extends Transform { constructor(audioMixer) { super() this.audioMixer = audioMixer } - _transform(mainChunk, encoding, callback) { + _transform(mainChunk, _encoding, callback) { if ( !this.audioMixer || !this.audioMixer.enabled || @@ -1326,7 +1387,7 @@ class MixerTransform extends Transform { const layerChunks = this.audioMixer.readLayerChunks(mainChunk.length) const mixed = this.audioMixer.mixBuffers(mainChunk, layerChunks) callback(null, mixed) - } catch (error) { + } catch (_error) { callback(null, mainChunk) } } @@ -1385,6 +1446,9 @@ class StreamAudioResource extends BaseAudioResource { case SupportedFormats.AAC: return this._createAACPipeline(stream, type, resamplingQuality) + case SupportedFormats.FLV: + return this._createFLVPipeline(stream, type, resamplingQuality) + case SupportedFormats.MPEG: case SupportedFormats.FLAC: case SupportedFormats.OGG_VORBIS: @@ -1399,9 +1463,24 @@ class StreamAudioResource extends BaseAudioResource { } } + _createFLVPipeline(stream, _type, resamplingQuality) { + const demuxer = new FLVToAACStream() + const decoder = new AACDecoderStream({ resamplingQuality }) + + this.pipes.push(demuxer, decoder) + + pipeline(stream, demuxer, decoder, (err) => { + if (err && !this._destroyed) { + this.stream?.emit('error', err) + } + }) + + return decoder + } + _createAACPipeline(stream, type, resamplingQuality) { const lowerType = type.toLowerCase() - let aacStream = stream + const _aacStream = stream const streams = [stream] if (_isFmp4Format(lowerType)) { @@ -1484,6 +1563,8 @@ class StreamAudioResource extends BaseAudioResource { frameSize: AUDIO_CONFIG.frameSize }) + opusEncoder.setDTX(false) + const streams = [pcmStream, volumeTransformer] this.pipes.push(volumeTransformer) @@ -1573,7 +1654,8 @@ class StreamAudioResource extends BaseAudioResource { 'FLAC (audio/flac)', 'OGG Vorbis (audio/ogg, audio/vorbis)', 'WAV (audio/wav)', - 'Opus (webm/opus, ogg/opus)' + 'Opus (webm/opus, ogg/opus)', + 'FLV (video/x-flv, flv)' ] return new Error( @@ -1644,30 +1726,34 @@ export const createSeekeableAudioResource = async ( } } -export const createPCMStream = (stream, type, nodelink, volume = 1.0) => { +export const createPCMStream = ( + stream, + type, + nodelink, + volume = 1.0, + filters = {} +) => { const resamplingQuality = nodelink.options.audio.resamplingQuality || 'fastest' const normalizedType = normalizeFormat(type) - let pcmStream + const streams = [stream] switch (normalizedType) { case SupportedFormats.AAC: { const lowerType = type.toLowerCase() - const streams = [stream] if (_isFmp4Format(lowerType)) streams.push(new FMP4ToAACStream()) else if (_isMpegtsFormat(lowerType)) streams.push(new MPEGTSToAACStream()) else if (_isMp4Format(lowerType)) streams.push(new MP4ToAACStream()) - const decoder = new AACDecoderStream({ resamplingQuality }) - streams.push(decoder) - - 
pipeline(streams, (err) => { - if (err) decoder.emit('error', err) - }) + streams.push(new AACDecoderStream({ resamplingQuality })) + break + } - pcmStream = decoder + case SupportedFormats.FLV: { + streams.push(new FLVToAACStream()) + streams.push(new AACDecoderStream({ resamplingQuality })) break } @@ -1675,33 +1761,21 @@ export const createPCMStream = (stream, type, nodelink, volume = 1.0) => { case SupportedFormats.FLAC: case SupportedFormats.OGG_VORBIS: case SupportedFormats.WAV: { - const decoder = new SymphoniaDecoderStream({ resamplingQuality }) - pipeline(stream, decoder, (err) => { - if (err) decoder.emit('error', err) - }) - pcmStream = decoder + streams.push(new SymphoniaDecoderStream({ resamplingQuality })) break } case SupportedFormats.OPUS: { - const decoder = new OpusDecoder({ - rate: AUDIO_CONFIG.sampleRate, - channels: AUDIO_CONFIG.channels, - frameSize: AUDIO_CONFIG.frameSize - }) - if (_isWebmFormat(type.toLowerCase())) { - const demuxer = new WebmOpusDemuxer() - pipeline(stream, demuxer, decoder, (err) => { - if (err) decoder.emit('error', err) - }) - } else { - pipeline(stream, decoder, (err) => { - if (err) decoder.emit('error', err) - }) + streams.push(new WebmOpusDemuxer()) } - - pcmStream = decoder + streams.push( + new OpusDecoder({ + rate: AUDIO_CONFIG.sampleRate, + channels: AUDIO_CONFIG.channels, + frameSize: AUDIO_CONFIG.frameSize + }) + ) break } @@ -1709,13 +1783,30 @@ export const createPCMStream = (stream, type, nodelink, volume = 1.0) => { throw new Error(`Unsupported audio format: '${type}'`) } - if (volume !== 1.0) { - const volumeTransformer = new VolumeTransformer({ type: 's16le', volume }) - pipeline(pcmStream, volumeTransformer, (err) => { - if (err) volumeTransformer.emit('error', err) - }) - return volumeTransformer + streams.push(new VolumeTransformer({ type: 's16le', volume })) + streams.push(new FiltersManager(nodelink, filters)) + + for (const s of streams) { + if (s !== stream) { + s.on('error', (err) => + logger( + 'error', + 'PCMStream', + `Component error (${s.constructor.name}): ${err.message} (${err.code})` + ) + ) + } } - return pcmStream + pipeline(streams, (err) => { + if (err && err.code !== 'ERR_STREAM_PREMATURE_CLOSE') { + logger( + 'error', + 'PCMStream', + `Internal processing pipeline failed: ${err.message}` + ) + } + }) + + return streams[streams.length - 1] } diff --git a/src/sourceWorker.js b/src/sourceWorker.js new file mode 100644 index 0000000..3e2bfa0 --- /dev/null +++ b/src/sourceWorker.js @@ -0,0 +1,371 @@ +import net from 'node:net' +import os from 'node:os' +import { fileURLToPath } from 'node:url' +import { + isMainThread, + parentPort, + Worker, + workerData +} from 'node:worker_threads' +import * as utils from './utils.js' + +const __filename = fileURLToPath(import.meta.url) + +if (isMainThread) { + let config + try { + config = (await import('../config.js')).default + } catch { + config = (await import('../config.default.js')).default + } + + const specConfig = config.cluster?.specializedSourceWorker || {} + + utils.initLogger(config) + + const nodelink = { + options: config, + logger: utils.logger + } + + const threadCount = specConfig.microWorkers || Math.min(2, os.cpus().length) + const TASKS_PER_WORKER = specConfig.tasksPerWorker || 32 + const workerPool = [] + const taskQueue = [] + + nodelink.logger( + 'info', + 'SourceWorker', + `Spawning ${threadCount} micro-workers for API tasks...` + ) + + for (let i = 0; i < threadCount; i++) { + const worker = new Worker(__filename, { + workerData: { config, 
silentLogs: specConfig.silentLogs, threadId: i + 1 } + }) + + worker.ready = false + worker.load = 0 + + worker.on('message', (msg) => { + if (msg.type === 'ready') { + worker.ready = true + nodelink.logger( + 'info', + 'SourceWorker', + `Micro-worker ${i + 1} is ready.` + ) + processNextTask() + } else if (msg.type === 'result') { + const { socketPath, id, result, error } = msg + finishTask(socketPath, id, result, error) + + worker.load = Math.max(0, worker.load - 1) + processNextTask() + } else if (msg.type === 'stream') { + sendStreamChunk(msg.socketPath, msg.id, msg.chunk) + } else if (msg.type === 'end') { + sendStreamEnd(msg.socketPath, msg.id) + worker.load = Math.max(0, worker.load - 1) + processNextTask() + } else if (msg.type === 'error') { + sendStreamError(msg.socketPath, msg.id, msg.error) + worker.load = Math.max(0, worker.load - 1) + processNextTask() + } + }) + + workerPool.push(worker) + } + + const sockets = new Map() + + async function getSocket(path) { + if (sockets.has(path)) return sockets.get(path) + return new Promise((resolve, reject) => { + const socket = net.createConnection(path, () => { + sockets.set(path, socket) + resolve(socket) + }) + socket.on('error', reject) + socket.on('close', () => sockets.delete(path)) + }) + } + + function withSocket(path, handler) { + const socket = sockets.get(path) + if (socket) { + handler(socket) + return + } + getSocket(path) + .then(handler) + .catch((e) => { + utils.logger( + 'error', + 'SourceWorker', + `Failed to send data back: ${e.message}` + ) + }) + } + + function finishTask(socketPath, id, result, error) { + getSocket(socketPath) + .then((socket) => { + if (error) { + sendFrame(socket, id, 2, Buffer.from(error, 'utf8')) + } else { + // result is already a string + sendFrame(socket, id, 0, Buffer.from(result, 'utf8')) + sendFrame(socket, id, 1, Buffer.alloc(0)) + } + }) + .catch((e) => { + utils.logger( + 'error', + 'SourceWorker', + `Failed to send result back: ${e.message}` + ) + }) + } + + function sendStreamChunk(socketPath, id, chunk) { + const payload = Buffer.isBuffer(chunk) ? 
chunk : Buffer.from(chunk) + withSocket(socketPath, (socket) => sendFrame(socket, id, 0, payload)) + } + + function sendStreamEnd(socketPath, id) { + withSocket(socketPath, (socket) => + sendFrame(socket, id, 1, Buffer.alloc(0)) + ) + } + + function sendStreamError(socketPath, id, error) { + const errorBuf = Buffer.from(String(error || 'Unknown error'), 'utf8') + withSocket(socketPath, (socket) => sendFrame(socket, id, 2, errorBuf)) + } + + function sendFrame(socket, id, type, payloadBuf) { + const idBuf = Buffer.from(id, 'utf8') + + const header = Buffer.alloc(6) + header.writeUInt8(idBuf.length, 0) + header.writeUInt8(type, 1) + header.writeUInt32BE(payloadBuf.length, 2) + + socket.write(Buffer.concat([header, idBuf, payloadBuf])) + } + + function processNextTask() { + if (taskQueue.length === 0) return + + let bestWorker = null + let minLoad = Infinity + + for (const worker of workerPool) { + if ( + worker.ready && + worker.load < TASKS_PER_WORKER && + worker.load < minLoad + ) { + bestWorker = worker + minLoad = worker.load + } + } + + if (bestWorker) { + const task = taskQueue.shift() + bestWorker.load++ + bestWorker.postMessage(task) + + if (taskQueue.length > 0) setImmediate(processNextTask) + } + } + + process.on('message', (msg) => { + if (msg.type !== 'sourceTask') return + taskQueue.push(msg.payload) + processNextTask() + }) + + process.send({ type: 'ready', pid: process.pid }) +} else { + const { config, silentLogs, threadId } = workerData + + if (silentLogs) { + config.logging = { ...config.logging, level: 'warn' } + } + utils.initLogger(config) + + const nodelink = { + options: config, + logger: utils.logger + } + + const [ + { createPCMStream }, + { default: SourceManager }, + { default: LyricsManager }, + { default: CredentialManager }, + { default: RoutePlannerManager }, + { default: StatsManager } + ] = await Promise.all([ + import('./playback/streamProcessor.js'), + import('./managers/sourceManager.js'), + import('./managers/lyricsManager.js'), + import('./managers/credentialManager.js'), + import('./managers/routePlannerManager.js'), + import('./managers/statsManager.js') + ]) + + nodelink.statsManager = new StatsManager(nodelink) + nodelink.credentialManager = new CredentialManager(nodelink) + nodelink.routePlanner = new RoutePlannerManager(nodelink) + nodelink.sources = new SourceManager(nodelink) + nodelink.lyrics = new LyricsManager(nodelink) + + await nodelink.credentialManager.load() + await nodelink.sources.loadFolder() + await nodelink.lyrics.loadFolder() + + parentPort.postMessage({ type: 'ready' }) + + const sendStreamChunkFromWorker = (id, socketPath, chunk) => { + parentPort.postMessage({ type: 'stream', id, socketPath, chunk }) + } + + const sendStreamEndFromWorker = (id, socketPath) => { + parentPort.postMessage({ type: 'end', id, socketPath }) + } + + const sendStreamErrorFromWorker = (id, socketPath, error) => { + parentPort.postMessage({ + type: 'error', + id, + socketPath, + error: String(error || 'Unknown error') + }) + } + + const handleLoadStream = async (id, socketPath, payload) => { + let fetched = null + let pcmStream = null + let finished = false + + const cleanup = () => { + if (pcmStream && !pcmStream.destroyed) pcmStream.destroy() + if (fetched?.stream && !fetched.stream.destroyed) fetched.stream.destroy() + } + + const finish = (err) => { + if (finished) return + finished = true + if (err) { + sendStreamErrorFromWorker(id, socketPath, err.message || err) + } else { + sendStreamEndFromWorker(id, socketPath) + } + cleanup() + } + + try { + 
const trackInfo = payload?.decodedTrackInfo + if (!trackInfo) { + throw new Error('Invalid encoded track') + } + + const urlResult = await nodelink.sources.getTrackUrl(trackInfo) + if (urlResult.exception) { + throw new Error( + urlResult.exception.message || 'Failed to get track URL' + ) + } + + const additionalData = { + ...(urlResult.additionalData || {}), + startTime: payload?.position || 0 + } + + fetched = await nodelink.sources.getTrackStream( + urlResult.newTrack?.info || trackInfo, + urlResult.url, + urlResult.protocol, + additionalData + ) + + if (fetched.exception) { + throw new Error(fetched.exception.message || 'Failed to load stream') + } + + pcmStream = createPCMStream( + fetched.stream, + fetched.type || urlResult.format, + nodelink, + (payload?.volume ?? 100) / 100, + payload?.filters || {} + ) + + pcmStream.on('data', (chunk) => { + if (!finished) sendStreamChunkFromWorker(id, socketPath, chunk) + }) + + pcmStream.once('end', () => finish()) + pcmStream.once('error', (err) => finish(err)) + pcmStream.once('close', () => finish()) + } catch (err) { + finish(err) + } + } + + parentPort.on('message', async (taskData) => { + const { id, task, payload, socketPath } = taskData + + if (task === 'loadStream') { + try { + await handleLoadStream(id, socketPath, payload) + } catch (e) { + sendStreamErrorFromWorker(id, socketPath, e.message || e) + } + return + } + + try { + let result + switch (task) { + case 'resolve': + result = await nodelink.sources.resolve(payload.url) + break + case 'search': + result = await nodelink.sources.search(payload.source, payload.query) + break + case 'unifiedSearch': + result = await nodelink.sources.unifiedSearch(payload.query) + break + case 'loadLyrics': + result = await nodelink.lyrics.loadLyrics( + { info: payload.decodedTrackInfo }, + payload.language + ) + break + case 'loadChapters': + result = await nodelink.sources.getChapters({ + info: payload.decodedTrackInfo + }) + break + } + parentPort.postMessage({ + type: 'result', + id, + socketPath, + result: JSON.stringify(result) + }) + } catch (e) { + parentPort.postMessage({ + type: 'result', + id, + socketPath, + error: e.message + }) + } + }) +} diff --git a/src/sources/amazonmusic.js b/src/sources/amazonmusic.js new file mode 100644 index 0000000..cea5f1f --- /dev/null +++ b/src/sources/amazonmusic.js @@ -0,0 +1,390 @@ +import { + encodeTrack, + getBestMatch, + http1makeRequest, + logger +} from '../utils.js' + +const BOT_USER_AGENT = + 'Mozilla/5.0 (compatible; NodeLinkBot/0.1; +https://nodelink.js.org/)' + +function parseISO8601Duration(duration) { + if (!duration) return 0 + const match = duration.match(/PT(?:(\d+)H)?(?:(\d+)M)?(?:(\d+)S)?/) + if (!match) return 0 + const hours = Number.parseInt(match[1] || '0', 10) + const minutes = Number.parseInt(match[2] || '0', 10) + const seconds = Number.parseInt(match[3] || '0', 10) + return (hours * 3600 + minutes * 60 + seconds) * 1000 +} + +export default class AmazonMusicSource { + constructor(nodelink) { + this.nodelink = nodelink + this.config = nodelink.options + this.patterns = [ + /https?:\/\/music\.amazon\.[a-z.]+\/(?:.*\/)?(track|album|playlist|artist)s?\/([a-z0-9]+)/i, + /https?:\/\/(?:www\.)?amazon\.[a-z.]+\/dp\/([a-z0-9]+)/i + ] + this.priority = 100 + } + + async setup() { + return true + } + + async resolve(url) { + try { + const match = url.match(this.patterns[0]) || url.match(this.patterns[1]) + if (!match) return { loadType: 'empty', data: {} } + + let [, type, id] = match + if (!id) { + id = type + type = 'track' + } + + const 
trackAsin = url.match(/(?:[?&]|%26)trackAsin=([a-z0-9]+)/i)?.[1] + + if (trackAsin) { + return await this._resolveTrack(url, trackAsin) + } + + switch (type) { + case 'track': + return await this._resolveTrack(url, id) + case 'album': + return await this._resolveAlbum(url, id) + case 'playlist': + return await this._resolvePlaylist(url, id) + case 'artist': + return await this._resolveArtist(url, id) + case 'dp': + return await this._resolveTrack(url, id) + default: + return { loadType: 'empty', data: {} } + } + } catch (e) { + logger('error', 'AmazonMusic', `Resolution failed: ${e.message}`) + return { + loadType: 'error', + data: { message: e.message, severity: 'fault' } + } + } + } + + async _resolveTrack(url, id) { + const data = await this._fetchJsonLd(url, id) + if (data?.loadType === 'track') return data + + return await this._fallbackToOdesli(url, id) + } + + async _resolveAlbum(url, id) { + const data = await this._fetchJsonLd(url) + if (data?.loadType === 'playlist') return data + + return await this._fallbackToOdesli(url, id) + } + + async _resolvePlaylist(url, id) { + const data = await this._fetchJsonLd(url) + if (data?.loadType === 'playlist') return data + + return await this._fallbackToOdesli(url, id) + } + + async _resolveArtist(url, id) { + const data = await this._fetchJsonLd(url) + if (data?.loadType === 'playlist') return data + + return await this._fallbackToOdesli(url, id) + } + + async _fetchJsonLd(url, targetId) { + try { + const { body, statusCode } = await http1makeRequest(url, { + headers: { 'User-Agent': BOT_USER_AGENT } + }) + if (statusCode !== 200) return null + + const headerArtist = body + .match(/<music-detail-header[^>]*primary-text="([^"]+)"/)?.[1] + ?.replace(/&amp;/g, '&') + const headerImage = body.match( + /<music-detail-header[^>]*image-src="([^"]+)"/ + )?.[1] + const ogImageMatch = body.match( + /<meta property="og:image" content="([^"]+)"/ + ) + const artworkUrl = headerImage || ogImageMatch?.[1] || null + + const jsonLdMatches = body.matchAll( + /<script[^>]*type="application\/ld\+json"[^>]*>([\s\S]*?)<\/script>/g + ) + let collection = null + let trackData = null + + for (const match of jsonLdMatches) { + try { + const content = match[1] + .replace(/&quot;/g, '"') + .replace(/&amp;/g, '&') + const parsed = JSON.parse(content) + const data = Array.isArray(parsed) ? parsed[0] : parsed + if ( + data['@type'] === 'MusicAlbum' || + data['@type'] === 'MusicGroup' || + data['@type'] === 'Playlist' + ) { + collection = data + } else if (data['@type'] === 'MusicRecording') { + trackData = data + } + } catch (_e) {} + } + + const tracks = [] + let collectionName = headerArtist || 'Unknown Artist' + let collectionImage = artworkUrl + + if (collection) { + const artistName = + collection.byArtist?.name || + (Array.isArray(collection.byArtist) + ?
collection.byArtist[0]?.name + : null) || + collection.author?.name + if (artistName) collectionName = artistName + if (collection.image) collectionImage = collection.image + } + + if (collection?.track) { + for (const t of collection.track) { + const id = + t.url?.split('/').pop() || + t['@id']?.split('/').pop() || + `am-${Buffer.from(t.name).toString('hex')}` + tracks.push({ + identifier: id, + isSeekable: true, + author: t.byArtist?.name || t.author?.name || collectionName, + length: parseISO8601Duration(t.duration), + isStream: false, + position: 0, + title: t.name, + uri: t.url || url, + artworkUrl: collectionImage, + isrc: t.isrcCode || null, + sourceName: 'amazonmusic' + }) + } + } + + if (tracks.length === 0) { + const rowMatches = body.matchAll( + /<(music-image-row|music-text-row)[^>]*primary-text="([^"]+)"[^>]*primary-href="([^"]+)"(?:[^>]*secondary-text-1="([^"]+)")?[^>]*duration="([^"]+)"(?:[^>]*image-src="([^"]+)")?/g + ) + for (const m of rowMatches) { + const tTitle = m[2].replace(/&amp;/g, '&') + const tHref = m[3] + const tArtist = (m[4] || collectionName).replace(/&amp;/g, '&') + const tDuration = m[5] + const tImage = m[6] || collectionImage + const tId = + tHref.split('trackAsin=').pop().split('&')[0] || + tHref.split('/').pop() + + tracks.push({ + identifier: tId, + isSeekable: true, + author: tArtist, + length: tDuration.includes(':') + ? (parseInt(tDuration.split(':')[0], 10) * 60 + + parseInt(tDuration.split(':')[1], 10)) * + 1000 + : 0, + isStream: false, + position: 0, + title: tTitle, + uri: `https://music.amazon.com.br/tracks/${tId}`, + artworkUrl: tImage, + isrc: null, + sourceName: 'amazonmusic' + }) + } + + if (tracks.length === 0 && !headerArtist) { + const titleMatch = body.match(/<title[^>]*>([^<]+)<\/title>/) + if (titleMatch) + collectionName = + titleMatch[1] + .split(' no Amazon')[0] + .split(' de ') + .pop() + ?.split(' no ')[0] || collectionName + } + } + + if (tracks.length > 0) { + if (targetId) { + const selected = tracks.find( + (t) => t.identifier === targetId || t.uri.includes(targetId) + ) + if (selected) { + return { + loadType: 'track', + data: { encoded: encodeTrack(selected), info: selected } + } + } + } + + if (url.includes('/tracks/') && !targetId) { + return { + loadType: 'track', + data: { encoded: encodeTrack(tracks[0]), info: tracks[0] } + } + } + + return { + loadType: 'playlist', + data: { + info: { name: collectionName, selectedTrack: 0 }, + tracks: tracks.map((t) => ({ encoded: encodeTrack(t), info: t })) + } + } + } + + if (trackData) { + const artist = + trackData.byArtist?.name || trackData.author?.name || 'Unknown Artist' + let trackImage = trackData.image || artworkUrl + if (!trackImage) { + const headerImageMatch = body.match( + /<music-detail-header[^>]*image-src="([^"]+)"/ + ) + if (headerImageMatch) trackImage = headerImageMatch[1] + } + return this._buildTrackResult( + trackData.name, + artist, + url, + trackImage, + trackData.id || trackData.isrcCode || url.split('/').pop(), + parseISO8601Duration(trackData.duration), + trackData.isrcCode + ) + } + } catch (_e) {} + return null + } + + async _fallbackToOdesli(url, targetId) { + try { + const apiUrl = `https://api.song.link/v1-alpha.1/links?url=${encodeURIComponent(url.split('?')[0])}` + const { body, statusCode } = await http1makeRequest(apiUrl) + if (statusCode === 200 && body.entitiesByUniqueId) { + let entity = body.entitiesByUniqueId[body.entityUniqueId] + if (targetId && (!entity || !entity.id.includes(targetId))) { + const found = Object.values(body.entitiesByUniqueId).find((e) =>
e.id.includes(targetId) + ) + if (found) entity = found + } + if (entity) + return this._buildTrackResult( + entity.title, + entity.artistName, + url, + entity.thumbnailUrl, + entity.id, + 0, + entity.isrc + ) + } + } catch (_e) {} + return { loadType: 'empty', data: {} } + } + + _buildTrackResult(title, author, url, image, id, length = 0, isrc = null) { + const trackInfo = { + identifier: id, + isSeekable: true, + author: author?.trim() || 'Unknown Artist', + length: length, + isStream: false, + position: 0, + title: title?.trim() || 'Unknown Track', + uri: url, + artworkUrl: image || null, + isrc: isrc, + sourceName: 'amazonmusic' + } + return { + loadType: 'track', + data: { encoded: encodeTrack(trackInfo), info: trackInfo } + } + } + + async getTrackUrl(decodedTrack) { + const query = `${decodedTrack.title} ${decodedTrack.author} official audio` + + try { + let searchResult + + if (decodedTrack.isrc) { + searchResult = await this.nodelink.sources.search( + 'youtube', + `"${decodedTrack.isrc}"`, + 'ytmsearch' + ) + if ( + searchResult.loadType !== 'search' || + searchResult.data.length === 0 + ) { + searchResult = null + } + } + + if (!searchResult) { + searchResult = await this.nodelink.sources.search( + 'youtube', + query, + 'ytmsearch' + ) + } + + if ( + searchResult.loadType !== 'search' || + searchResult.data.length === 0 + ) { + searchResult = await this.nodelink.sources.searchWithDefault(query) + } + + if ( + searchResult.loadType !== 'search' || + searchResult.data.length === 0 + ) { + throw new Error('No alternative stream found via default search.') + } + + const bestMatch = getBestMatch(searchResult.data, decodedTrack) + if (!bestMatch) + throw new Error('No suitable alternative stream found after filtering.') + + const streamInfo = await this.nodelink.sources.getTrackUrl(bestMatch.info) + return { newTrack: bestMatch, ...streamInfo } + } catch (e) { + logger( + 'warn', + 'AmazonMusic', + `Mirror search for "${query}" failed: ${e.message}` + ) + throw e + } + } + + async loadStream() { + return null + } +} diff --git a/src/sources/applemusic.js b/src/sources/applemusic.js index 35630b9..ce51bd7 100644 --- a/src/sources/applemusic.js +++ b/src/sources/applemusic.js @@ -1,12 +1,15 @@ -import { encodeTrack, http1makeRequest, logger } from '../utils.js' -import fs from 'node:fs/promises' import path from 'node:path' +import { + encodeTrack, + getBestMatch, + http1makeRequest, + logger +} from '../utils.js' const API_BASE = 'https://api.music.apple.com/v1' const MAX_PAGE_ITEMS = 300 -const DURATION_TOLERANCE = 0.15 const BATCH_SIZE_DEFAULT = 5 -const CACHE_VALIDITY_DAYS = 7 +const _CACHE_VALIDITY_DAYS = 7 export default class AppleMusicSource { constructor(nodelink) { @@ -60,12 +63,18 @@ export default class AppleMusicSource { return true } - const cachedToken = await this._loadTokenFromCache() + const cachedToken = this.nodelink.credentialManager.get( + 'apple_media_api_token' + ) if (cachedToken) { this.mediaApiToken = cachedToken this._parseToken(this.mediaApiToken) if (this._isTokenValid()) { - logger('info', 'AppleMusic', 'Loaded valid token from cache.') + logger( + 'info', + 'AppleMusic', + 'Loaded valid token from CredentialManager.' 
+ ) this.tokenInitialized = true return true } @@ -77,7 +86,11 @@ export default class AppleMusicSource { this._parseToken(this.mediaApiToken) if (this._isTokenValid()) { logger('info', 'AppleMusic', 'Loaded valid token from config file.') - await this._saveTokenToCache(this.mediaApiToken) + this.nodelink.credentialManager.set( + 'apple_media_api_token', + this.mediaApiToken, + this.tokenExpiry - Date.now() + ) this.tokenInitialized = true return true } @@ -95,7 +108,11 @@ export default class AppleMusicSource { } this.mediaApiToken = newToken this._parseToken(this.mediaApiToken) - await this._saveTokenToCache(this.mediaApiToken) + this.nodelink.credentialManager.set( + 'apple_media_api_token', + this.mediaApiToken, + this.tokenExpiry - Date.now() + ) this.tokenInitialized = true return true } @@ -119,57 +136,6 @@ export default class AppleMusicSource { } } - async _loadTokenFromCache() { - try { - await fs.mkdir(path.dirname(this.tokenCachePath), { recursive: true }) - const data = await fs.readFile(this.tokenCachePath, 'utf-8') - const { token, timestamp } = JSON.parse(data) - - if (!token || !timestamp) return null - - const cacheAge = Date.now() - timestamp - const maxAge = CACHE_VALIDITY_DAYS * 24 * 60 * 60 * 1000 - - if (cacheAge > maxAge) { - logger('info', 'AppleMusic', 'Cached token has expired.') - return null - } - - return token - } catch (error) { - if (error.code !== 'ENOENT') { - logger( - 'warn', - 'AppleMusic', - `Could not read token cache: ${error.message}` - ) - } - return null - } - } - - async _saveTokenToCache(token) { - try { - await fs.mkdir(path.dirname(this.tokenCachePath), { recursive: true }) - const dataToCache = { - token: token, - timestamp: Date.now() - } - await fs.writeFile( - this.tokenCachePath, - JSON.stringify(dataToCache), - 'utf-8' - ) - logger('info', 'AppleMusic', 'Saved new token to cache file.') - } catch (error) { - logger( - 'error', - 'AppleMusic', - `Failed to save token to cache: ${error.message}` - ) - } - } - async _fetchNewToken() { try { logger( @@ -187,7 +153,7 @@ export default class AppleMusicSource { const scriptTagMatch = html.match( / 0) allowed = Math.min(pages, maxPages) + const promises = [] for (let index = 1; index < allowed; index++) { const offset = index * MAX_PAGE_ITEMS const path = `${basePath}${basePath.includes('?') ? 
'&' : '?'}limit=${MAX_PAGE_ITEMS}&offset=${offset}` + promises.push(this._apiRequest(path)) + } + + if (promises.length === 0) return results - const page = await this._apiRequest(path) - if (page?.data) results.push(...page.data) + const batchSize = this.playlistPageLoadConcurrency + for (let i = 0; i < promises.length; i += batchSize) { + const batch = promises.slice(i, i + batchSize) + try { + const pageResults = await Promise.all(batch) + for (const page of pageResults) { + if (page?.data) results.push(...page.data) + } + } catch (e) { + logger( + 'warn', + 'AppleMusic', + `Failed to fetch a batch of pages: ${e.message}` + ) + } } return results @@ -516,16 +499,45 @@ export default class AppleMusicSource { try { const url = new URL(decodedTrack.uri) isExplicit = url.searchParams.get('explicit') === 'true' - } catch (error) { + } catch (_error) { // Ignore malformed URI } } - const duration = decodedTrack.length const query = this._buildSearchQuery(decodedTrack, isExplicit) try { - const searchResult = await this.nodelink.sources.searchWithDefault(query) + let searchResult + + if (decodedTrack.isrc) { + searchResult = await this.nodelink.sources.search( + 'youtube', + `"${decodedTrack.isrc}"`, + 'ytmsearch' + ) + if ( + searchResult.loadType !== 'search' || + searchResult.data.length === 0 + ) { + searchResult = null + } + } + + if (!searchResult) { + searchResult = await this.nodelink.sources.search( + 'youtube', + query, + 'ytmsearch' + ) + } + + if ( + searchResult.loadType !== 'search' || + searchResult.data.length === 0 + ) { + searchResult = await this.nodelink.sources.searchWithDefault(query) + } + if ( searchResult.loadType !== 'search' || searchResult.data.length === 0 @@ -535,14 +547,10 @@ export default class AppleMusicSource { } } - const bestMatch = await this._findBestMatch( - searchResult.data, - duration, - decodedTrack, - isExplicit, - this.allowExplicit, - false - ) + const bestMatch = getBestMatch(searchResult.data, decodedTrack, { + allowExplicit: this.allowExplicit + }) + if (!bestMatch) { return { exception: { message: 'No suitable match.', severity: 'fault' } @@ -563,143 +571,4 @@ export default class AppleMusicSource { } return searchQuery } - - async _findBestMatch( - list, - target, - original, - isExplicit, - allowExplicit, - retried = false - ) { - const allowedDurationDiff = target * DURATION_TOLERANCE - const normalizedOriginalTitle = this._normalize(original.title) - const normalizedOriginalAuthor = this._normalize(original.author) - - const scoredCandidates = list - .filter( - (item) => Math.abs(item.info.length - target) <= allowedDurationDiff - ) - .map((item) => { - const normalizedItemTitle = this._normalize(item.info.title) - const normalizedItemAuthor = this._normalize(item.info.author) - let score = 0 - - const originalTitleWords = new Set( - normalizedOriginalTitle.split(' ').filter((w) => w.length > 0) - ) - const itemTitleWords = new Set( - normalizedItemTitle.split(' ').filter((w) => w.length > 0) - ) - - let titleScore = 0 - for (const word of originalTitleWords) { - if (itemTitleWords.has(word)) { - titleScore++ - } - } - score += titleScore * 100 - - const authorSimilarity = this._calculateSimilarity( - normalizedOriginalAuthor, - normalizedItemAuthor - ) - score += authorSimilarity * 100 - - const titleWords = new Set(normalizedItemTitle.split(' ')) - const originalTitleWordsSet = new Set( - normalizedOriginalTitle.split(' ') - ) - const extraWords = [...titleWords].filter( - (word) => !originalTitleWordsSet.has(word) - ) - score -= 
extraWords.length * 5 - - const isCleanOrRadio = - normalizedItemTitle.includes('clean') || - normalizedItemTitle.includes('radio') - - if (isExplicit && !allowExplicit) { - if (isCleanOrRadio) { - score += 500 - } - } else if (!isExplicit) { - if (isCleanOrRadio) { - score -= 200 - } - } else { - if (isCleanOrRadio) { - score -= 200 - } - } - - return { item, score } - }) - .filter((c) => c.score >= 0) - - if (scoredCandidates.length === 0 && !retried) { - const newSearch = await this.nodelink.sources.searchWithDefault( - `${original.title} ${original.author} official video` - ) - if (newSearch.loadType !== 'search' || newSearch.data.length === 0) { - return null - } - - return await this._findBestMatch( - newSearch.data, - target, - original, - isExplicit, - allowExplicit, - true - ) - } - - if (scoredCandidates.length === 0) { - return null - } - - scoredCandidates.sort((a, b) => b.score - a.score) - - return scoredCandidates[0].item - } - - _normalize(text) { - if (!text) return '' - return text - .toLowerCase() - .replace(/feat\.?/g, '') - .replace(/ft\.?/g, '') - .replace(/[^\w\s]/g, '') - .trim() - } - - _calculateSimilarity(string1, string2) { - if (!string1.length && !string2.length) return 1 - const longerString = string1.length > string2.length ? string1 : string2 - const shorterString = string1.length > string2.length ? string2 : string1 - const distance = this._levenshteinDistance(string1, string2) - return (longerString.length - distance) / longerString.length - } - - _levenshteinDistance(string1, string2) { - const matrix = [] - for (let i = 0; i <= string2.length; i++) matrix[i] = [i] - for (let j = 0; j <= string1.length; j++) matrix[0][j] = j - - for (let i = 1; i <= string2.length; i++) { - for (let j = 1; j <= string1.length; j++) { - matrix[i][j] = - string1[j - 1] === string2[i - 1] - ? matrix[i - 1][j - 1] - : Math.min( - matrix[i - 1][j - 1] + 1, - matrix[i][j - 1] + 1, - matrix[i - 1][j] + 1 - ) - } - } - - return matrix[string2.length][string1.length] - } } diff --git a/src/sources/audiomack.js b/src/sources/audiomack.js new file mode 100644 index 0000000..d5f5b05 --- /dev/null +++ b/src/sources/audiomack.js @@ -0,0 +1,419 @@ +import { http1makeRequest, logger, encodeTrack } from '../utils.js' +import crypto from 'node:crypto' +import { PassThrough } from 'node:stream' + +const audiomackPatterns = [ + /https?:\/\/(?:www\.)?audiomack\.com\/[^/]+\/song\/[^/]+(?:\?.*)?$/i, + /https?:\/\/(?:www\.)?audiomack\.com\/[^/]+\/album\/[^/]+(?:\?.*)?$/i, + /https?:\/\/(?:www\.)?audiomack\.com\/[^/]+\/playlist\/[^/]+(?:\?.*)?$/i, + /https?:\/\/(?:www\.)?audiomack\.com\/[^/]+(?:\/)?(?:\?.*)?$/i, + /https?:\/\/(?:www\.)?audiomack\.com\/search(?:\?.*)?$/i +] + +const API_BASE = 'https://api.audiomack.com/v1' +const CONSUMER_KEY = 'audiomack-web' +const CONSUMER_SECRET = 'bd8a07e9f23fbe9d808646b730f89b8e' + +const STRICT_URI_RE = /[!'()*]/g +function strictEncodeURIComponent(str) { + return encodeURIComponent(String(str)).replace( + STRICT_URI_RE, + (c) => '%' + c.charCodeAt(0).toString(16).toUpperCase() + ) +} + +function buildParamString(params) { + return Object.keys(params) + .sort() + .map( + (k) => + `${strictEncodeURIComponent(k)}=${strictEncodeURIComponent(params[k])}` + ) + .join('&') +} + +function parseJsonBody(body) { + if (!body) return null + if (typeof body !== 'string') return body + try { + return JSON.parse(body) + } catch { + return null + } +} + +function normalizeApiResult(json) { + if (!json) return null + let data = json.results ?? json.result ?? 
json + if (Array.isArray(data)) data = data[0] + return data || null +} + +function getUrlExtension(u) { + if (!u || typeof u !== 'string') return '' + try { + const p = new URL(u).pathname + const i = p.lastIndexOf('.') + return i === -1 ? '' : p.slice(i + 1).toLowerCase() + } catch { + const base = u.split('?')[0] + const i = base.lastIndexOf('.') + return i === -1 ? '' : base.slice(i + 1).toLowerCase() + } +} + +function guessFormatFromUrl(u) { + const ext = getUrlExtension(u) + if ( + ext === 'mp3' || + ext === 'm4a' || + ext === 'mp4' || + ext === 'aac' || + ext === 'ogg' || + ext === 'wav' || + ext === 'flac' || + ext === 'webm' || + ext === 'flv' + ) + return ext === 'mp4' ? 'm4a' : ext + return 'm4a' +} + +function coerceStreamType(typeOrFormat, url) { + const t = typeOrFormat ? String(typeOrFormat).toLowerCase() : '' + if (t) { + if (t.includes('/')) return t + if (t === 'mp3' || t === 'mpeg') return 'audio/mpeg' + if (t === 'm4a' || t === 'mp4') return 'audio/mp4' + if (t === 'aac') return 'audio/aac' + if (t === 'ogg') return 'audio/ogg' + if (t === 'wav') return 'audio/wav' + if (t === 'flac') return 'audio/flac' + if (t === 'webm') return 'video/webm' + if (t === 'flv') return 'video/x-flv' + return t + } + + const ext = guessFormatFromUrl(url) + if (ext === 'mp3') return 'audio/mpeg' + if (ext === 'm4a' || ext === 'mp4') return 'audio/mp4' + if (ext === 'aac') return 'audio/aac' + if (ext === 'ogg') return 'audio/ogg' + if (ext === 'wav') return 'audio/wav' + if (ext === 'flac') return 'audio/flac' + if (ext === 'webm') return 'video/webm' + if (ext === 'flv') return 'video/x-flv' + return 'audio/mp4' +} + +export default class AudioMackSource { + constructor(nodelink) { + this.nodelink = nodelink + this.config = nodelink.options + this.searchTerms = ['admsearch', 'audiomack'] + this.patterns = audiomackPatterns + this.priority = 40 + } + + async setup() { + logger('info', 'Sources', 'Loaded Audiomack source (official public API).') + return true + } + + async makeSignedRequest(method, url, additionalParams = {}) { + const params = { + ...additionalParams, + oauth_consumer_key: CONSUMER_KEY, + oauth_nonce: crypto.randomBytes(16).toString('hex'), + oauth_signature_method: 'HMAC-SHA1', + oauth_timestamp: Math.floor(Date.now() / 1000), + oauth_version: '1.0' + } + + const paramString = buildParamString(params) + const signature = this.generateSignature( + method, + url, + params, + CONSUMER_SECRET, + paramString + ) + const signedUrl = `${url}?${paramString}&oauth_signature=${strictEncodeURIComponent(signature)}` + return http1makeRequest(signedUrl, { method }) + } + + async search(query, _sourceTerm) { + logger('debug', 'Sources', `Searching Audiomack for: "${query}"`) + + try { + const url = `${API_BASE}/search` + const { body, error } = await this.makeSignedRequest('GET', url, { + q: query, + limit: '20', + show: 'music', + sort: 'popular', + page: '1', + section: '/search' + }) + + if (error || !body) { + logger( + 'error', + 'Sources', + `[Audiomack] API search failed: ${error?.message}` + ) + return { + exception: { + message: error?.message || 'Failed to fetch search results.', + severity: 'common' + } + } + } + + const json = parseJsonBody(body) + if (!json) { + logger( + 'error', + 'Sources', + '[Audiomack] Invalid JSON in search response.' 
+ ) + return { loadType: 'empty', data: {} } + } + + if (Array.isArray(json.results)) { + const tracks = json.results + .filter((item) => item?.type === 'song') + .map((track) => this.buildTrack(track)) + + logger('debug', 'Sources', `[Audiomack] Found ${tracks.length} tracks.`) + if (!tracks.length) return { loadType: 'empty', data: {} } + return { loadType: 'search', data: tracks } + } + + logger('debug', 'Sources', '[Audiomack] No results found in response.') + return { loadType: 'empty', data: {} } + } catch (e) { + logger('error', 'Sources', `[Audiomack] search error: ${e.message}`) + return { exception: { message: 'Failed to search.', severity: 'common' } } + } + } + + async resolve(queryUrl) { + const url = new URL(queryUrl) + const pathParts = url.pathname.slice(1).split('/') + + const artistSlug = pathParts[0] + const songSlug = pathParts.length > 2 ? pathParts.slice(2).join('/') : null + + if (!songSlug) { + return { + exception: { + message: 'Only single song URLs are currently supported.', + severity: 'common' + } + } + } + + const apiUrl = `${API_BASE}/music/song/${artistSlug}/${songSlug}` + + try { + const { body, error } = await this.makeSignedRequest('GET', apiUrl, { + section: url.pathname + }) + + if (error || !body) { + return { + exception: { + message: + error?.message || + 'Failed to fetch track details from Audiomack API.', + severity: 'common' + } + } + } + + const json = parseJsonBody(body) + const song = normalizeApiResult(json) + + if (!song?.id) { + return { + exception: { + message: 'Track not found or invalid response.', + severity: 'common' + } + } + } + + return { loadType: 'track', data: this.buildTrack(song, queryUrl) } + } catch (e) { + return { + exception: { + message: 'Failed to resolve track: ' + e.message, + severity: 'common' + } + } + } + } + + async getTrackUrl(track) { + if (!track.identifier) { + return { + exception: { + message: 'Track identifier (numeric ID) missing', + severity: 'fault', + cause: 'StreamLink' + } + } + } + + const playUrl = `${API_BASE}/music/play/${track.identifier}` + + try { + let section = '/search' + if (track.uri) { + try { + section = new URL(track.uri).pathname + } catch {} + } + + const { body, error } = await this.makeSignedRequest('GET', playUrl, { + environment: 'desktop-web', + hq: 'true', + section + }) + + if (error || !body) { + return { + exception: { + message: + error?.message || 'Failed to get playback URL from Audiomack API', + severity: 'fault', + cause: 'StreamLink' + } + } + } + + const json = parseJsonBody(body) + const data = normalizeApiResult(json) + if (!data) { + return { + exception: { + message: 'Invalid response from Audiomack API', + severity: 'fault', + cause: 'StreamLink' + } + } + } + + const streamUrl = + data.signedUrl || + data.signed_url || + data.url || + data.streamUrl || + data.stream_url + + if (!streamUrl) { + return { + exception: { + message: 'Invalid or missing streaming URL in response', + severity: 'fault', + cause: 'StreamLink' + } + } + } + + const format = guessFormatFromUrl(streamUrl) + return { url: streamUrl, protocol: 'https', format } + } catch (e) { + return { + exception: { + message: e.message || 'Failed to get playback URL from Audiomack API', + severity: 'fault', + cause: 'StreamLink' + } + } + } + } + + async loadStream(decodedTrack, url, _protocol, additionalData) { + try { + const res = await http1makeRequest(url, { + method: 'GET', + headers: additionalData?.headers || {}, + streamOnly: true + }) + + if (res.error || !res.stream) + throw res.error || new 
Error('Failed to get stream') + + const out = new PassThrough() + const src = res.stream + + src.pipe(out) + + src.once('error', (err) => out.destroy(err)) + out.once('close', () => src.destroy()) + out.once('error', () => src.destroy()) + out.once('end', () => out.emit('finishBuffering')) + + const streamType = coerceStreamType( + additionalData?.type || additionalData?.format || decodedTrack?.format, + url + ) + + return { stream: out, type: streamType } + } catch (err) { + return { exception: { message: err.message, severity: 'common' } } + } + } + + generateSignature( + method, + url, + params, + secret, + paramString = buildParamString(params) + ) { + const signatureBase = `${method.toUpperCase()}&${strictEncodeURIComponent(url)}&${strictEncodeURIComponent(paramString)}` + const signingKey = `${strictEncodeURIComponent(secret)}&` + return crypto + .createHmac('sha1', signingKey) + .update(signatureBase) + .digest('base64') + } + + buildTrack(item, queryUrl = null) { + const id = item.id + const title = item.title || 'Unknown Title' + const author = item.artist || item.uploader?.name || 'Unknown Artist' + const duration = item.duration ? parseInt(item.duration, 10) * 1000 : 0 + const artwork = item.image || item.image_base || null + + let uri = queryUrl + if (!uri) { + const uploaderSlug = + item.uploader?.url_slug || + item.uploader_url_slug || + item.artist_slug || + 'unknown' + const songSlug = item.url_slug || item.slug || '' + uri = `https://audiomack.com/${uploaderSlug}/song/${songSlug}` + } + + const trackInfo = { + identifier: String(id), + title, + author, + length: duration, + sourceName: 'audiomack', + artworkUrl: artwork, + uri, + isStream: false, + isSeekable: true, + position: 0, + isrc: item.isrc || null + } + + return { encoded: encodeTrack(trackInfo), info: trackInfo, pluginInfo: {} } + } +} diff --git a/src/sources/bilibili.js b/src/sources/bilibili.js new file mode 100644 index 0000000..f31d735 --- /dev/null +++ b/src/sources/bilibili.js @@ -0,0 +1,793 @@ +import crypto from 'node:crypto' +import { PassThrough } from 'node:stream' +import { encodeTrack, http1makeRequest, logger, makeRequest } from '../utils.js' + +const MIXIN_KEY_ENC_TAB = [ + 46, 47, 18, 2, 53, 8, 23, 32, 15, 50, 10, 31, 58, 3, 45, 35, 27, 43, 5, 49, + 33, 9, 42, 19, 29, 28, 14, 39, 12, 38, 41, 13, 37, 48, 7, 16, 24, 55, 40, 61, + 26, 17, 0, 1, 60, 51, 30, 4, 22, 25, 54, 21, 56, 59, 6, 63, 57, 62, 11, 36, + 20, 34, 44, 52 +] + +const HEADERS = { + 'User-Agent': + 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36', + Referer: 'https://www.bilibili.com/' +} + +export default class BilibiliSource { + constructor(nodelink) { + this.nodelink = nodelink + this.patterns = [ + /https?:\/\/(?:www\.)?bilibili\.com\/video\/(BV[a-zA-Z0-9]+|av\d+)/, + /https?:\/\/(?:www\.)?bilibili\.com\/bangumi\/play\/(ep|ss)(\d+)/, + /https?:\/\/(?:www\.)?bilibili\.com\/audio\/(au|am)(\d+)/, + /https?:\/\/live\.bilibili\.com\/(\d+)/, + /https?:\/\/space\.bilibili\.com\/(\d+)/ + ] + this.searchTerms = ['bilibili'] + this.priority = 100 + this.wbiKeys = null + this.wbiKeysExpiry = 0 + this.cookie = this.nodelink.options.sources?.bilibili?.sessdata + ? `SESSDATA=${this.nodelink.options.sources.bilibili.sessdata}` + : '' + } + + async setup() { + logger( + 'info', + 'Sources', + 'Loaded Bilibili source (Video, Audio, Live, Space, Lyrics, Login).' 
+ ) + return true + } + + async _getWbiKeys() { + if (this.wbiKeys && Date.now() < this.wbiKeysExpiry) { + return this.wbiKeys + } + + const cachedKeys = this.nodelink.credentialManager.get('bilibili_wbi_keys') + if (cachedKeys) { + this.wbiKeys = cachedKeys + this.wbiKeysExpiry = Date.now() + 1000 * 60 * 60 + return this.wbiKeys + } + + const { body, error } = await makeRequest( + 'https://api.bilibili.com/x/web-interface/nav', + { + method: 'GET', + headers: { ...HEADERS, Cookie: this.cookie } + } + ) + + if (error || !body?.data?.wbi_img) { + throw new Error('Failed to fetch WBI keys') + } + + const { img_url, sub_url } = body.data.wbi_img + const imgKey = img_url.slice( + img_url.lastIndexOf('/') + 1, + img_url.lastIndexOf('.') + ) + const subKey = sub_url.slice( + sub_url.lastIndexOf('/') + 1, + sub_url.lastIndexOf('.') + ) + + const rawKey = imgKey + subKey + let mixinKey = '' + for (const index of MIXIN_KEY_ENC_TAB) { + if (rawKey[index]) mixinKey += rawKey[index] + } + + this.wbiKeys = mixinKey.slice(0, 32) + this.wbiKeysExpiry = Date.now() + 1000 * 60 * 60 + this.nodelink.credentialManager.set( + 'bilibili_wbi_keys', + this.wbiKeys, + 1000 * 60 * 60 + ) + + return this.wbiKeys + } + + _signWbi(params, mixinKey) { + const currTime = Math.round(Date.now() / 1000) + const newParams = { ...params, wts: currTime } + + const query = Object.keys(newParams) + .sort() + .map((key) => { + const value = newParams[key].toString().replace(/[!'()*]/g, '') + return `${encodeURIComponent(key)}=${encodeURIComponent(value)}` + }) + .join('&') + + const w_rid = crypto + .createHash('md5') + .update(query + mixinKey) + .digest('hex') + + return `${query}&w_rid=${w_rid}` + } + + async search(query) { + try { + let body + let _error + + const searchResponse = await makeRequest( + `https://api.bilibili.com/x/web-interface/search/type?search_type=video&keyword=${encodeURIComponent(query)}`, + { + method: 'GET', + headers: { + ...HEADERS, + Cookie: this.cookie, + Referer: 'https://search.bilibili.com/' + } + } + ) + body = searchResponse.body + _error = searchResponse.error + + if ( + !body?.data?.result || + !Array.isArray(body.data.result) || + body.data.result.length === 0 + ) { + const allSearchResponse = await makeRequest( + `https://api.bilibili.com/x/web-interface/search/all/v2?keyword=${encodeURIComponent(query)}`, + { + method: 'GET', + headers: { + ...HEADERS, + Cookie: this.cookie, + Referer: 'https://search.bilibili.com/' + } + } + ) + body = allSearchResponse.body + _error = allSearchResponse.error + } + + const results = body?.data?.result || [] + let videos = [] + + if (results.length > 0) { + if (results[0].type === 'video') { + videos = results + } else { + const videoSection = results.find((r) => r.result_type === 'video') + if (videoSection?.data) { + videos = videoSection.data + } + } + } + + if (!videos || videos.length === 0) { + return { loadType: 'empty', data: {} } + } + + const tracks = [] + for (const item of videos) { + const durationParts = item.duration.split(':').map(Number) + let durationMs = 0 + if (durationParts.length === 2) + durationMs = (durationParts[0] * 60 + durationParts[1]) * 1000 + else if (durationParts.length === 3) + durationMs = + (durationParts[0] * 3600 + + durationParts[1] * 60 + + durationParts[2]) * + 1000 + + const trackInfo = { + identifier: item.bvid, + isSeekable: true, + author: item.author, + length: durationMs, + isStream: false, + position: 0, + title: item.title.replace(/<[^>]*>/g, ''), + uri: item.arcurl, + artworkUrl: 
item.pic.startsWith('//') + ? `https:${item.pic}` + : item.pic, + isrc: null, + sourceName: 'bilibili' + } + + tracks.push({ + encoded: encodeTrack(trackInfo), + info: trackInfo, + pluginInfo: { aid: item.aid, cid: item.cid || 0 } + }) + } + + return { loadType: 'search', data: tracks } + } catch (e) { + return { exception: { message: e.message, severity: 'fault' } } + } + } + + async resolve(url) { + const videoMatch = url.match(this.patterns[0]) + if (videoMatch) { + const bvidOrAvid = videoMatch[1] + try { + let apiUrl = `https://api.bilibili.com/x/web-interface/view?` + if (bvidOrAvid.startsWith('BV')) { + apiUrl += `bvid=${bvidOrAvid}` + } else { + apiUrl += `aid=${bvidOrAvid.substring(2)}` + } + + const { body } = await makeRequest(apiUrl, { + method: 'GET', + headers: { ...HEADERS, Cookie: this.cookie } + }) + + if (body.code !== 0) { + const errorMsg = + body.message === '啥都木有' + ? 'Video not found or deleted' + : body.message + throw new Error(`API Error: ${errorMsg}`) + } + + const data = body.data + const trackInfo = { + identifier: data.bvid, + isSeekable: true, + author: data.owner.name, + length: data.duration * 1000, + isStream: false, + position: 0, + title: data.title, + uri: `https://www.bilibili.com/video/${data.bvid}`, + artworkUrl: data.pic, + isrc: null, + sourceName: 'bilibili' + } + + if (data.pages && data.pages.length > 1) { + const tracks = data.pages.map((page) => { + const pageTrack = { ...trackInfo } + pageTrack.title = `${data.title} - ${page.part}` + pageTrack.length = page.duration * 1000 + pageTrack.identifier = `${data.bvid}?p=${page.page}` + pageTrack.uri = `https://www.bilibili.com/video/${data.bvid}?p=${page.page}` + + return { + encoded: encodeTrack(pageTrack), + info: pageTrack, + pluginInfo: { aid: data.aid, cid: page.cid, bvid: data.bvid } + } + }) + + return { + loadType: 'playlist', + data: { + info: { name: data.title, selectedTrack: 0 }, + tracks + } + } + } + + return { + loadType: 'track', + data: { + encoded: encodeTrack(trackInfo), + info: trackInfo, + pluginInfo: { aid: data.aid, cid: data.cid, bvid: data.bvid } + } + } + } catch (e) { + return { exception: { message: e.message, severity: 'fault' } } + } + } + + const bangumiMatch = url.match(this.patterns[1]) + if (bangumiMatch) { + const type = bangumiMatch[1] + const id = bangumiMatch[2] + + try { + let apiUrl + if (type === 'ep') { + apiUrl = `https://api.bilibili.com/pgc/view/web/season?ep_id=${id}` + } else { + apiUrl = `https://api.bilibili.com/pgc/view/web/season?season_id=${id}` + } + + const { body } = await makeRequest(apiUrl, { + method: 'GET', + headers: { ...HEADERS, Cookie: this.cookie } + }) + + if (body.code !== 0) + throw new Error(`Bangumi API Error: ${body.message}`) + + const result = body.result + const tracks = [] + + for (const ep of result.episodes) { + const trackInfo = { + identifier: `ep${ep.id}`, + isSeekable: true, + author: result.season_title, + length: ep.duration, + isStream: false, + position: 0, + title: ep.long_title ? 
`${ep.title} - ${ep.long_title}` : ep.title, + uri: ep.link, + artworkUrl: ep.cover, + isrc: null, + sourceName: 'bilibili' + } + + tracks.push({ + encoded: encodeTrack(trackInfo), + info: trackInfo, + pluginInfo: { + aid: ep.aid, + cid: ep.cid, + ep_id: ep.id, + bvid: ep.bvid + } + }) + } + + if (type === 'ep') { + const target = tracks.find((t) => t.pluginInfo.ep_id === id) + if (target) { + return { + loadType: 'track', + data: target + } + } + } + + return { + loadType: 'playlist', + data: { + info: { name: result.season_title, selectedTrack: 0 }, + tracks + } + } + } catch (e) { + return { exception: { message: e.message, severity: 'fault' } } + } + } + + const audioMatch = url.match(this.patterns[2]) + if (audioMatch) { + const type = audioMatch[1] + const id = audioMatch[2] + try { + if (type === 'au') { + const { body } = await makeRequest( + `https://www.bilibili.com/audio/music-service-c/web/song/info?sid=${id}`, + { + method: 'GET', + headers: { ...HEADERS, Cookie: this.cookie } + } + ) + + if (body.code !== 0) throw new Error(`Audio API Error: ${body.msg}`) + + const data = body.data + const trackInfo = { + identifier: `au${data.id}`, + isSeekable: true, + author: data.uname, + length: data.duration * 1000, + isStream: false, + position: 0, + title: data.title, + uri: `https://www.bilibili.com/audio/au${data.id}`, + artworkUrl: data.cover, + isrc: null, + sourceName: 'bilibili' + } + + return { + loadType: 'track', + data: { + encoded: encodeTrack(trackInfo), + info: trackInfo, + pluginInfo: { sid: data.id, type: 'audio' } + } + } + } else { + const { body } = await makeRequest( + `https://www.bilibili.com/audio/music-service-c/web/song/of-menu?sid=${id}&pn=1&ps=100`, + { + method: 'GET', + headers: { ...HEADERS, Cookie: this.cookie } + } + ) + + if (body.code !== 0) throw new Error(`Album API Error: ${body.msg}`) + + const tracks = body.data.data.map((song) => { + const trackInfo = { + identifier: `au${song.id}`, + isSeekable: true, + author: song.uname, + length: song.duration * 1000, + isStream: false, + position: 0, + title: song.title, + uri: `https://www.bilibili.com/audio/au${song.id}`, + artworkUrl: song.cover, + isrc: null, + sourceName: 'bilibili' + } + return { + encoded: encodeTrack(trackInfo), + info: trackInfo, + pluginInfo: { sid: song.id, type: 'audio' } + } + }) + + const { body: infoBody } = await makeRequest( + `https://www.bilibili.com/audio/music-service-c/web/menu/info?sid=${id}`, + { + method: 'GET', + headers: { ...HEADERS, Cookie: this.cookie } + } + ) + + return { + loadType: 'playlist', + data: { + info: { + name: infoBody?.data?.title || 'Bilibili Album', + selectedTrack: 0 + }, + tracks + } + } + } + } catch (e) { + return { exception: { message: e.message, severity: 'fault' } } + } + } + + const liveMatch = url.match(this.patterns[3]) + if (liveMatch) { + const id = liveMatch[1] + try { + const { body } = await makeRequest( + `https://api.live.bilibili.com/room/v1/Room/get_info?room_id=${id}`, + { + method: 'GET', + headers: { ...HEADERS, Cookie: this.cookie } + } + ) + + if (body.code !== 0) throw new Error(`Live API Error: ${body.msg}`) + + const data = body.data + if (data.live_status !== 1) throw new Error('Room is not live') + + const trackInfo = { + identifier: `live${data.room_id}`, + isSeekable: false, + author: `Room ${data.room_id}`, + length: 0, + isStream: true, + position: 0, + title: data.title, + uri: `https://live.bilibili.com/${data.room_id}`, + artworkUrl: data.user_cover, + isrc: null, + sourceName: 'bilibili' + } + + return { + 
loadType: 'track', + data: { + encoded: encodeTrack(trackInfo), + info: trackInfo, + pluginInfo: { room_id: data.room_id, type: 'live' } + } + } + } catch (e) { + return { exception: { message: e.message, severity: 'fault' } } + } + } + + const spaceMatch = url.match(this.patterns[4]) + if (spaceMatch) { + const mid = spaceMatch[1] + try { + const mixinKey = await this._getWbiKeys() + const query = this._signWbi( + { + mid: mid, + ps: 30, + tid: 0, + keyword: '', + order: 'pubdate' + }, + mixinKey + ) + + const { body } = await makeRequest( + `https://api.bilibili.com/x/space/wbi/arc/search?${query}`, + { + method: 'GET', + headers: { ...HEADERS, Cookie: this.cookie } + } + ) + + if (body.code !== 0) throw new Error(`Space API Error: ${body.message}`) + + const list = body.data?.list?.vlist + if (!list || list.length === 0) return { loadType: 'empty', data: {} } + + const tracks = list.map((item) => { + const durationParts = item.length.split(':').map(Number) + let durationMs = 0 + if (durationParts.length === 2) + durationMs = (durationParts[0] * 60 + durationParts[1]) * 1000 + else if (durationParts.length === 3) + durationMs = + (durationParts[0] * 3600 + + durationParts[1] * 60 + + durationParts[2]) * + 1000 + + const trackInfo = { + identifier: item.bvid, + isSeekable: true, + author: item.author, + length: durationMs, + isStream: false, + position: 0, + title: item.title, + uri: `https://www.bilibili.com/video/${item.bvid}`, + artworkUrl: item.pic, + isrc: null, + sourceName: 'bilibili' + } + + return { + encoded: encodeTrack(trackInfo), + info: trackInfo, + pluginInfo: { aid: item.aid, bvid: item.bvid, cid: 0 } + } + }) + + return { + loadType: 'playlist', + data: { + info: { name: `Uploads by ${list[0].author}`, selectedTrack: 0 }, + tracks + } + } + } catch (e) { + return { exception: { message: e.message, severity: 'fault' } } + } + } + + return { loadType: 'empty', data: {} } + } + + async getTrackUrl(track) { + try { + const isAudio = + track.pluginInfo?.type === 'audio' || track.identifier.startsWith('au') + const isLive = + track.pluginInfo?.type === 'live' || track.identifier.startsWith('live') + + if (isAudio) { + const sid = track.pluginInfo?.sid || track.identifier.replace('au', '') + const { body } = await makeRequest( + `https://www.bilibili.com/audio/music-service-c/web/url?sid=${sid}`, + { + method: 'GET', + headers: { ...HEADERS, Cookie: this.cookie } + } + ) + if (body.code !== 0 || !body.data.cdns) + throw new Error('Failed to get audio stream') + + return { + url: body.data.cdns[0], + protocol: 'https', + format: 'mp3' + } + } + + if (isLive) { + const roomId = + track.pluginInfo?.room_id || track.identifier.replace('live', '') + + const { body } = await makeRequest( + `https://api.live.bilibili.com/xlive/web-room/v2/index/getRoomPlayInfo?room_id=${roomId}&protocol=0,1&format=0,2&codec=0,1&qn=10000&platform=web&pt=web&no_playurl=0&mask=0`, + { + method: 'GET', + headers: { ...HEADERS, Cookie: this.cookie } + } + ) + + if (body.code !== 0 || !body.data?.playurl_info) + throw new Error('Failed to get live stream info') + + const streams = body.data.playurl_info.playurl.stream + let targetFormat = null + let formatType = 'flv' + let protocol = 'http' + + for (const stream of streams) { + if (stream.protocol_name === 'http_stream') { + const fmt = stream.format.find((f) => f.format_name === 'flv') + if (fmt?.codec && fmt.codec.length > 0) { + targetFormat = fmt.codec[0] + formatType = 'flv' + protocol = 'http' + break + } + } + } + + if (!targetFormat) { + for (const 
stream of streams) { + const fmt = stream.format[0] + if (fmt?.codec && fmt.codec.length > 0) { + targetFormat = fmt.codec[0] + formatType = fmt.format_name === 'ts' ? 'mpegts' : fmt.format_name + protocol = stream.protocol_name === 'http_hls' ? 'hls' : 'http' + break + } + } + } + + if (targetFormat) { + const urlInfo = targetFormat.url_info[0] + return { + url: `${urlInfo.host}${targetFormat.base_url}${urlInfo.extra}`, + protocol: protocol, + format: formatType, + additionalData: { + headers: { + ...HEADERS, + Cookie: this.cookie, + Referer: `https://live.bilibili.com/${roomId}` + } + } + } + } + + throw new Error('No supported stream format found') + } + + let _aid = track.pluginInfo?.aid + let cid = track.pluginInfo?.cid + const bvid = track.pluginInfo?.bvid || track.identifier.split('?')[0] + + if (!cid) { + const { body } = await makeRequest( + `https://api.bilibili.com/x/web-interface/view?bvid=${bvid}`, + { + method: 'GET', + headers: { ...HEADERS, Cookie: this.cookie } + } + ) + if (body.code !== 0) + throw new Error('Failed to fetch video metadata for stream') + + _aid = body.data.aid + + const pMatch = track.identifier.match(/\?p=(\d+)/) + const pageIndex = pMatch ? parseInt(pMatch[1], 10) : 1 + const page = body.data.pages.find((p) => p.page === pageIndex) + cid = page ? page.cid : body.data.cid + } + + const mixinKey = await this._getWbiKeys() + const query = this._signWbi( + { + bvid: bvid, + cid: cid, + qn: 120, + fnval: 16 + }, + mixinKey + ) + + const { body } = await makeRequest( + `https://api.bilibili.com/x/player/wbi/playurl?${query}`, + { + method: 'GET', + headers: { + 'User-Agent': + 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36', + Referer: 'https://www.bilibili.com/', + Cookie: this.cookie + } + } + ) + + if (body.code !== 0) throw new Error(`Playurl API Error: ${body.message}`) + + const durl = body.data.durl + const dash = body.data.dash + + let url = null + let type = 'mp4' + + if (dash) { + const audio = dash.audio ? dash.audio[0] : null + const video = dash.video ? 
dash.video[0] : null + + if (audio) { + url = audio.base_url || audio.backup_url?.[0] + type = 'm4a' + } else if (video) { + url = video.base_url || video.backup_url?.[0] + type = 'mp4' + } + } else if (durl && durl.length > 0) { + url = durl[0].url + type = 'mp4' + } + + if (!url) throw new Error('No playable stream found') + + return { + url: url, + protocol: 'https', + format: type, + additionalData: { + headers: { + Referer: 'https://www.bilibili.com/', + 'User-Agent': + 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36', + Cookie: this.cookie + } + } + } + } catch (e) { + return { exception: { message: e.message, severity: 'fault' } } + } + } + + async loadStream(decodedTrack, url, _protocol, additionalData) { + try { + let type = decodedTrack.format + + if (!type) { + if (url.includes('.m3u8')) type = 'mpegts' + else if (url.includes('.flv')) type = 'flv' + else type = 'mp4' + } + + const response = await http1makeRequest(url, { + method: 'GET', + headers: additionalData?.headers || {}, + streamOnly: true + }) + + if (response.error || !response.stream) { + throw response.error || new Error('Failed to get stream') + } + + const stream = new PassThrough() + + response.stream.on('data', (chunk) => stream.write(chunk)) + response.stream.on('end', () => stream.emit('finishBuffering')) + response.stream.on('error', (err) => stream.destroy(err)) + + return { stream: stream, type: type } + } catch (err) { + return { exception: { message: err.message, severity: 'common' } } + } + } +} diff --git a/src/sources/deezer.js b/src/sources/deezer.js index be711ce..44e8888 100644 --- a/src/sources/deezer.js +++ b/src/sources/deezer.js @@ -1,7 +1,7 @@ import { Buffer } from 'node:buffer' import crypto from 'node:crypto' import { PassThrough } from 'node:stream' -import { encodeTrack, logger, makeRequest, http1makeRequest } from '../utils.js' +import { encodeTrack, http1makeRequest, logger, makeRequest } from '../utils.js' const IV = Buffer.from([0, 1, 2, 3, 4, 5, 6, 7]) @@ -10,6 +10,7 @@ export default class DeezerSource { this.nodelink = nodelink this.config = nodelink.options this.searchTerms = ['dzsearch'] + this.recommendationTerm = ['dzrec'] this.patterns = [ /^https?:\/\/(?:www\.)?deezer\.com\/(?:[a-z]+(?:-[a-z]+)?\/)?(track|album|playlist|artist)\/(\d+)$/, /^https?:\/\/link\.deezer\.com\/s\/([a-zA-Z0-9]+)/ @@ -24,6 +25,24 @@ export default class DeezerSource { async setup() { logger('info', 'Sources', 'Initializing Deezer source...') + const cachedCsrf = this.nodelink.credentialManager.get('deezer_csrf_token') + const cachedLicense = this.nodelink.credentialManager.get( + 'deezer_license_token' + ) + const cachedCookie = this.nodelink.credentialManager.get('deezer_cookie') + + if (cachedCsrf && cachedLicense && cachedCookie) { + this.csrfToken = cachedCsrf + this.licenseToken = cachedLicense + this.cookie = cachedCookie + logger( + 'info', + 'Sources', + 'Loaded Deezer credentials from CredentialManager.' 
+ ) + return true + } + try { let initialCookie = '' const arl = this.config.sources?.deezer?.arl @@ -58,6 +77,22 @@ export default class DeezerSource { this.csrfToken = userDataRes.body.results.checkForm this.licenseToken = userDataRes.body.results.USER.OPTIONS.license_token + this.nodelink.credentialManager.set( + 'deezer_csrf_token', + this.csrfToken, + 24 * 60 * 60 * 1000 + ) + this.nodelink.credentialManager.set( + 'deezer_license_token', + this.licenseToken, + 24 * 60 * 60 * 1000 + ) + this.nodelink.credentialManager.set( + 'deezer_cookie', + this.cookie, + 24 * 60 * 60 * 1000 + ) + if (!this.csrfToken || !this.licenseToken) { throw new Error('CSRF Token or License Token not found in response.') } @@ -70,7 +105,11 @@ export default class DeezerSource { } } - async search(query) { + async search(query, sourceTerm) { + if (this.recommendationTerm.includes(sourceTerm)) { + return this.getRecommendations(query) + } + logger('debug', 'Sources', `Searching Deezer for: "${query}"`) const { body, error } = await makeRequest( @@ -99,6 +138,73 @@ export default class DeezerSource { return { loadType: 'search', data: tracks } } + async getRecommendations(query) { + try { + let method = 'song.getSearchTrackMix' + let payload = { sng_id: query, start_with_input_track: 'true' } + + if (query.startsWith('artist=')) { + method = 'song.getSmartRadio' + payload = { art_id: query.split('=')[1] } + } else if (query.startsWith('track=')) { + payload.sng_id = query.split('=')[1] + } else if (!/^\d+$/.test(query)) { + const searchRes = await this.search(query, 'dzsearch') + if (searchRes.loadType === 'search' && searchRes.data.length > 0) { + payload.sng_id = searchRes.data[0].info.identifier + } else { + return { loadType: 'empty', data: {} } + } + } + + const { body: result, error } = await makeRequest( + `https://www.deezer.com/ajax/gw-light.php?method=${method}&input=3&api_version=1.0&api_token=${this.csrfToken}`, + { + method: 'POST', + headers: { Cookie: this.cookie }, + body: payload, + disableBodyCompression: true + } + ) + + if (error || !result?.results?.data) { + return { loadType: 'empty', data: {} } + } + + const tracks = result.results.data.map((item) => { + const trackInfo = { + identifier: item.SNG_ID.toString(), + isSeekable: true, + author: item.ART_NAME, + length: item.DURATION * 1000, + isStream: false, + position: 0, + title: item.SNG_TITLE, + uri: `https://www.deezer.com/track/${item.SNG_ID}`, + artworkUrl: `https://e-cdns-images.dzcdn.net/images/cover/${item.ALB_PICTURE}/1000x1000-000000-80-0-0.jpg`, + isrc: item.ISRC || null, + sourceName: 'deezer' + } + return { + encoded: encodeTrack(trackInfo), + info: trackInfo, + pluginInfo: {} + } + }) + + return { + loadType: 'playlist', + data: { + info: { name: 'Deezer Recommendations', selectedTrack: 0 }, + pluginInfo: { type: 'recommendations' }, + tracks + } + } + } catch (e) { + return { exception: { message: e.message, severity: 'fault' } } + } + } + async resolve(url) { if (url.includes('link.deezer.com')) { const res = await http1makeRequest(url, { method: 'GET' }) @@ -207,7 +313,7 @@ export default class DeezerSource { loadType: 'artist', data: { info: { - name: `${artistData.name}\'s Top Tracks`, + name: `${artistData.name}'s Top Tracks`, selectedTrack: 0 }, pluginInfo: {}, @@ -243,62 +349,102 @@ export default class DeezerSource { } async getTrackUrl(decodedTrack) { - const { body: trackData } = await makeRequest( - 
`https://www.deezer.com/ajax/gw-light.php?method=song.getListData&input=3&api_version=1.0&api_token=${this.csrfToken}`, - { - method: 'POST', - headers: { Cookie: this.cookie }, - body: { sng_ids: [decodedTrack.identifier] }, - disableBodyCompression: true + let searchResult + if (decodedTrack.isrc) { + searchResult = await this.nodelink.sources.search( + 'youtube', + `"${decodedTrack.isrc}"`, + 'ytmsearch' + ) + if ( + searchResult.loadType !== 'search' || + searchResult.data.length === 0 + ) { + searchResult = await this.nodelink.sources.search( + 'youtube', + `${decodedTrack.title} ${decodedTrack.author}`, + 'ytmsearch' + ) } - ) - - if (trackData.error.length) { - const message = Object.values(trackData.error).join('; ') - return { exception: { message, severity: 'fault' } } } - const trackInfo = trackData.results.data[0] + if ( + !searchResult || + searchResult.loadType !== 'search' || + searchResult.data.length === 0 + ) { + const { body: trackData } = await makeRequest( + `https://www.deezer.com/ajax/gw-light.php?method=song.getListData&input=3&api_version=1.0&api_token=${this.csrfToken}`, + { + method: 'POST', + headers: { Cookie: this.cookie }, + body: { sng_ids: [decodedTrack.identifier] }, + disableBodyCompression: true + } + ) - const { body: streamData } = await makeRequest( - 'https://media.deezer.com/v1/get_url', - { - method: 'POST', - body: { - license_token: this.licenseToken, - media: [ - { - type: 'FULL', - formats: [ - { cipher: 'BF_CBC_STRIPE', format: 'FLAC' }, - { cipher: 'BF_CBC_STRIPE', format: 'MP3_256' }, - { cipher: 'BF_CBC_STRIPE', format: 'MP3_128' }, - { cipher: 'BF_CBC_STRIPE', format: 'MP3_MISC' } - ] - } - ], - track_tokens: [trackInfo.TRACK_TOKEN] - }, - disableBodyCompression: true + if (trackData.error.length) { + const message = Object.values(trackData.error).join('; ') + return { exception: { message, severity: 'fault' } } } - ) - if (streamData.error || !streamData?.data[0]?.media[0]?.sources[0]?.url) { + const trackInfo = trackData.results.data[0] + + const { body: streamData } = await makeRequest( + 'https://media.deezer.com/v1/get_url', + { + method: 'POST', + body: { + license_token: this.licenseToken, + media: [ + { + type: 'FULL', + formats: [ + { cipher: 'BF_CBC_STRIPE', format: 'FLAC' }, + { cipher: 'BF_CBC_STRIPE', format: 'MP3_256' }, + { cipher: 'BF_CBC_STRIPE', format: 'MP3_128' }, + { cipher: 'BF_CBC_STRIPE', format: 'MP3_MISC' } + ] + } + ], + track_tokens: [trackInfo.TRACK_TOKEN] + }, + disableBodyCompression: true + } + ) + + if (streamData.error || !streamData?.data[0]?.media[0]?.sources[0]?.url) { + return { + exception: { + message: 'Could not get stream URL.', + severity: 'common' + } + } + } + + const streamInfo = streamData.data[0].media[0] return { - exception: { message: 'Could not get stream URL.', severity: 'common' } + url: streamInfo.sources[0].url, + protocol: 'https', + format: streamInfo.format.startsWith('MP3') ? 'mp3' : 'flac', + additionalData: trackInfo } } - const streamInfo = streamData.data[0].media[0] - return { - url: streamInfo.sources[0].url, - protocol: 'https', - format: streamInfo.format.startsWith('MP3') ? 
'mp3' : 'flac', - additionalData: trackInfo - } + const bestMatch = getBestMatch(searchResult.data, decodedTrack) + if (!bestMatch) + return { + exception: { + message: 'No suitable alternative found.', + severity: 'fault' + } + } + + const streamInfo = await this.nodelink.sources.getTrackUrl(bestMatch.info) + return { newTrack: bestMatch, ...streamInfo } } - loadStream(decodedTrack, url, format, additionalData) { + loadStream(decodedTrack, url, _format, additionalData) { return new Promise(async (resolve) => { try { const outputStream = new PassThrough() diff --git a/src/sources/flowery.js b/src/sources/flowery.js index d8f83af..189c1a5 100644 --- a/src/sources/flowery.js +++ b/src/sources/flowery.js @@ -21,11 +21,29 @@ export default class FlowerySource { async _fetchVoices() { try { + const cachedVoices = this.nodelink.credentialManager.get('flowery_voices') + if (cachedVoices) { + this.voiceMap = new Map(Object.entries(cachedVoices.voiceMap)) + this.defaultVoiceId = cachedVoices.defaultVoiceId + logger( + 'debug', + 'Flowery', + `Loaded ${this.voiceMap.size} voices from CredentialManager.` + ) + return + } + const voicesEndpoint = 'https://api.flowery.pw/v1/tts/voices' - const { body, error, statusCode } = await makeRequest(voicesEndpoint, { method: 'GET' }) + const { body, error, statusCode } = await makeRequest(voicesEndpoint, { + method: 'GET' + }) if (error || statusCode !== 200 || !body || !Array.isArray(body.voices)) { - logger('error', 'Flowery', `Failed to fetch voices from ${voicesEndpoint}: ${error?.message || `Status ${statusCode}`}`) + logger( + 'error', + 'Flowery', + `Failed to fetch voices from ${voicesEndpoint}: ${error?.message || `Status ${statusCode}`}` + ) return } @@ -36,12 +54,29 @@ export default class FlowerySource { if (body.default?.id) { this.defaultVoiceId = body.default.id - logger('info', 'Flowery', `Default voice set to: ${body.default.name} (${body.default.id})`) + logger( + 'info', + 'Flowery', + `Default voice set to: ${body.default.name} (${body.default.id})` + ) } else if (body.voices.length > 0) { this.defaultVoiceId = body.voices[0].id - logger('info', 'Flowery', `Using first available voice as default: ${body.voices[0].name} (${body.voices[0].id})`) + logger( + 'info', + 'Flowery', + `Using first available voice as default: ${body.voices[0].name} (${body.voices[0].id})` + ) } + this.nodelink.credentialManager.set( + 'flowery_voices', + { + voiceMap: Object.fromEntries(this.voiceMap), + defaultVoiceId: this.defaultVoiceId + }, + 24 * 60 * 60 * 1000 + ) + logger('debug', 'Flowery', `Fetched ${this.voiceMap.size} voices.`) } catch (e) { logger('error', 'Flowery', `Exception fetching voices: ${e.message}`) @@ -76,12 +111,12 @@ export default class FlowerySource { async resolve(url) { try { let text = '' - let params = {} + const params = {} if (url.startsWith('ftts://')) { const pathAndQuery = url.slice(7) const splitIdx = pathAndQuery.indexOf('?') - + if (splitIdx !== -1) { text = decodeURIComponent(pathAndQuery.substring(0, splitIdx)) const queryStr = pathAndQuery.substring(splitIdx + 1) @@ -99,7 +134,7 @@ export default class FlowerySource { if (!text) return { loadType: 'empty', data: {} } const apiUrl = this._buildUrl(text, params) - + const track = this.buildTrack({ title: text.length > 50 ? 
`${text.substring(0, 47)}...` : text, author: 'Flowery TTS', @@ -108,7 +143,6 @@ export default class FlowerySource { }) return { loadType: 'track', data: track } - } catch (e) { return { exception: { message: e.message, severity: 'fault', cause: 'Exception' } @@ -131,23 +165,38 @@ export default class FlowerySource { if (overrides.silence !== undefined) silence = overrides.silence if (overrides.speed !== undefined) speed = overrides.speed } - - let voiceId = this.voiceMap.get(voiceName.toLowerCase()) || this.defaultVoiceId + + let voiceId = + this.voiceMap.get(voiceName.toLowerCase()) || this.defaultVoiceId if (!voiceId) { - logger('warn', 'Flowery', `Voice "${voiceName}" not found and no default voice available. Using a fallback empty voice ID.`) + logger( + 'warn', + 'Flowery', + `Voice "${voiceName}" not found and no default voice available. Using a fallback empty voice ID.` + ) voiceId = 'default' // Fallback to a generic 'default' if no ID is found } let audioFormat = 'mp3' const quality = this.nodelink.options.audio?.quality || 'high' - + switch (quality) { - case 'high': audioFormat = 'wav'; break - case 'medium': audioFormat = 'flac'; break - case 'low': audioFormat = 'ogg_opus'; break - case 'lowest': audioFormat = 'mp3'; break - default: audioFormat = 'wav'; break + case 'high': + audioFormat = 'wav' + break + case 'medium': + audioFormat = 'flac' + break + case 'low': + audioFormat = 'ogg_opus' + break + case 'lowest': + audioFormat = 'mp3' + break + default: + audioFormat = 'wav' + break } const baseUrl = 'https://api.flowery.pw/v1/tts' @@ -196,7 +245,7 @@ export default class FlowerySource { else if (audioFormat === 'ogg_opus') format = 'opus' else if (audioFormat === 'mp3') format = 'mp3' } - } catch (e) { + } catch (_e) { // ignore } diff --git a/src/sources/gaana.js b/src/sources/gaana.js new file mode 100644 index 0000000..0148d2a --- /dev/null +++ b/src/sources/gaana.js @@ -0,0 +1,471 @@ +/* +* Credits: https://github.com/southctrl; adapted for NodeLink +*/ + +import { PassThrough } from 'node:stream' +import { encodeTrack, http1makeRequest, logger } from '../utils.js' + +const USER_AGENT = + 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/133.0.0.0 Safari/537.36' + +const BASE_HEADERS = { + 'User-Agent': USER_AGENT, + Accept: '*/*', + Origin: 'https://gaana.com', + Referer: 'https://gaana.com/' +} + +export default class GaanaSource { + constructor(nodelink) { + this.nodelink = nodelink + this.config = nodelink.options.sources?.gaana || {} + this.searchTerms = ['gaanasearch'] + this.patterns = [ + /^@?(?:https?:\/\/)?(?:www\.)?gaana\.com\/(?song|album|playlist|artist)\/(?[\w-]+)(?:[?#].*)?$/ + ] + this.priority = 70 + this.baseUrl = null + this.maxSearchResults = nodelink.options.maxSearchResults || 10 + const maxAlbumPlaylistLength = nodelink.options.maxAlbumPlaylistLength || 100 + this.playlistLoadLimit = this.config.playlistLoadLimit ?? maxAlbumPlaylistLength + this.albumLoadLimit = this.config.albumLoadLimit ?? maxAlbumPlaylistLength + this.artistLoadLimit = this.config.artistLoadLimit ?? maxAlbumPlaylistLength + this.streamQuality = this.config.streamQuality || 'high' + } + + async setup() { + if (this.config.enabled === false) return false + + if (!this.config.apiUrl) { + logger('warn', 'Gaana', 'Missing apiUrl for Gaana source. Disable or configure sources.gaana.apiUrl.') + return false + } + + this.baseUrl = this.config.apiUrl.endsWith('/') ? 
this.config.apiUrl.slice(0, -1) : this.config.apiUrl + + logger('info', 'Sources', 'Loaded Gaana source.') + return true + } + + async search(query, sourceTerm, searchType = 'track') { + try { + const endpointMap = { + track: 'songs', + album: 'albums', + playlist: 'playlists', + artist: 'artists' + } + + const endpoint = endpointMap[searchType] || 'songs' + const url = `/api/search/${endpoint}?q=${encodeURIComponent(query)}&limit=${this.maxSearchResults}` + + const data = await this.getJson(url) + if (!data) return { loadType: 'empty', data: {} } + + if (searchType === 'track') { + const tracks = data.map((item) => this.mapTrack(item)).filter(Boolean) + return tracks.length ? { loadType: 'search', data: tracks } : { loadType: 'empty', data: {} } + } + + const results = data.map((item) => this.mapCollectionResult(item, searchType)).filter(Boolean) + return results.length ? { loadType: 'search', data: results } : { loadType: 'empty', data: {} } + } catch (e) { + logger('error', 'Gaana', `Search error: ${e.message}`) + return { exception: { message: e.message, severity: 'fault' } } + } + } + + async resolve(url) { + const match = url.match(this.patterns[0]) + if (!match?.groups) return { loadType: 'empty', data: {} } + + const { type, seokey } = match.groups + if (!type || !seokey) return { loadType: 'empty', data: {} } + + try { + if (type === 'song') return await this.getSong(seokey) + if (type === 'album') return await this.getAlbum(seokey) + if (type === 'playlist') return await this.getPlaylist(seokey) + if (type === 'artist') return await this.getArtist(seokey) + return { loadType: 'empty', data: {} } + } catch (e) { + logger('error', 'Gaana', `Resolve error: ${e.message}`) + return { exception: { message: e.message, severity: 'fault' } } + } + } + + async getTrackUrl(decodedTrack) { + try { + let trackId = decodedTrack.identifier + + if (!/^\d+$/.test(String(trackId))) { + const trackData = await this.getJson(`/api/songs/${encodeURIComponent(trackId)}`) + if (!trackData?.track_id) { + return { exception: { message: 'Track metadata not found for stream.', severity: 'common' } } + } + trackId = trackData.track_id + } + + const streamData = await this.getJson( + `/api/stream/${encodeURIComponent(trackId)}?quality=${encodeURIComponent(this.streamQuality)}` + ) + + if (!streamData) { + return { exception: { message: 'Stream URL not found.', severity: 'common' } } + } + + const hlsUrl = streamData.hlsUrl || streamData.hls_url || null + const url = hlsUrl || streamData.url + if (!url) { + return { exception: { message: 'No playable stream URL.', severity: 'common' } } + } + + const segments = Array.isArray(streamData.segments) + ? streamData.segments.map((seg) => seg.url || seg).filter(Boolean) + : [] + + const isHls = Boolean(hlsUrl) + return { + url, + protocol: isHls ? 'hls' : 'https', + format: isHls ? 'mpegts' : streamData.format || 'mp4', + additionalData: isHls + ? 
{} + : { + initUrl: streamData.initUrl || streamData.init_url || null, + segments + } + } + } catch (e) { + logger('error', 'Gaana', `Stream resolve error: ${e.message}`) + return { exception: { message: e.message, severity: 'fault' } } + } + } + + async loadStream(track, url, protocol, additionalData) { + if (protocol === 'hls') { + const stream = new PassThrough() + this.streamHlsPlaylist(stream, url) + return { stream, type: 'mpegts' } + } + + if (additionalData?.segments?.length) { + const stream = new PassThrough() + this.streamSegments(stream, additionalData.initUrl, additionalData.segments) + return { stream, type: 'mp4' } + } + + const stream = new PassThrough() + this.streamUrl(stream, url) + return { stream, type: 'mp4' } + } + + async streamUrl(outputStream, url) { + try { + const { stream, error, statusCode } = await http1makeRequest(url, { method: 'GET', streamOnly: true }) + + if (error || statusCode !== 200 || !stream) { + throw new Error(error?.message || `Stream status ${statusCode}`) + } + + await new Promise((resolve, reject) => { + stream.on('data', (chunk) => { + if (!outputStream.destroyed) outputStream.write(chunk) + }) + stream.on('end', resolve) + stream.on('error', reject) + }) + } catch (e) { + logger('warn', 'Gaana', `Stream error: ${e.message}`) + if (!outputStream.destroyed) outputStream.emit('error', e) + } finally { + if (!outputStream.destroyed) { + outputStream.emit('finishBuffering') + outputStream.end() + } + } + } + + async streamSegments(outputStream, initUrl, segments) { + const queue = [] + if (initUrl) queue.push(initUrl) + queue.push(...segments) + + try { + for (const segmentUrl of queue) { + if (outputStream.destroyed) break + await this.streamUrlChunk(outputStream, segmentUrl) + } + } catch (e) { + if (!outputStream.destroyed) outputStream.emit('error', e) + } finally { + if (!outputStream.destroyed) { + outputStream.emit('finishBuffering') + outputStream.end() + } + } + } + + async streamHlsPlaylist(outputStream, playlistUrl) { + try { + const playlist = await this.fetchText(playlistUrl) + if (!playlist) throw new Error('Empty HLS playlist') + + const lines = playlist + .split('\n') + .map((l) => l.trim()) + .filter(Boolean) + + if (lines.some((l) => l.startsWith('#EXTINF'))) { + await this.streamHlsSegments(outputStream, playlistUrl, lines) + return + } + + const audioTags = lines.filter( + (l) => l.startsWith('#EXT-X-MEDIA') && l.includes('TYPE=AUDIO') && l.includes('URI="') + ) + + if (audioTags.length) { + const preferred = audioTags.find((l) => /DEFAULT=YES/.test(l)) || audioTags[audioTags.length - 1] + const uri = preferred.match(/URI="([^"]+)"/)?.[1] + if (uri) { + const audioUrl = new URL(uri, playlistUrl).toString() + const audioPlaylist = await this.fetchText(audioUrl) + const audioLines = audioPlaylist + .split('\n') + .map((l) => l.trim()) + .filter(Boolean) + + await this.streamHlsSegments(outputStream, audioUrl, audioLines) + return + } + } + + const variantUrl = lines.find((l) => !l.startsWith('#')) + if (!variantUrl) throw new Error('No HLS variant found') + + const resolved = new URL(variantUrl, playlistUrl).toString() + const variantPlaylist = await this.fetchText(resolved) + const variantLines = variantPlaylist + .split('\n') + .map((l) => l.trim()) + .filter(Boolean) + + await this.streamHlsSegments(outputStream, resolved, variantLines) + } catch (e) { + if (!outputStream.destroyed) outputStream.emit('error', e) + } finally { + if (!outputStream.destroyed) { + outputStream.emit('finishBuffering') + outputStream.end() + } + } 
+ } + + async streamHlsSegments(outputStream, baseUrl, lines) { + const segments = [] + + for (let i = 0; i < lines.length; i++) { + if (lines[i].startsWith('#EXTINF')) { + const uri = lines[i + 1] + if (uri && !uri.startsWith('#')) segments.push(new URL(uri, baseUrl).toString()) + } + } + + for (const segmentUrl of segments) { + if (outputStream.destroyed) break + const ok = await this.streamUrlChunk(outputStream, segmentUrl) + if (!ok) break + } + } + + async streamUrlChunk(outputStream, url) { + try { + const { stream, statusCode, error } = await http1makeRequest(url, { + method: 'GET', + streamOnly: true, + headers: BASE_HEADERS + }) + + if (error || statusCode !== 200 || !stream) { + logger('warn', 'Gaana', `Segment fetch failed: ${error?.message || statusCode}`) + return false + } + + await new Promise((resolve, reject) => { + stream.on('data', (chunk) => { + if (!outputStream.destroyed) outputStream.write(chunk) + }) + stream.on('end', resolve) + stream.on('error', reject) + }) + + return true + } catch (e) { + logger('warn', 'Gaana', `Segment stream error: ${e.message}`) + return false + } + } + + async fetchText(url) { + const { body, statusCode, error } = await http1makeRequest(url, { + method: 'GET', + headers: BASE_HEADERS, + disableBodyCompression: true + }) + + if (error || statusCode !== 200 || !body) { + throw new Error(error?.message || `Playlist status ${statusCode}`) + } + + return typeof body === 'string' ? body : JSON.stringify(body) + } + + mapTrack(track) { + const title = track?.title || track?.name + if (!title) return null + + const duration = Number(track?.duration || 0) * 1000 + if (!duration) return null + + const author = this.formatArtists(track?.artists) || 'unknown' + const identifier = track?.track_id ? String(track.track_id) : String(track?.seokey || '') + if (!identifier) return null + + const seokey = track?.seokey || null + const uri = track?.song_url || (seokey ? `https://gaana.com/song/${seokey}` : null) + + const info = { + identifier, + isSeekable: true, + author, + length: duration, + isStream: false, + position: 0, + title, + uri, + artworkUrl: track?.artworkUrl || track?.artwork || null, + isrc: track?.isrc || null, + sourceName: 'gaana' + } + + return { encoded: encodeTrack(info), info, pluginInfo: {} } + } + + mapCollectionResult(item, type) { + const title = item?.title || item?.name || item?.playlist_name || item?.album || null + if (!title) return null + + const author = + this.formatArtists(item?.artists) || item?.author || item?.artist || 'Gaana' + + const seokey = item?.seokey || item?.playlist_id || item?.artist_id || '' + const url = + item?.album_url || + item?.playlist_url || + item?.artist_url || + (seokey ? `https://gaana.com/${type}/${seokey}` : null) + + const info = { + identifier: String(seokey || title), + isSeekable: true, + author, + length: 0, + isStream: false, + position: 0, + title, + uri: url, + artworkUrl: item?.artworkUrl || item?.artwork || null, + isrc: null, + sourceName: 'gaana' + } + + return { encoded: encodeTrack(info), info, pluginInfo: { type } } + } + + async getSong(seokey) { + const data = await this.getJson(`/api/songs/${encodeURIComponent(seokey)}`) + if (!data) return { loadType: 'empty', data: {} } + + const track = this.mapTrack(data) + return track ? 
{ loadType: 'track', data: track } : { loadType: 'empty', data: {} } + } + + async getAlbum(seokey) { + const data = await this.getJson(`/api/albums/${encodeURIComponent(seokey)}`) + if (!data) return { loadType: 'empty', data: {} } + return this.buildPlaylist(data, 'album') + } + + async getPlaylist(seokey) { + const data = await this.getJson(`/api/playlists/${encodeURIComponent(seokey)}`) + if (!data) return { loadType: 'empty', data: {} } + + const playlist = data.playlist || data + return this.buildPlaylist(playlist, 'playlist') + } + + async getArtist(seokey) { + const data = await this.getJson(`/api/artists/${encodeURIComponent(seokey)}`) + if (!data) return { loadType: 'empty', data: {} } + return this.buildPlaylist(data, 'artist') + } + + buildPlaylist(data, type) { + const name = data?.title || data?.name || data?.playlist_name || 'Gaana' + const tracksArray = data?.tracks || data?.top_tracks || data?.songs || [] + const tracks = tracksArray + .map((item) => this.mapTrack(item)) + .filter(Boolean) + .slice(0, this.getLoadLimit(type)) + + const infoName = type === 'artist' ? `${name}'s Top Tracks` : name + + return { + loadType: 'playlist', + data: { + info: { name: infoName, selectedTrack: 0 }, + pluginInfo: { type }, + tracks + } + } + } + + getLoadLimit(type) { + if (type === 'album') return this.albumLoadLimit + if (type === 'artist') return this.artistLoadLimit + return this.playlistLoadLimit + } + + async getJson(path) { + let finalPath = path.startsWith('/') ? path : `/${path}` + if (this.baseUrl.endsWith('/api') && finalPath.startsWith('/api')) { + finalPath = finalPath.slice(4) + } + + const { body, statusCode, error } = await http1makeRequest(`${this.baseUrl}${finalPath}`, { + method: 'GET', + headers: { + Accept: 'application/json', + 'User-Agent': USER_AGENT, + Referer: 'https://gaana.com/' + }, + disableBodyCompression: true + }) + + if (error || statusCode !== 200 || !body) return null + + if (typeof body === 'object' && body.success !== undefined) { + if (!body.success) return null + return body.data || body + } + + return body + } + + formatArtists(artists) { + if (!artists) return null + if (Array.isArray(artists)) return artists.map((a) => a?.name || a).filter(Boolean).join(', ') + return String(artists) + } +} diff --git a/src/sources/genius.js b/src/sources/genius.js new file mode 100644 index 0000000..1fd7d54 --- /dev/null +++ b/src/sources/genius.js @@ -0,0 +1,295 @@ +import { encodeTrack, http1makeRequest, logger } from '../utils.js' + +const _DURATION_TOLERANCE = 0.15 + +export default class GeniusSource { + constructor(nodelink) { + this.nodelink = nodelink + this.patterns = [ + /https?:\/\/(?:www\.)?genius\.com\/(?:videos|a\/)?([\w-]+)/ + ] + this.searchTerms = [] + this.priority = 100 + } + + async setup() { + logger('info', 'Sources', 'Loaded Genius source (Video/Audio/Article).') + return true + } + + async search(_query) { + return { loadType: 'empty', data: {} } + } + + async resolve(url) { + const match = url.match(this.patterns[0]) + if (!match) return null + + try { + const { body, statusCode } = await http1makeRequest(url, { + method: 'GET', + headers: { + 'User-Agent': + 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36', + Accept: + 'text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8', + 'Accept-Language': 'en-US,en;q=0.5', + 'Cache-Control': 'no-cache', + Pragma: 'no-cache' + }, + disableBodyCompression: true + }) + + if (statusCode !== 200) { + 
throw new Error(`Genius returned status ${statusCode}`) + } + + let songInfo = null + + const scriptRegex = + /]*>\s*window\.__PRELOADED_STATE__\s*=\s*JSON\.parse\((.+?)\);\s*<\/script>/s + const scriptMatch = body.match(scriptRegex) + + if (scriptMatch) { + try { + const jsonParseArg = scriptMatch[1] + const parseFunction = new Function( + `return JSON.parse(${jsonParseArg})` + ) + songInfo = parseFunction() + } catch (e) { + logger('debug', 'Genius', `JavaScript execution failed: ${e.message}`) + } + } + + if (!songInfo) { + throw new Error('Could not extract Genius metadata') + } + + const songPage = songInfo.songPage || {} + const songId = songPage.song + + if (!songId) { + throw new Error('Song ID not found in extracted data') + } + + const trackingData = songPage.trackingData || [] + const title = + trackingData.find((x) => x.key === 'Title')?.value || 'Unknown Title' + const artist = + trackingData.find((x) => x.key === 'Primary Artist')?.value || + 'Unknown Artist' + + const entities = songInfo.entities || {} + const songs = entities.songs || {} + let songData = songs[songId] + + if (!songData) { + const firstKey = Object.keys(songs)[0] + if (firstKey) { + songData = songs[firstKey] + } else { + throw new Error('Song data not found in entities') + } + } + + const media = songData?.media || [] + const tracks = [] + + for (const m of media) { + if ((m.type === 'video' || m.type === 'audio') && m.url) { + const trackInfo = { + identifier: m.url, + isSeekable: true, + author: artist, + length: 0, + isStream: false, + position: 0, + title: `${title} (${m.provider})`, + uri: m.url, + artworkUrl: songData.headerImageUrl || songData.songArtImageUrl, + isrc: null, + sourceName: 'genius' + } + + try { + const result = await this.nodelink.sources.resolve(m.url) + if (result.loadType === 'track') { + const info = result.data.info + trackInfo.title = info.title + trackInfo.author = info.author + trackInfo.length = info.length + trackInfo.isStream = info.isStream + trackInfo.isSeekable = info.isSeekable + trackInfo.artworkUrl = info.artworkUrl || trackInfo.artworkUrl + trackInfo.isrc = info.isrc + } else if ( + result.loadType === 'playlist' && + result.data.tracks.length > 0 + ) { + const info = result.data.tracks[0].info + trackInfo.title = info.title + trackInfo.length = info.length + trackInfo.artworkUrl = info.artworkUrl || trackInfo.artworkUrl + } + } catch (e) { + logger( + 'debug', + 'Genius', + `Failed to resolve media URL ${m.url}: ${e.message}; using basic info.` + ) + } + + tracks.push({ + encoded: encodeTrack(trackInfo), + info: trackInfo, + pluginInfo: { provider: m.provider } + }) + } + } + + if (tracks.length === 0) { + const trackInfo = { + identifier: `genius:${songId}`, + isSeekable: true, + author: artist, + length: 0, + isStream: false, + position: 0, + title: title, + uri: url, + artworkUrl: songData?.headerImageUrl || songData?.songArtImageUrl, + isrc: null, + sourceName: 'genius' + } + tracks.push({ + encoded: encodeTrack(trackInfo), + info: trackInfo, + pluginInfo: {} + }) + } + + return { + loadType: 'playlist', + data: { + info: { name: `${title} - ${artist} (Genius)`, selectedTrack: 0 }, + tracks + } + } + } catch (e) { + logger('error', 'Genius', `Error resolving URL: ${e.message}`) + return { exception: { message: e.message, severity: 'fault' } } + } + } + + async getTrackUrl(decodedTrack) { + if (decodedTrack.uri?.startsWith('http')) { + try { + const result = await this.nodelink.sources.resolve(decodedTrack.uri) + + if ( + result && + (result.loadType === 'track' 
|| + (result.loadType === 'playlist' && result.data.tracks.length > 0)) + ) { + const targetTrack = + result.loadType === 'track' ? result.data : result.data.tracks[0] + const streamInfo = await this.nodelink.sources.getTrackUrl( + targetTrack.info + ) + return { newTrack: targetTrack, ...streamInfo } + } + } catch (e) { + logger( + 'debug', + 'Genius', + `Direct resolve failed for ${decodedTrack.uri}: ${e.message}` + ) + } + } + + const query = `${decodedTrack.title} ${decodedTrack.author}` + try { + const searchResult = await this.nodelink.sources.searchWithDefault(query) + + if ( + searchResult.loadType !== 'search' || + searchResult.data.length === 0 + ) { + return { + exception: { + message: 'No alternative stream found via default search.', + severity: 'fault' + } + } + } + + const bestMatch = await this._findBestMatch( + searchResult.data, + 0, + decodedTrack + ) + + if (!bestMatch) { + return { + exception: { + message: 'No suitable alternative stream found after filtering.', + severity: 'fault' + } + } + } + + const streamInfo = await this.nodelink.sources.getTrackUrl(bestMatch.info) + return { newTrack: bestMatch, ...streamInfo } + } catch (e) { + return { exception: { message: e.message, severity: 'fault' } } + } + } + + async _findBestMatch(list, _target, original) { + const normalizedOriginalTitle = this._normalize(original.title) + const normalizedOriginalAuthor = this._normalize(original.author) + + const scoredCandidates = list + .map((item) => { + const normalizedItemTitle = this._normalize(item.info.title) + const normalizedItemAuthor = this._normalize(item.info.author) + let score = 0 + + if ( + normalizedItemTitle.includes(normalizedOriginalTitle) || + normalizedOriginalTitle.includes(normalizedItemTitle) + ) { + score += 100 + } + + if ( + normalizedItemAuthor.includes(normalizedOriginalAuthor) || + normalizedOriginalAuthor.includes(normalizedItemAuthor) + ) { + score += 100 + } + + return { item, score } + }) + .filter((c) => c.score >= 0) + + if (scoredCandidates.length === 0) { + return null + } + + scoredCandidates.sort((a, b) => b.score - a.score) + return scoredCandidates[0].item + } + + _normalize(str) { + if (!str) return '' + return str + .toLowerCase() + .replace(/feat\.?/g, '') + .replace(/ft\.?/g, '') + .replace(/(\s*\(.*\)\s*)/g, '') + .replace(/[^\w\s]/g, '') + .trim() + } +} diff --git a/src/sources/google-tts.js b/src/sources/google-tts.js index 1b0f5b6..4388f26 100644 --- a/src/sources/google-tts.js +++ b/src/sources/google-tts.js @@ -6,7 +6,7 @@ export default class GoogleTTSSource { this.nodelink = nodelink this.language = this.nodelink.options.sources?.googleTts?.language || 'en-US' - this.searchTerms = ['gtts'] + this.searchTerms = ['gtts', 'speak'] this.baseUrl = 'https://translate.google.com' this.priority = 50 } diff --git a/src/sources/http.js b/src/sources/http.js index 37b2e57..4951cab 100644 --- a/src/sources/http.js +++ b/src/sources/http.js @@ -1,9 +1,9 @@ -import { encodeTrack, logger, http1makeRequest } from '../utils.js' +import { encodeTrack, http1makeRequest, logger } from '../utils.js' export default class HttpSource { constructor(nodelink) { this.nodelink = nodelink - this.searchTerms = ['http'] + this.searchTerms = [] this.priority = 10 } @@ -103,7 +103,7 @@ export default class HttpSource { return { url: info.uri, protocol: 'http' } } - async loadStream(decodedTrack, url) { + async loadStream(_decodedTrack, url) { try { const opts = { method: 'GET', diff --git a/src/sources/instagram.js b/src/sources/instagram.js index 
c3ae975..a6e6fe1 100644
--- a/src/sources/instagram.js
+++ b/src/sources/instagram.js
@@ -25,7 +25,21 @@ export default class InstagramSource {
}
async setup() {
- logger('info', 'Sources', 'Fetching Instagram API parameters...')
+ logger('info', 'Sources', 'Checking Instagram API parameters...')
+
+ const cachedConfig = this.nodelink.credentialManager.get(
+ 'instagram_api_config'
+ )
+ if (cachedConfig) {
+ this.apiConfig = { ...this.apiConfig, ...cachedConfig }
+ logger(
+ 'info',
+ 'Sources',
+ 'Loaded Instagram parameters from CredentialManager.'
+ )
+ return true
+ }
+
try {
const response = await makeRequest('https://www.instagram.com/', {
method: 'GET',
@@ -64,6 +78,17 @@ export default class InstagramSource {
this.apiConfig.fbLsd = fbLsd
if (docIdPost) this.apiConfig.docId_post = docIdPost
+ this.nodelink.credentialManager.set(
+ 'instagram_api_config',
+ {
+ csrfToken: this.apiConfig.csrfToken,
+ igAppId: this.apiConfig.igAppId,
+ fbLsd: this.apiConfig.fbLsd,
+ docId_post: this.apiConfig.docId_post
+ },
+ 24 * 60 * 60 * 1000
+ )
+
logger('info', 'Sources', 'Loaded Instagram source.')
return true
} catch (e) {
@@ -90,7 +115,7 @@ export default class InstagramSource {
}
for (const [index, pattern] of this.patterns.entries()) {
const match = url.match(pattern)
- if (match && match[1]) {
+ if (match?.[1]) {
if (index === 0) {
return { id: match[1], error: null, type: 'audio' }
}
@@ -253,7 +278,7 @@ export default class InstagramSource {
}
try {
responseData = JSON.parse(responseData)
- } catch (e) {
+ } catch (_e) {
return {
data: null,
exception: {
@@ -283,7 +308,8 @@ export default class InstagramSource {
return {
data: null,
exception: {
- message: 'Invalid data structure in Audio API JSON response (no payload or metadata)',
+ message:
+ 'Invalid data structure in Audio API JSON response (no payload or metadata)',
severity: 'fault'
}
}
@@ -326,14 +352,16 @@ export default class InstagramSource {
audioUrl = musicAsset?.progressive_download_url
if (!audioUrl && musicConsumption?.dash_manifest) {
- const urlMatch = musicConsumption.dash_manifest.match(/<BaseURL>(.*?)<\/BaseURL>/)
- if (urlMatch && urlMatch[1]) {
+ const urlMatch = musicConsumption.dash_manifest.match(
+ /<BaseURL>(.*?)<\/BaseURL>/
+ )
+ if (urlMatch?.[1]) {
audioUrl = urlMatch[1].replace(/&amp;/g, '&')
}
}
if (!audioUrl) {
- audioUrl = audioInfo.progressive_download_url
+ audioUrl = audioInfo.progressive_download_url
}
artist = musicAsset?.artist_name || 'User Unknown'
@@ -433,7 +461,7 @@ export default class InstagramSource {
if (typeof responseData === 'string') {
try {
responseData = JSON.parse(responseData)
- } catch (e) {
+ } catch (_e) {
return {
data: null,
exception: {
@@ -635,7 +663,7 @@ export default class InstagramSource {
}
}
- async loadStream(decodedTrack, url, protocol, additionalData) {
+ async loadStream(decodedTrack, url, _protocol, _additionalData) {
try {
const options = {
method: 'GET',
@@ -678,7 +706,7 @@ export default class InstagramSource {
}
}
- async search(query, type) {
+ async search(query, _type) {
if (this.isLinkMatch(query)) {
return this.resolve(query)
}
diff --git a/src/sources/jiosaavn.js b/src/sources/jiosaavn.js
index 7961732..ab832cb 100644
--- a/src/sources/jiosaavn.js
+++ b/src/sources/jiosaavn.js
@@ -4,8 +4,9 @@ import { encodeTrack, http1makeRequest, logger } from '../utils.js'
const API_BASE = 'https://www.jiosaavn.com/api.php'
const J_BUFFER = Buffer.from('38346591')
const HEADERS = {
- 'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) 
Chrome/115.0.0.0 Safari/537.36',
- 'Accept': 'application/json'
+ 'User-Agent':
+ 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/115.0.0.0 Safari/537.36',
+ Accept: 'application/json'
}
const HTML_ENTITY_REGEX = /&(?:quot|amp);/g
const ENTITY_MAP = { '&quot;': '"', '&amp;': '&' }
@@ -15,8 +16,9 @@ export default class JioSaavnSource {
this.nodelink = nodelink
this.config = nodelink.options.sources?.jiosaavn || {}
this.searchTerms = ['jssearch']
+ this.recommendationTerm = ['jsrec']
this.patterns = [
- /https?:\/\/(?:www\.)?jiosaavn\.com\/(?:(?<type>album|featured|song|s\/playlist|artist)\/)(?:[^/]+\/)(?<id>[A-Za-z0-9_,\-]+)/
+ /https?:\/\/(?:www\.)?jiosaavn\.com\/(?:(?<type>album|featured|song|s\/playlist|artist)\/)(?:[^/]+\/)(?<id>[A-Za-z0-9_,-]+)/
]
this.priority = 60
this.playlistLoadLimit = this.config.playlistLoadLimit || 50
@@ -29,7 +31,11 @@ export default class JioSaavnSource {
return true
}
- async search(query) {
+ async search(query, sourceTerm) {
+ if (this.recommendationTerm.includes(sourceTerm)) {
+ return this.getRecommendations(query)
+ }
+
try {
logger('debug', 'JioSaavn', `Searching for: ${query}`)
@@ -54,6 +60,87 @@ export default class JioSaavnSource {
}
}
+ async getRecommendations(query) {
+ let id = query
+ if (!/^[A-Za-z0-9_,-]+$/.test(query)) {
+ const searchRes = await this.search(query, 'jssearch')
+ if (searchRes.loadType === 'search' && searchRes.data.length > 0) {
+ id = searchRes.data[0].info.identifier
+ } else {
+ return { loadType: 'empty', data: {} }
+ }
+ }
+
+ try {
+ const encodedId = encodeURIComponent(`["${id}"]`)
+ let json = await this._getJson({
+ __call: 'webradio.createEntityStation',
+ api_version: '4',
+ ctx: 'android',
+ entity_id: encodedId,
+ entity_type: 'queue'
+ })
+
+ if (json?.stationid) {
+ const stationId = json.stationid
+ json = await this._getJson({
+ __call: 'webradio.getSong',
+ api_version: '4',
+ ctx: 'android',
+ stationid: encodeURIComponent(stationId),
+ k: '20'
+ })
+
+ if (json && !json.error) {
+ const tracks = Object.values(json)
+ .filter((item) => item && typeof item === 'object' && item.song)
+ .map((item) => this._parseTrack(item.song, true))
+
+ if (tracks.length > 0) {
+ return {
+ loadType: 'playlist',
+ data: {
+ info: { name: 'JioSaavn Recommendations', selectedTrack: 0 },
+ pluginInfo: { type: 'recommendations' },
+ tracks
+ }
+ }
+ }
+ }
+ }
+
+ const metadata = await this._fetchSongMetadata(id)
+ if (metadata?.primary_artists_id) {
+ const artistIdsJoined = metadata.primary_artists_id
+ json = await this._getJson({
+ __call: 'search.artistOtherTopSongs',
+ api_version: '4',
+ ctx: 'wap6dot0',
+ artist_ids: encodeURIComponent(artistIdsJoined),
+ song_id: encodeURIComponent(id),
+ language: 'unknown'
+ })
+
+ if (json && Array.isArray(json) && json.length > 0) {
+ const tracks = json.map((item) => this._parseTrack(item, true))
+ return {
+ loadType: 'playlist',
+ data: {
+ info: { name: 'JioSaavn Recommendations', selectedTrack: 0 },
+ pluginInfo: { type: 'recommendations' },
+ tracks
+ }
+ }
+ }
+ }
+
+ return { loadType: 'empty', data: {} }
+ } catch (e) {
+ logger('error', 'JioSaavn', `Recommendations error: ${e.message}`)
+ return { exception: { message: e.message, severity: 'fault' } }
+ }
+ }
+
async resolve(url) {
const match = url.match(this.patterns[0])
if (!match) return { loadType: 'empty', data: {} }
@@ -65,7 +152,11 @@ export default class JioSaavnSource {
if (type === 'song') {
const trackData = await this._fetchSongMetadata(id)
if (!trackData) {
- logger('error', 'JioSaavn', `All 
resolution methods failed for song ${id}`) + logger( + 'error', + 'JioSaavn', + `All resolution methods failed for song ${id}` + ) return { loadType: 'empty', data: {} } } return { loadType: 'track', data: this._parseTrack(trackData) } @@ -80,16 +171,27 @@ export default class JioSaavnSource { async getTrackUrl(decodedTrack) { try { - logger('debug', 'JioSaavn', `Fetching stream for: ${decodedTrack.identifier}`) + logger( + 'debug', + 'JioSaavn', + `Fetching stream for: ${decodedTrack.identifier}` + ) const trackData = await this._fetchSongMetadata(decodedTrack.identifier) if (!trackData) { - return { exception: { message: 'Track metadata not found', severity: 'common' } } + return { + exception: { message: 'Track metadata not found', severity: 'common' } + } } if (!trackData.encrypted_media_url) { - return { exception: { message: 'No encrypted_media_url found', severity: 'fault' } } + return { + exception: { + message: 'No encrypted_media_url found', + severity: 'fault' + } + } } let playbackUrl = this._decryptUrl(trackData.encrypted_media_url) @@ -110,7 +212,7 @@ export default class JioSaavnSource { } } - async loadStream(track, url, protocol, additionalData) { + async loadStream(_track, url, _protocol, _additionalData) { const { stream, error, statusCode } = await http1makeRequest(url, { method: 'GET', streamOnly: true @@ -138,7 +240,6 @@ export default class JioSaavnSource { ...params }).toString() - const { body, error, statusCode } = await http1makeRequest(url.toString(), { method: 'GET', headers: HEADERS @@ -150,7 +251,7 @@ export default class JioSaavnSource { try { return typeof body === 'string' ? JSON.parse(body) : body - } catch (e) { + } catch (_e) { throw new Error('Failed to parse JioSaavn response') } } @@ -161,7 +262,11 @@ export default class JioSaavnSource { return data[id] || data.songs[0] } - logger('warn', 'JioSaavn', `song.getDetails failed for ${id}. Retrying with webapi.get...`) + logger( + 'warn', + 'JioSaavn', + `song.getDetails failed for ${id}. Retrying with webapi.get...` + ) data = await this._getJson({ __call: 'webapi.get', @@ -189,7 +294,7 @@ export default class JioSaavnSource { if (!list?.length) return { loadType: 'empty', data: {} } - const tracks = list.map(item => this._parseTrack(item)) + const tracks = list.map((item) => this._parseTrack(item)) let name = data.title || data.name || '' if (type === 'artist') name = `${name}'s Top Tracks` @@ -208,7 +313,9 @@ export default class JioSaavnSource { _decryptUrl(encryptedUrl) { const decipher = crypto.createDecipheriv('des-ecb', J_BUFFER, null) decipher.setAutoPadding(true) - return decipher.update(encryptedUrl, 'base64', 'utf8') + decipher.final('utf8') + return ( + decipher.update(encryptedUrl, 'base64', 'utf8') + decipher.final('utf8') + ) } _cleanString(str) { @@ -222,19 +329,27 @@ export default class JioSaavnSource { const id = json.id const title = this._cleanString(json.title || json.song) const uri = json.perma_url - const duration = (parseInt(json.more_info?.duration || json.duration || '0', 10)) * 1000 + const duration = + parseInt(json.more_info?.duration || json.duration || '0', 10) * 1000 const primaryArtists = json.more_info?.artistMap?.primary_artists const artistList = json.more_info?.artistMap?.artists - const metaArtist = Array.isArray(primaryArtists) && primaryArtists.length - ? primaryArtists - : (Array.isArray(artistList) ? artistList : null) + const metaArtist = + Array.isArray(primaryArtists) && primaryArtists.length + ? primaryArtists + : Array.isArray(artistList) + ? 
artistList + : null let author if (metaArtist) { - author = metaArtist.map(a => a.name).join(', ') + author = metaArtist.map((a) => a.name).join(', ') } else { - author = json.more_info?.music || json.primary_artists || json.singers || 'Unknown Artist' + author = + json.more_info?.music || + json.primary_artists || + json.singers || + 'Unknown Artist' } const artworkUrl = (json.image || '').replace('150x150', '500x500') diff --git a/src/sources/kwai.js b/src/sources/kwai.js index 5df6c0e..931ea1b 100644 --- a/src/sources/kwai.js +++ b/src/sources/kwai.js @@ -28,7 +28,7 @@ export default class KwaiSource { decodeUnicodeEscapes(str) { if (!str) return null - return str.replace(/\\u([\dA-Fa-f]{4})/g, (match, code) => { + return str.replace(/\\u([\dA-Fa-f]{4})/g, (_match, code) => { return String.fromCharCode(Number.parseInt(code, 16)) }) } @@ -94,7 +94,7 @@ export default class KwaiSource { } } - async search(query) { + async search(_query) { throw { exception: { message: 'Search not supported for Kwai', @@ -173,7 +173,7 @@ export default class KwaiSource { } } - async loadStream(decodedTrack, url, protocol, additionalData) { + async loadStream(_decodedTrack, url, _protocol, _additionalData) { try { const options = { method: 'GET', diff --git a/src/sources/lastfm.js b/src/sources/lastfm.js index 71b56c3..df7ad99 100644 --- a/src/sources/lastfm.js +++ b/src/sources/lastfm.js @@ -1,4 +1,9 @@ -import { http1makeRequest, logger } from '../utils.js' +/* +* Credits: https://github.com/southctrl; adapted for NodeLink +I added support for lfsearch:query in this file. you're welcome <3 +*/ + +import { encodeTrack, getBestMatch, http1makeRequest, logger } from '../utils.js' const LASTFM_PATTERN = /^https?:\/\/(?:www\.)?last\.fm\/(?:[a-z]{2}\/)?music\/.+/ @@ -10,8 +15,12 @@ const YOUTUBE_URL_PATTERN = export default class LastFMSource { constructor(nodelink) { this.nodelink = nodelink + this.config = nodelink.options.sources?.lastfm || {} this.patterns = [LASTFM_PATTERN] this.priority = 40 + this.searchTerms = ['lfsearch'] + this.maxSearchResults = nodelink.options.maxSearchResults || 10 + this.apiKey = this.config.apiKey || null } async setup() { @@ -23,12 +32,24 @@ export default class LastFMSource { return LASTFM_PATTERN.test(link) } - async search() { - return { - exception: { - message: 'Search not supported for Last.fm', - severity: 'common' + async search(query, _sourceTerm, searchType = 'track') { + try { + if (!this.apiKey) { + if (searchType !== 'track') { + return { + exception: { + message: + 'Last.fm API key required for album/artist search. 
Configure sources.lastfm.apiKey.', + severity: 'common' + } + } + } + return await this._searchTracksHtml(query) } + + return await this._searchApi(query, searchType) + } catch (e) { + return { exception: { message: e.message, severity: 'fault' } } } } @@ -64,7 +85,6 @@ export default class LastFMSource { } } - // Check if it's a track URL (contains '_' separator or has 4+ segments) const isTrack = path.includes('_') || path.length >= 4 if (isTrack) { @@ -177,7 +197,214 @@ export default class LastFMSource { } async getTrackUrl(decodedTrack) { - return this.nodelink.sources.getTrackUrl(decodedTrack) + try { + const youtubeUrl = decodedTrack?.pluginInfo?.youtubeUrl + if (youtubeUrl) { + const youtubeResult = await this.nodelink.sources.resolve(youtubeUrl) + if (youtubeResult?.loadType === 'track') { + const streamInfo = await this.nodelink.sources.getTrackUrl( + youtubeResult.data.info + ) + return { newTrack: youtubeResult.data, ...streamInfo } + } + } + + const query = `${decodedTrack.title} ${decodedTrack.author}`.trim() + let searchResult = await this.nodelink.sources.search( + 'youtube', + query, + 'ytmsearch' + ) + + if ( + searchResult.loadType !== 'search' || + searchResult.data.length === 0 + ) { + searchResult = await this.nodelink.sources.searchWithDefault(query) + } + + if ( + searchResult.loadType !== 'search' || + searchResult.data.length === 0 + ) { + return { + exception: { + message: 'No matching track found on default source.', + severity: 'common' + } + } + } + + const bestMatch = getBestMatch(searchResult.data, decodedTrack) + if (!bestMatch) { + return { + exception: { + message: 'No suitable alternative found after filtering.', + severity: 'common' + } + } + } + + const streamInfo = await this.nodelink.sources.getTrackUrl(bestMatch.info) + return { newTrack: bestMatch, ...streamInfo } + } catch (e) { + return { exception: { message: e.message, severity: 'fault' } } + } + } + + async _searchApi(query, searchType) { + const typeMap = { + track: { method: 'track.search', param: 'track' }, + album: { method: 'album.search', param: 'album' }, + artist: { method: 'artist.search', param: 'artist' } + } + const selected = typeMap[searchType] || typeMap.track + + const url = + `https://ws.audioscrobbler.com/2.0/?method=${selected.method}` + + `&${selected.param}=${encodeURIComponent(query)}` + + `&limit=${this.maxSearchResults}&api_key=${this.apiKey}&format=json` + + const { body, statusCode, error } = await http1makeRequest(url, { + method: 'GET' + }) + + if (error || statusCode !== 200 || !body) { + return { + exception: { + message: `Last.fm API error: ${error?.message || statusCode}`, + severity: 'fault' + } + } + } + + if (body?.error) { + return { + exception: { message: body.message || 'Last.fm API error', severity: 'fault' } + } + } + + const results = this._mapApiResults(body, searchType) + return results.length + ? { loadType: 'search', data: results } + : { loadType: 'empty', data: {} } + } + + _mapApiResults(body, searchType) { + if (searchType === 'album') { + const albums = body?.results?.albummatches?.album || [] + const list = Array.isArray(albums) ? albums : [albums] + return list + .filter((item) => item?.name && item?.artist) + .map((item) => this._buildCollectionResult(item.name, item.artist, item.url, 'album')) + } + + if (searchType === 'artist') { + const artists = body?.results?.artistmatches?.artist || [] + const list = Array.isArray(artists) ? 
artists : [artists]
+ return list
+ .filter((item) => item?.name)
+ .map((item) => this._buildCollectionResult(item.name, 'Last.fm', item.url, 'artist'))
+ }
+
+ const tracks = body?.results?.trackmatches?.track || []
+ const list = Array.isArray(tracks) ? tracks : [tracks]
+ return list
+ .filter((item) => item?.name && item?.artist)
+ .map((item) => this._buildTrackResult(item.name, item.artist, item.url))
+ }
+
+ async _searchTracksHtml(query) {
+ const url = `https://www.last.fm/search/tracks?q=${encodeURIComponent(query)}`
+ const { body, statusCode, error } = await http1makeRequest(url, {
+ method: 'GET'
+ })
+
+ if (error || statusCode !== 200 || !body) {
+ return {
+ exception: {
+ message: `Failed to fetch Last.fm search page: ${error?.message || statusCode}`,
+ severity: 'fault'
+ }
+ }
+ }
+
+ const results = this._parseTrackSearchHtml(body)
+ return results.length
+ ? { loadType: 'search', data: results.slice(0, this.maxSearchResults) }
+ : { loadType: 'empty', data: {} }
+ }
+
+ _parseTrackSearchHtml(html) {
+ const results = []
+ const regex =
+ /data-youtube-url="([^"]+)"[\s\S]*?data-track-name="([^"]+)"[\s\S]*?data-track-url="([^"]+)"[\s\S]*?data-artist-name="([^"]+)"/g
+
+ let match
+ while ((match = regex.exec(html)) !== null) {
+ const youtubeUrl = this._decodeHtml(match[1])
+ const title = this._decodeHtml(match[2])
+ const trackUrl = this._decodeHtml(match[3])
+ const artist = this._decodeHtml(match[4])
+ const fullUrl = trackUrl.startsWith('http')
+ ? trackUrl
+ : `https://www.last.fm${trackUrl}`
+
+ results.push(
+ this._buildTrackResult(title, artist, fullUrl, {
+ youtubeUrl
+ })
+ )
+ }
+
+ return results
+ }
+
+ _buildTrackResult(title, artist, url, pluginInfo = {}) {
+ const info = {
+ identifier: url || `${artist} - ${title}`,
+ isSeekable: true,
+ author: artist,
+ length: 0,
+ isStream: false,
+ position: 0,
+ title,
+ uri: url,
+ artworkUrl: null,
+ isrc: null,
+ sourceName: 'lastfm'
+ }
+
+ return { encoded: encodeTrack(info), info, pluginInfo }
+ }
+
+ _buildCollectionResult(title, author, url, type) {
+ const info = {
+ identifier: url || title,
+ isSeekable: false,
+ author,
+ length: 0,
+ isStream: false,
+ position: 0,
+ title,
+ uri: url,
+ artworkUrl: null,
+ isrc: null,
+ sourceName: 'lastfm'
+ }
+
+ return { encoded: encodeTrack(info), info, pluginInfo: { type } }
+ }
+
+ _decodeHtml(text) {
+ if (!text) return text
+ return text
+ .replace(/&amp;/g, '&')
+ .replace(/&quot;/g, '"')
+ .replace(/&#39;/g, "'")
+ .replace(/&apos;/g, "'")
+ .replace(/&lt;/g, '<')
+ .replace(/&gt;/g, '>')
}
async loadStream(track, url, protocol, additionalData) {
diff --git a/src/sources/local.js b/src/sources/local.js
index 00c6913..c010c7c 100644
--- a/src/sources/local.js
+++ b/src/sources/local.js
@@ -70,7 +70,7 @@ function readFileInfo(filePath) {
export default class LocalSource {
constructor(nodelink) {
this.nodelink = nodelink
- this.searchTerms = ['local', 'file']
+ this.searchTerms = []
this.priority = 20
}
@@ -79,14 +79,17 @@ export default class LocalSource {
}
async search(query) {
+ const isAbsolute = path.isAbsolute(query)
const basePath = path.resolve(
this.nodelink.options.sources.local.basePath || './'
)
- const filePath = path.resolve(basePath, query)
+ const filePath = isAbsolute
+ ? 
path.resolve(query)
+ : path.resolve(basePath, query)
logger('debug', 'Sources', `Searching local file: ${filePath}`)
- if (!filePath.startsWith(basePath)) {
+ if (!isAbsolute && !filePath.startsWith(basePath)) {
logger(
'warn',
'Sources',
@@ -111,7 +114,7 @@ export default class LocalSource {
'Sources',
`Local track found: ${track.info.title} [${meta.fileType}]`
)
- return { loadType: 'search', data: [track] }
+ return { loadType: 'track', data: track }
} catch (err) {
logger(
'warn',
@@ -152,7 +155,7 @@ export default class LocalSource {
}
}
- async loadStream(decoded, url, protocol, additional) {
+ async loadStream(decoded, _url, _protocol, additional) {
if (additional?.startTime && decoded.isSeekable) {
const info = readFileInfo(decoded.uri)
const bps =
diff --git a/src/sources/mixcloud.js b/src/sources/mixcloud.js
new file mode 100644
index 0000000..4498cf0
--- /dev/null
+++ b/src/sources/mixcloud.js
@@ -0,0 +1,396 @@
+import { PassThrough } from 'node:stream'
+import {
+ encodeTrack,
+ http1makeRequest,
+ loadHLSPlaylist,
+ logger,
+ makeRequest
+} from '../utils.js'
+
+const DECRYPTION_KEY = 'IFYOUWANTTHEARTISTSTOGETPAIDDONOTDOWNLOADFROMMIXCLOUD'
+
+export default class MixcloudSource {
+ constructor(nodelink) {
+ this.nodelink = nodelink
+ this.config = nodelink.options
+ this.patterns = [
+ /https?:\/\/(?:(?:www|beta|m)\.)?mixcloud\.com\/(?<user>[^/]+)\/(?!stream|uploads|favorites|listens|playlists)(?<slug>[^/]+)\/?/i,
+ /https?:\/\/(?:(?:www|beta|m)\.)?mixcloud\.com\/(?<user>[^/]+)\/playlists\/(?<playlist>[^/]+)\/?/i,
+ /https?:\/\/(?:(?:www|beta|m)\.)?mixcloud\.com\/(?<id>[^/]+)\/(?<type>uploads|favorites|listens|stream)?\/?/i
+ ]
+ this.searchTerms = ['mcsearch']
+ this.priority = 90
+ }
+
+ async setup() {
+ return true
+ }
+
+ async _request(query) {
+ const apiUrl = `https://app.mixcloud.com/graphql?query=${encodeURIComponent(query)}`
+ return makeRequest(
+ apiUrl,
+ {
+ method: 'GET',
+ headers: {
+ 'User-Agent':
+ 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36'
+ }
+ },
+ this.nodelink
+ )
+ }
+
+ async search(query) {
+ try {
+ const apiUrl = `https://api.mixcloud.com/search/?q=${encodeURIComponent(query)}&type=cloudcast`
+ let { body, statusCode, error } = await http1makeRequest(apiUrl, {
+ headers: {
+ 'User-Agent':
+ 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36'
+ },
+ disableBodyCompression: true
+ })
+
+ if (error) throw error
+
+ if (typeof body === 'string') {
+ try {
+ body = JSON.parse(body)
+ } catch {
+ logger(
+ 'warn',
+ 'Mixcloud',
+ 'Failed to parse search API response as JSON'
+ )
+ return { loadType: 'empty', data: {} }
+ }
+ }
+
+ if (statusCode !== 200 || !body?.data) {
+ logger('warn', 'Mixcloud', `Search API returned status ${statusCode}`)
+ return { loadType: 'empty', data: {} }
+ }
+
+ if (body.data.length === 0) return { loadType: 'empty', data: {} }
+
+ const tracks = body.data
+ .map((item) => {
+ const pathParts = item.url
+ .split('mixcloud.com/')[1]
+ .split('/')
+ .filter(Boolean)
+ const trackInfo = {
+ identifier: `${pathParts[0]}_${pathParts[1]}`,
+ isSeekable: true,
+ author: item.user?.name || pathParts[0],
+ length: (item.audio_length || 0) * 1000,
+ isStream: false,
+ position: 0,
+ title: item.name,
+ uri: item.url,
+ artworkUrl: item.pictures?.large || item.pictures?.medium || null,
+ isrc: null,
+ sourceName: 'mixcloud'
+ }
+ return {
+ encoded: encodeTrack(trackInfo),
+ info: trackInfo,
+ pluginInfo: {}
+ }
+ })
+ .slice(0, 
this.nodelink.options.maxSearchResults || 10) + + return { loadType: 'search', data: tracks } + } catch (e) { + logger('error', 'Mixcloud', `Search failed: ${e.message}`) + return { loadType: 'empty', data: {} } + } + } + + _decrypt(ciphertextB64) { + const ciphertext = Buffer.from(ciphertextB64, 'base64') + const key = Buffer.from(DECRYPTION_KEY) + const decrypted = Buffer.alloc(ciphertext.length) + + for (let i = 0; i < ciphertext.length; i++) { + decrypted[i] = ciphertext[i] ^ key[i % key.length] + } + + return decrypted.toString('utf-8') + } + + async resolve(url) { + if (this.patterns[0].test(url)) return this._resolveTrack(url) + if (this.patterns[1].test(url)) return this._resolvePlaylist(url) + if (this.patterns[2].test(url)) return this._resolveUser(url) + + return { loadType: 'empty', data: {} } + } + + async _resolveTrack(url) { + const match = url.match(this.patterns[0]) + const { user: username, slug } = match.groups + try { + const query = `{ + cloudcastLookup(lookup: {username: "${username}", slug: "${slug}"}) { + audioLength + name + url + owner { displayName username } + picture(width: 1024, height: 1024) { url } + streamInfo { hlsUrl url } + restrictedReason + } + }` + + const { body, statusCode } = await this._request(query) + + if (statusCode !== 200 || !body.data?.cloudcastLookup) { + return { loadType: 'empty', data: {} } + } + + const data = body.data.cloudcastLookup + if (data.restrictedReason) { + throw new Error(`Track restricted: ${data.restrictedReason}`) + } + + const track = this._parseTrackData(data) + + return { + loadType: 'track', + data: track + } + } catch (e) { + logger('error', 'Mixcloud', `Track resolution failed: ${e.message}`) + return { + loadType: 'error', + data: { message: e.message, severity: 'fault' } + } + } + } + + async _resolvePlaylist(url) { + const match = url.match(this.patterns[1]) + const { user, playlist: slug } = match.groups + try { + const queryTemplate = (cursor) => `{ + playlistLookup(lookup: {username: "${user}", slug: "${slug}"}) { + name + items(first: 100${cursor ? `, after: "${cursor}"` : ''}) { + edges { + node { + cloudcast { + audioLength + name + url + owner { displayName username } + picture(width: 1024, height: 1024) { url } + streamInfo { hlsUrl url } + } + } + } + pageInfo { endCursor hasNextPage } + } + } + }` + + const tracks = [] + let cursor = null + let hasNextPage = true + let playlistName = 'Mixcloud Playlist' + + while ( + hasNextPage && + tracks.length < (this.config.maxAlbumPlaylistLength || 1000) + ) { + const { body, statusCode } = await this._request(queryTemplate(cursor)) + if (statusCode !== 200 || !body.data?.playlistLookup) break + + const data = body.data.playlistLookup + playlistName = data.name + + for (const edge of data.items.edges) { + const track = edge.node.cloudcast + if (!track) continue + tracks.push(this._parseTrackData(track)) + } + + cursor = data.items.pageInfo.endCursor + hasNextPage = data.items.pageInfo.hasNextPage + } + + return { + loadType: 'playlist', + data: { + info: { name: playlistName, selectedTrack: 0 }, + tracks + } + } + } catch (e) { + logger('error', 'Mixcloud', `Playlist resolution failed: ${e.message}`) + return { + loadType: 'error', + data: { message: e.message, severity: 'fault' } + } + } + } + + async _resolveUser(url) { + const match = url.match(this.patterns[2]) + const { id: username, type = 'uploads' } = match.groups + try { + const queryType = type === 'stream' ? 
'stream' : type + const queryTemplate = (cursor) => `{ + userLookup(lookup: {username: "${username}"}) { + displayName + ${queryType}(first: 100${cursor ? `, after: "${cursor}"` : ''}) { + edges { + node { + ${type === 'stream' ? '... on Cloudcast { audioLength name url owner { displayName username } picture(width: 1024, height: 1024) { url } streamInfo { hlsUrl url } }' : 'audioLength name url owner { displayName username } picture(width: 1024, height: 1024) { url } streamInfo { hlsUrl url }'} + } + } + pageInfo { endCursor hasNextPage } + } + } + }` + + const tracks = [] + let cursor = null + let hasNextPage = true + let userDisplayName = username + + while ( + hasNextPage && + tracks.length < (this.config.maxAlbumPlaylistLength || 1000) + ) { + const { body, statusCode } = await this._request(queryTemplate(cursor)) + if (statusCode !== 200 || !body.data?.userLookup?.[queryType]) break + + const data = body.data.userLookup + userDisplayName = data.displayName + const list = data[queryType] + + for (const edge of list.edges) { + if (!edge.node.url) continue + tracks.push(this._parseTrackData(edge.node)) + } + + cursor = list.pageInfo.endCursor + hasNextPage = list.pageInfo.hasNextPage + } + + return { + loadType: 'playlist', + data: { + info: { name: `${userDisplayName} (${type})`, selectedTrack: 0 }, + tracks + } + } + } catch (e) { + logger('error', 'Mixcloud', `User resolution failed: ${e.message}`) + return { + loadType: 'error', + data: { message: e.message, severity: 'fault' } + } + } + } + + _parseTrackData(data) { + const pathParts = data.url + .split('mixcloud.com/')[1] + .split('/') + .filter(Boolean) + const trackInfo = { + identifier: `${pathParts[0]}_${pathParts[1]}`, + isSeekable: true, + author: data.owner?.displayName || pathParts[0], + length: (data.audioLength || 0) * 1000, + isStream: false, + position: 0, + title: data.name, + uri: data.url, + artworkUrl: data.picture?.url || null, + isrc: null, + sourceName: 'mixcloud' + } + return { + encoded: encodeTrack(trackInfo), + info: trackInfo, + pluginInfo: { + encryptedHls: data.streamInfo?.hlsUrl, + encryptedUrl: data.streamInfo?.url + } + } + } + + async getTrackUrl(decodedTrack) { + let { encryptedHls, encryptedUrl } = decodedTrack.pluginInfo || {} + + if (!encryptedHls && !encryptedUrl) { + const res = await this._resolveTrack(decodedTrack.uri) + if (res.loadType === 'track') { + encryptedHls = res.data.pluginInfo.encryptedHls + encryptedUrl = res.data.pluginInfo.encryptedUrl + } + } + + if (encryptedUrl) { + return { + url: this._decrypt(encryptedUrl), + protocol: 'https', + format: 'aac' + } + } + + if (encryptedHls) { + return { + url: this._decrypt(encryptedHls), + protocol: 'hls', + format: 'aac' + } + } + + throw new Error('No stream URL available for Mixcloud track') + } + + async loadStream(_decodedTrack, url, protocol) { + try { + if (protocol === 'hls') { + const stream = new PassThrough() + loadHLSPlaylist(url, stream) + return { stream, type: 'aac' } + } + + const options = { + method: 'GET', + streamOnly: true, + headers: { + 'User-Agent': + 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36', + Referer: 'https://www.mixcloud.com/' + } + } + + const response = await http1makeRequest(url, options) + + if (response.error || !response.stream) { + throw response.error || new Error('Failed to get stream') + } + + const stream = new PassThrough() + response.stream.on('data', (chunk) => stream.write(chunk)) + response.stream.on('end', () => 
stream.emit('finishBuffering')) + response.stream.on('error', (error) => { + logger('error', 'Mixcloud', `Upstream stream error: ${error.message}`) + stream.emit('error', error) + stream.emit('finishBuffering') + }) + + return { stream, type: protocol === 'hls' ? 'aac' : 'm4a' } + } catch (e) { + logger('error', 'Mixcloud', `Failed to load stream: ${e.message}`) + return { exception: { message: e.message, severity: 'fault' } } + } + } +} diff --git a/src/sources/nicovideo.js b/src/sources/nicovideo.js index d1c1eb6..32dbebd 100644 --- a/src/sources/nicovideo.js +++ b/src/sources/nicovideo.js @@ -1,6 +1,7 @@ import crypto from 'node:crypto' import { PassThrough } from 'node:stream' import { encodeTrack, http1makeRequest, logger } from '../utils.js' + async function manageEncryptedHls(url, stream, headers) { try { const { @@ -396,7 +397,7 @@ export default class NicoVideoSource { additionalData: { cookie } } } - async loadStream(track, url, protocol, additionalData) { + async loadStream(_track, url, protocol, additionalData) { if (protocol === 'hls') { const stream = new PassThrough() const headers = additionalData?.cookie diff --git a/src/sources/pandora.js b/src/sources/pandora.js index 8053e1e..f0f8f8f 100644 --- a/src/sources/pandora.js +++ b/src/sources/pandora.js @@ -1,4 +1,10 @@ -import { encodeTrack, http1makeRequest, logger, makeRequest } from '../utils.js' +import { + encodeTrack, + getBestMatch, + http1makeRequest, + logger, + makeRequest +} from '../utils.js' export default class PandoraSource { constructor(nodelink) { @@ -21,43 +27,114 @@ export default class PandoraSource { this.setupPromise = (async () => { try { + const cachedAuth = + this.nodelink.credentialManager.get('pandora_auth_token') + const cachedCsrf = + this.nodelink.credentialManager.get('pandora_csrf_token') + + if (cachedAuth && cachedCsrf) { + this.authToken = cachedAuth + this.csrfToken = cachedCsrf + logger( + 'info', + 'Pandora', + 'Loaded Pandora credentials from CredentialManager.' + ) + return true + } + logger('debug', 'Pandora', 'Setting Pandora auth and CSRF token.') - const pandoraRequest = await makeRequest('https://www.pandora.com', { - method: 'HEAD' - }) + const csrfTokenValue = this.csrfTokenConfig + const remoteUrl = this.config.sources?.pandora?.remoteTokenUrl + + if (remoteUrl) { + logger( + 'info', + 'Pandora', + `Fetching tokens from remote provider: ${remoteUrl}` + ) + try { + const { body, error, statusCode } = await makeRequest(remoteUrl, { + method: 'GET' + }) + if ( + !error && + statusCode === 200 && + body.success && + body.authToken && + body.csrfToken + ) { + this.authToken = body.authToken + this.csrfToken = { + raw: `csrftoken=${body.csrfToken};Path=/;Domain=.pandora.com;Secure`, + parsed: body.csrfToken + } - if (pandoraRequest.error) { - logger('error', 'Pandora', 'Failed to set CSRF token from Pandora.') - return false + const cacheTtlMs = (body.expires_in_seconds || 3600) * 1000 + this.nodelink.credentialManager.set( + 'pandora_auth_token', + this.authToken, + cacheTtlMs + ) + this.nodelink.credentialManager.set( + 'pandora_csrf_token', + this.csrfToken, + cacheTtlMs + ) + + logger( + 'info', + 'Pandora', + 'Successfully initialized with remote tokens (bypass active).' + ) + return true + } + logger( + 'warn', + 'Pandora', + `Remote provider failed (Status: ${statusCode}). Falling back to local login.` + ) + } catch (e) { + logger( + 'warn', + 'Pandora', + `Exception during remote token fetch: ${e.message}. 
Falling back to local login.` + ) + } } - const cookies = pandoraRequest.headers['set-cookie'] - const csrfCookie = cookies - ? this.csrfTokenConfig || cookies.find(cookie => cookie.startsWith('csrftoken=')) - : null - - if (!csrfCookie) { - logger('error', 'Pandora', 'Failed to find CSRF token cookie.') - return false - } + if (csrfTokenValue) { + this.csrfToken = { + raw: `csrftoken=${csrfTokenValue};Path=/;Domain=.pandora.com;Secure`, + parsed: csrfTokenValue + } + } else { + const pandoraRequest = await makeRequest('https://www.pandora.com', { + method: 'HEAD' + }) - if (this.csrfTokenConfig) { - const csrfMatch = `csrftoken=${this.csrfTokenConfig};Path=/;Domain=.pandora.com;Secure` - if (!csrfMatch) { - logger('error', 'Pandora', 'Failed to parse provided CSRF token.') + if (pandoraRequest.error) { + logger('error', 'Pandora', 'Failed to set CSRF token from Pandora.') return false } - this.csrfToken = { - raw: csrfMatch, - parsed: this.csrfTokenConfig + + const cookies = pandoraRequest.headers['set-cookie'] + const csrfCookie = cookies + ? cookies.find((cookie) => cookie.startsWith('csrftoken=')) + : null + + if (!csrfCookie) { + logger('error', 'Pandora', 'Failed to find CSRF token cookie.') + return false } - } else { + const csrfMatch = /csrftoken=([a-f0-9]{16})/.exec(csrfCookie) if (!csrfMatch) { logger('error', 'Pandora', 'Failed to parse CSRF token.') return false } + this.csrfToken = { raw: csrfCookie.split(';')[0], parsed: csrfMatch[1] @@ -84,7 +161,22 @@ export default class PandoraSource { this.authToken = tokenRequest.body.authToken - logger('info', 'Pandora', 'Successfully set Pandora auth and CSRF token.') + this.nodelink.credentialManager.set( + 'pandora_auth_token', + this.authToken, + 24 * 60 * 60 * 1000 + ) + this.nodelink.credentialManager.set( + 'pandora_csrf_token', + this.csrfToken, + 24 * 60 * 60 * 1000 + ) + + logger( + 'info', + 'Pandora', + 'Successfully set Pandora auth and CSRF token.' 
+ ) return true } catch (e) { logger('error', 'Pandora', `Setup failed: ${e.message}`) @@ -152,7 +244,10 @@ export default class PandoraSource { const authError = await this._ensureAuth() if (authError) return authError - const typeMatch = /^(https:\/\/www\.pandora\.com\/)((playlist)|(station)|(podcast)|(artist))\/.+/.exec(url) + const typeMatch = + /^(https:\/\/www\.pandora\.com\/)((playlist)|(station)|(podcast)|(artist))\/.+/.exec( + url + ) if (!typeMatch) { return { loadType: 'empty', data: {} } @@ -282,7 +377,9 @@ export default class PandoraSource { Object.keys(data.annotations), this.config.maxAlbumPlaylistLength ) - const tracks = trackKeys.map(key => this.buildTrack(data.annotations[key])) + const tracks = trackKeys.map((key) => + this.buildTrack(data.annotations[key]) + ) return { loadType: 'playlist', @@ -345,16 +442,21 @@ export default class PandoraSource { } const topTracks = data.data?.entity?.topTracksWithCollaborations || [] - const items = this._limitArray(topTracks, this.config.maxAlbumPlaylistLength) + const items = this._limitArray( + topTracks, + this.config.maxAlbumPlaylistLength + ) - const tracks = items.map(item => this.buildTrack({ - name: item.name, - artistName: item.artistName?.name, - shareableUrlPath: item.shareableUrlPath, - icon: item.icon, - pandoraId: item.pandoraId, - duration: item.duration - })) + const tracks = items.map((item) => + this.buildTrack({ + name: item.name, + artistName: item.artistName?.name, + shareableUrlPath: item.shareableUrlPath, + icon: item.icon, + pandoraId: item.pandoraId, + duration: item.duration + }) + ) return { loadType: 'playlist', @@ -395,8 +497,10 @@ export default class PandoraSource { return this._buildException(error) } - const keys = Object.keys(data.annotations).filter(key => key.includes('TR:')) - const tracks = keys.map(key => this.buildTrack(data.annotations[key])) + const keys = Object.keys(data.annotations).filter((key) => + key.includes('TR:') + ) + const tracks = keys.map((key) => this.buildTrack(data.annotations[key])) return { loadType: 'playlist', @@ -451,18 +555,24 @@ export default class PandoraSource { for (const item of playlistData.items) { if (!item.songName) continue - tracks.push(this.buildTrack({ - name: item.songName, - artistName: item.artistName, - shareableUrlPath: item.songDetailUrl, - icon: { artUrl: item.albumArtUrl }, - pandoraId: item.songId, - duration: item.trackLength - })) + tracks.push( + this.buildTrack({ + name: item.songName, + artistName: item.artistName, + shareableUrlPath: item.songDetailUrl, + icon: { artUrl: item.albumArtUrl }, + pandoraId: item.songId, + duration: item.trackLength + }) + ) } } } catch (e) { - logger('debug', 'Pandora', `Failed to fetch station playlist: ${e.message}`) + logger( + 'debug', + 'Pandora', + `Failed to fetch station playlist: ${e.message}` + ) } if (tracks.length === 0) { @@ -473,15 +583,17 @@ export default class PandoraSource { for (const seed of seeds) { if (!seed.song) continue - tracks.push(this.buildTrack({ - name: seed.song.songTitle, - artistName: seed.song.artistSummary, - shareableUrlPath: seed.song.songDetailUrl, - icon: { - artUrl: seed.art?.[seed.art.length - 1]?.url - }, - pandoraId: seed.song.songId - })) + tracks.push( + this.buildTrack({ + name: seed.song.songTitle, + artistName: seed.song.artistSummary, + shareableUrlPath: seed.song.songDetailUrl, + icon: { + artUrl: seed.art?.[seed.art.length - 1]?.url + }, + pandoraId: seed.song.songId + }) + ) } } @@ -536,7 +648,9 @@ export default class PandoraSource { } const allEpisodesIds 
= this._limitArray( - allEpisodesIdsData.episodes.episodesWithLabel.flatMap(yearInfo => yearInfo.episodes), + allEpisodesIdsData.episodes.episodesWithLabel.flatMap( + (yearInfo) => yearInfo.episodes + ), this.config.maxAlbumPlaylistLength ) @@ -555,10 +669,12 @@ export default class PandoraSource { } const episodes = Object.keys(allEpisodesData.annotations) - const tracks = episodes.map(epKey => this.buildTrack(allEpisodesData.annotations[epKey])) + const tracks = episodes.map((epKey) => + this.buildTrack(allEpisodesData.annotations[epKey]) + ) const programId = Object.keys(allEpisodesData.annotations).find( - key => allEpisodesData.annotations[key].type === 'PC' + (key) => allEpisodesData.annotations[key].type === 'PC' ) const programName = programId ? allEpisodesData.annotations[programId].name @@ -599,13 +715,45 @@ export default class PandoraSource { } } - async getTrackUrl(track) { - const query = `${track.title} ${track.author}` + async getTrackUrl(decodedTrack) { + const query = `${decodedTrack.title} ${decodedTrack.author}` try { - const searchResult = await this.nodelink.sources.searchWithDefault(query) + let searchResult + + if (decodedTrack.isrc) { + searchResult = await this.nodelink.sources.search( + 'youtube', + `"${decodedTrack.isrc}"`, + 'ytmsearch' + ) + if ( + searchResult.loadType !== 'search' || + searchResult.data.length === 0 + ) { + searchResult = null + } + } - if (searchResult.loadType !== 'search' || searchResult.data.length === 0) { + if (!searchResult) { + searchResult = await this.nodelink.sources.search( + 'youtube', + query, + 'ytmsearch' + ) + } + + if ( + searchResult.loadType !== 'search' || + searchResult.data.length === 0 + ) { + searchResult = await this.nodelink.sources.searchWithDefault(query) + } + + if ( + searchResult.loadType !== 'search' || + searchResult.data.length === 0 + ) { return { exception: { message: 'No matching track found on default source.', @@ -614,9 +762,17 @@ export default class PandoraSource { } } - const bestMatch = searchResult.data[0] - const streamInfo = await this.nodelink.sources.getTrackUrl(bestMatch.info) + const bestMatch = getBestMatch(searchResult.data, decodedTrack) + if (!bestMatch) { + return { + exception: { + message: 'No suitable alternative found after filtering.', + severity: 'common' + } + } + } + const streamInfo = await this.nodelink.sources.getTrackUrl(bestMatch.info) return { newTrack: bestMatch, ...streamInfo } } catch (e) { logger('error', 'Pandora', `Failed to mirror track: ${e.message}`) diff --git a/src/sources/pinterest.js b/src/sources/pinterest.js new file mode 100644 index 0000000..2ecd49c --- /dev/null +++ b/src/sources/pinterest.js @@ -0,0 +1,168 @@ +import { PassThrough } from 'node:stream' +import { encodeTrack, http1makeRequest, logger } from '../utils.js' + +export default class PinterestSource { + constructor(nodelink) { + this.nodelink = nodelink + this.config = nodelink.options + this.patterns = [ + /https?:\/\/(?:[^/]+\.)?pinterest\.(?:com|fr|de|ch|jp|cl|ca|it|co\.uk|nz|ru|com\.au|at|pt|co\.kr|es|com\.mx|dk|ph|th|com\.uy|co|nl|info|kr|ie|vn|com\.vn|ec|mx|in|pe|co\.at|hu|co\.in|co\.nz|id|com\.ec|com\.py|tw|be|uk|com\.bo|com\.pe)\/pin\/(?:[\w-]+--)?(\d+)/i + ] + this.priority = 100 + } + + async setup() { + return true + } + + async resolve(url) { + const match = url.match(this.patterns[0]) + if (!match) return { loadType: 'empty', data: {} } + + const videoId = match[1] + try { + const apiUrl = `https://www.pinterest.com/resource/PinResource/get/?data=${encodeURIComponent( + 
JSON.stringify({ + options: { + field_set_key: 'unauth_react_main_pin', + id: videoId + } + }) + )}` + + const { body, statusCode } = await http1makeRequest(apiUrl, { + headers: { 'X-Pinterest-PWS-Handler': 'www/[username].js' } + }) + + if (statusCode !== 200 || !body.resource_response?.data) { + return { loadType: 'empty', data: {} } + } + + const data = body.resource_response.data + const videoList = + data.videos?.video_list || + data.story_pin_data?.pages?.[0]?.blocks?.find( + (b) => b.video?.video_list + )?.video?.video_list + + if (!videoList) return { loadType: 'empty', data: {} } + + const bestFormat = + videoList.V_720P || + videoList.V_540P || + videoList.V_360P || + Object.values(videoList)[0] + const artwork = + data.images?.orig?.url || Object.values(data.images || {})[0]?.url + + const trackInfo = { + identifier: videoId, + isSeekable: true, + author: + data.closeup_attribution?.full_name || + data.pinner?.full_name || + 'Unknown Artist', + length: Math.round(bestFormat.duration) || 0, + isStream: false, + position: 0, + title: data.title || data.grid_title || 'Pinterest Video', + uri: `https://www.pinterest.com/pin/${videoId}/`, + artworkUrl: artwork || null, + isrc: null, + sourceName: 'pinterest' + } + + return { + loadType: 'track', + data: { encoded: encodeTrack(trackInfo), info: trackInfo } + } + } catch (e) { + logger('error', 'Pinterest', `Resolution failed: ${e.message}`) + return { + loadType: 'error', + data: { message: e.message, severity: 'fault' } + } + } + } + + async getTrackUrl(decodedTrack) { + const videoId = decodedTrack.identifier + try { + const apiUrl = `https://www.pinterest.com/resource/PinResource/get/?data=${encodeURIComponent( + JSON.stringify({ + options: { + field_set_key: 'unauth_react_main_pin', + id: videoId + } + }) + )}` + + const { body, statusCode } = await http1makeRequest(apiUrl, { + headers: { 'X-Pinterest-PWS-Handler': 'www/[username].js' } + }) + + if (statusCode !== 200 || !body.resource_response?.data) { + throw new Error('Failed to fetch Pinterest video URL') + } + + const data = body.resource_response.data + const videoList = + data.videos?.video_list || + data.story_pin_data?.pages?.[0]?.blocks?.find( + (b) => b.video?.video_list + )?.video?.video_list + + const format = + videoList?.V_720P || + videoList?.V_540P || + videoList?.V_360P || + Object.values(videoList || {}).find((v) => v.url?.endsWith('.mp4')) + + if (!format?.url) + throw new Error('No MP4 format found for Pinterest video') + + return { url: format.url, protocol: 'http', format: 'mp4' } + } catch (e) { + logger('error', 'Pinterest', `Failed to get track URL: ${e.message}`) + throw e + } + } + + async loadStream(_decodedTrack, url) { + try { + const options = { + method: 'GET', + streamOnly: true, + headers: { + 'User-Agent': + 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/58.0.3029.110 Safari/537.3', + Accept: '*/*' + } + } + + const response = await http1makeRequest(url, options) + + if (response.error || !response.stream) { + throw ( + response.error || + new Error('Failed to get stream, no stream object returned.') + ) + } + + const stream = new PassThrough() + + response.stream.on('data', (chunk) => stream.write(chunk)) + response.stream.on('end', () => stream.emit('finishBuffering')) + response.stream.on('error', (error) => { + logger('error', 'Pinterest', `Upstream stream error: ${error.message}`) + stream.emit('error', error) + stream.emit('finishBuffering') + }) + + return { stream, type: 'mp4' } + } catch (e) { + 
logger('error', 'Pinterest', `Failed to load stream: ${e.message}`) + return { exception: { message: e.message, severity: 'fault' } } + } + } +} diff --git a/src/sources/reddit.js b/src/sources/reddit.js index 8b21bb3..46856e7 100644 --- a/src/sources/reddit.js +++ b/src/sources/reddit.js @@ -1,5 +1,5 @@ -import { encodeTrack, makeRequest, logger } from '../utils.js' import { PassThrough } from 'node:stream' +import { encodeTrack, logger, makeRequest } from '../utils.js' const USER_AGENT = 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/58.0.3029.110 Safari/537.3' diff --git a/src/sources/shazam.js b/src/sources/shazam.js new file mode 100644 index 0000000..0e1ff07 --- /dev/null +++ b/src/sources/shazam.js @@ -0,0 +1,234 @@ +import { + encodeTrack, + getBestMatch, + http1makeRequest, + logger +} from '../utils.js' + +export default class ShazamSource { + constructor(nodelink) { + this.nodelink = nodelink + this.config = nodelink.options + this.searchTerms = ['shsearch', 'szsearch'] + this.patterns = [/https?:\/\/(?:www\.)?shazam\.com\/song\/\d+\/([^/?#]+)/] + this.priority = 90 + this.allowExplicit = true + } + + async setup() { + const shazamConfig = this.config.sources?.shazam || {} + this.allowExplicit = shazamConfig.allowExplicit ?? true + return true + } + + async search(query) { + try { + const limit = this.config.maxSearchResults || 10 + const url = `https://www.shazam.com/services/amapi/v1/catalog/US/search?types=songs&term=${encodeURIComponent(query)}&limit=${limit}` + + const { body: data, statusCode } = await http1makeRequest(url) + if (statusCode !== 200) return { loadType: 'empty', data: {} } + + const songs = data?.results?.songs?.data || [] + if (!songs.length) return { loadType: 'empty', data: {} } + + const tracks = [] + for (let i = 0; i < songs.length; i++) { + const t = this._buildTrack(songs[i]) + if (t) tracks.push(t) + } + + return { loadType: 'search', data: tracks } + } catch (error) { + logger('error', 'Shazam', `Search failed for ${query}: ${error.message}`) + return { exception: { message: error.message, severity: 'fault' } } + } + } + + async resolve(url) { + try { + const res = await http1makeRequest(url) + if (res.statusCode !== 200) return { loadType: 'empty', data: {} } + + const html = + typeof res.body === 'string' ? res.body : String(res.body ?? '') + + const extractTextAfterClass = (classPart) => { + let from = 0 + while (true) { + const c = html.indexOf('class="', from) + if (c === -1) return null + + const q = html.indexOf('"', c + 7) + if (q === -1) return null + + const cls = html.slice(c + 7, q) + if (cls.includes(classPart)) { + const gt = html.indexOf('>', q) + if (gt === -1) return null + const lt = html.indexOf('<', gt + 1) + if (lt === -1) return null + const text = html.slice(gt + 1, lt).trim() + return text || null + } + + from = q + 1 + } + } + + const extractHrefStartingAt = (hrefPrefix) => { + const i = html.indexOf(hrefPrefix) + if (i === -1) return null + const start = i + 6 + const end = html.indexOf('"', start) + return end > start ? 
html.slice(start, end) : null + } + + const extractArtworkFromImgAlt = () => { + const ogImage = html.match(/', altIdx) + if (imgEnd === -1) return null + + const tag = html.slice(imgStart, imgEnd + 1) + const s = tag.indexOf('srcset="') + if (s === -1) return null + + const valStart = s + 8 + const valEnd = tag.indexOf('"', valStart) + if (valEnd === -1) return null + + const srcset = tag.slice(valStart, valEnd) + const space = srcset.indexOf(' ') + return (space === -1 ? srcset : srcset.slice(0, space)) || null + } + + const appleMusicUrl = extractHrefStartingAt( + 'href="https://www.shazam.com/applemusic/song/' + ) + + const title = + extractTextAfterClass('NewTrackPageHeader_trackTitle__') || 'Unknown' + const artist = + extractTextAfterClass('TrackPageArtistLink_artistNameText__') || + 'Unknown' + + const artworkUrl = extractArtworkFromImgAlt() + + if (title === 'Unknown' && !appleMusicUrl) + return { loadType: 'empty', data: {} } + + const cleanUrl = url.endsWith('/') ? url.slice(0, -1) : url + const identifier = cleanUrl.slice(cleanUrl.lastIndexOf('/') + 1) + + const trackInfo = { + identifier, + isSeekable: true, + author: artist, + length: 0, + isStream: false, + position: 0, + title, + uri: url, + artworkUrl, + isrc: null, + sourceName: 'shazam', + appleMusicUrl + } + + return { + loadType: 'track', + data: { + encoded: encodeTrack(trackInfo), + info: trackInfo, + pluginInfo: {} + } + } + } catch (error) { + logger('error', 'Shazam', `Failed to resolve ${url}: ${error.message}`) + return { exception: { message: error.message, severity: 'fault' } } + } + } + + async getTrackUrl(decodedTrack) { + try { + const query = `${decodedTrack.title} ${decodedTrack.author}` + const hasResults = (r) => r?.loadType === 'search' && r.data?.length + + let searchResult = await this.nodelink.sources.search( + 'youtube', + query, + 'ytmsearch' + ) + + if (!hasResults(searchResult)) { + searchResult = await this.nodelink.sources.searchWithDefault(query) + } + + if (!hasResults(searchResult)) { + return { + exception: { message: 'No alternative found.', severity: 'fault' } + } + } + + const bestMatch = getBestMatch(searchResult.data, decodedTrack, { + allowExplicit: this.allowExplicit + }) + + if (!bestMatch) { + return { + exception: { message: 'No suitable match.', severity: 'fault' } + } + } + + const stream = await this.nodelink.sources.getTrackUrl(bestMatch.info) + return { newTrack: bestMatch, ...stream } + } catch (error) { + logger('error', 'Shazam', `Failed to get track URL: ${error.message}`) + return { exception: { message: error.message, severity: 'fault' } } + } + } + + _buildTrack(item) { + if (!item?.id) return null + + const attributes = item.attributes || {} + const artwork = this._parseArtwork(attributes.artwork) + const isExplicit = attributes.contentRating === 'explicit' + + let trackUri = attributes.url || '' + if (trackUri) { + trackUri += `${trackUri.includes('?') ? '&' : '?'}explicit=${isExplicit}` + } + + const trackInfo = { + identifier: item.id, + isSeekable: true, + author: attributes.artistName || 'Unknown', + length: attributes.durationInMillis ?? 
0, + isStream: false, + position: 0, + title: attributes.name || 'Unknown', + uri: trackUri, + artworkUrl: artwork, + isrc: attributes.isrc || null, + sourceName: 'shazam' + } + + return { encoded: encodeTrack(trackInfo), info: trackInfo, pluginInfo: {} } + } + + _parseArtwork(artworkData) { + if (!artworkData?.url) return null + return artworkData.url + .replace('{w}', artworkData.width) + .replace('{h}', artworkData.height) + } +} \ No newline at end of file diff --git a/src/sources/soundcloud.js b/src/sources/soundcloud.js index fb6a286..b29da25 100644 --- a/src/sources/soundcloud.js +++ b/src/sources/soundcloud.js @@ -11,22 +11,57 @@ import { const BASE_URL = 'https://api-v2.soundcloud.com' const SOUNDCLOUD_URL = 'https://soundcloud.com' const ASSET_PATTERN = /https:\/\/a-v2\.sndcdn\.com\/assets\/[a-zA-Z0-9-]+\.js/g -const CLIENT_ID_PATTERN = /client_id=([a-zA-Z0-9]{32})/ -const TRACK_PATTERN = /^https?:\/\/(?:www\.|m\.)?soundcloud\.com\/[^/\s]+\/(?:sets\/)?[^/\s]+$/ +const CLIENT_ID_PATTERN = + /(?:[?&/]?(?:client_id)[\s:=&]*"?|"data":{"id":")([A-Za-z0-9]{32})"?/ +const TRACK_PATTERN = + /^https?:\/\/(?:www\.|m\.)?soundcloud\.com\/[^/\s]+\/(?:sets\/)?[^/\s]+$/ +const SEARCH_URL_PATTERN = + /^https?:\/\/(?:www\.)?soundcloud\.com\/search(?:\/(sounds|people|albums|sets))?(?:\?|$)/ const BATCH_SIZE = 50 const DEFAULT_PRIORITY = 85 +const SEARCH_TYPE_MAP = { + track: 'tracks', + tracks: 'tracks', + sounds: 'tracks', + sound: 'tracks', + user: 'users', + users: 'users', + people: 'users', + artist: 'users', + artists: 'users', + album: 'albums', + albums: 'albums', + playlist: 'playlists', + playlists: 'playlists', + set: 'playlists', + sets: 'playlists', + all: 'all', + everything: 'all' +} + export default class SoundCloudSource { constructor(nodelink) { this.nodelink = nodelink this.baseUrl = BASE_URL this.searchTerms = ['scsearch'] - this.patterns = [TRACK_PATTERN] + this.patterns = [TRACK_PATTERN, SEARCH_URL_PATTERN] this.priority = DEFAULT_PRIORITY - this.clientId = nodelink.options?.sources?.clientId ?? null + this.clientId = nodelink.options?.sources?.soundcloud.clientId ?? 
null } async setup() { + const cachedId = this.nodelink.credentialManager.get('soundcloud_client_id') + if (cachedId) { + this.clientId = cachedId + logger( + 'info', + 'Sources', + `Loaded SoundCloud (clientId: ${this.clientId}) from CredentialManager` + ) + return true + } + try { const mainPage = await makeRequest(SOUNDCLOUD_URL, { method: 'GET' }) @@ -36,32 +71,57 @@ export default class SoundCloudSource { return false } - const assetMatches = [...mainPage.body.matchAll(ASSET_PATTERN)] - - if (assetMatches.length === 0) { - logger('warn', 'Sources', 'SoundCloud asset URL not found') - - return false + /** + * @type {string | undefined} + */ + let clientId + + if (mainPage.body?.match(CLIENT_ID_PATTERN)) { + clientId = mainPage.body.match(CLIENT_ID_PATTERN)[1] + logger( + 'debug', + 'Sources', + `SoundCloud client_id (${clientId}) Found from main page` + ) } try { - const clientId = await Promise.any( - assetMatches.map(async (match) => { - const assetUrl = match[0] - const asset = await http1makeRequest(assetUrl) - - if (asset && !asset.error) { - const idMatch = asset.body.match(CLIENT_ID_PATTERN) - if (idMatch?.[1]) { - return idMatch[1] + if (!clientId) { + const assetMatches = [...mainPage.body.matchAll(ASSET_PATTERN)] + + if (assetMatches.length === 0) { + logger('warn', 'Sources', 'SoundCloud asset URL not found') + + return false + } + + clientId = await Promise.any( + assetMatches.map(async (match) => { + const assetUrl = match[0] + const asset = await http1makeRequest(assetUrl) + + if (asset && !asset.error) { + const idMatch = asset.body.match(CLIENT_ID_PATTERN) + if (idMatch?.[1]) { + return idMatch[1] + } } - } - throw new Error('No client_id found in asset') - }) - ) + throw new Error('No client_id found in asset') + }) + ) + } this.clientId = clientId - logger('info', 'Sources', `Loaded SoundCloud (clientId: ${this.clientId})`) + this.nodelink.credentialManager.set( + 'soundcloud_client_id', + clientId, + 7 * 24 * 60 * 60 * 1000 + ) + logger( + 'info', + 'Sources', + `Loaded SoundCloud (clientId: ${this.clientId})` + ) return true } catch { @@ -80,37 +140,94 @@ export default class SoundCloudSource { return this.patterns.some((p) => p.test(url)) } - async search(query) { - if (!this._isValidString(query)) { + _parseSearchIdentifier(rawQuery, providedType = null) { + let searchType = 'tracks' + let searchQuery = (rawQuery || '').trim() + + const scsearchMatch = searchQuery.match(/^scsearch:?/i) + if (scsearchMatch) { + searchQuery = searchQuery.substring(scsearchMatch[0].length) + } + + const colonIndex = searchQuery.indexOf(':') + if (colonIndex > 0 && colonIndex <= 12) { + const possibleType = searchQuery.substring(0, colonIndex).toLowerCase() + const normalizedType = SEARCH_TYPE_MAP[possibleType] + + if (normalizedType) { + searchType = normalizedType + searchQuery = searchQuery.substring(colonIndex + 1).trim() + return { type: searchType, query: searchQuery } + } + } + if (providedType && typeof providedType === 'string') { + let cleanType = providedType.toLowerCase().trim() + + if (cleanType.startsWith('scsearch:')) { + cleanType = cleanType.substring(9) + } else if (cleanType === 'scsearch') { + cleanType = 'tracks' + } + + const normalizedType = SEARCH_TYPE_MAP[cleanType] + if (normalizedType) { + searchType = normalizedType + } + } + + return { type: searchType, query: searchQuery } + } + + async search(query, type = null) { + const parsed = this._parseSearchIdentifier(query, type) + const searchType = parsed.type + const searchQuery = parsed.query + + if 
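// A condensed sketch of the client_id discovery performed in setup() above: check the
// landing page first, then race the bundled asset scripts and keep the first script that
// yields a 32-character id. The fetch function and regex are parameters so the sketch
// stays self-contained; error handling is reduced to the Promise.any rejection.
async function discoverClientId(pageBody, assetUrls, fetchAsset, idPattern) {
  const direct = pageBody?.match(idPattern)
  if (direct?.[1]) return direct[1]

  return Promise.any(
    assetUrls.map(async (assetUrl) => {
      const asset = await fetchAsset(assetUrl)
      const idMatch = asset?.body?.match(idPattern)
      if (idMatch?.[1]) return idMatch[1]
      throw new Error('No client_id found in asset')
    })
  )
}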
(!this._isValidString(searchQuery)) { return this._buildError('Invalid query') } + const endpoint = this._getSearchEndpoint(searchType) + try { const params = new URLSearchParams({ - q: query, + q: searchQuery, client_id: this.clientId, limit: String(this.nodelink.options.maxSearchResults), offset: '0', - linked_partitioning: '1', - facet: 'model' + linked_partitioning: '1' }) - const req = await http1makeRequest(`${BASE_URL}/search?${params}`) + if (searchType === 'all') { + params.append('facet', 'model') + } + + const req = await http1makeRequest(`${BASE_URL}${endpoint}?${params}`) if (req.error || req.statusCode !== 200) { - return this._buildError(req.error?.message ?? `Status: ${req.statusCode}`) + return this._buildError( + req.error?.message ?? `Status: ${req.statusCode}` + ) } if (!req.body?.total_results && !req.body?.collection?.length) { - logger('debug', 'Sources', `No results for '${query}'`) + logger( + 'debug', + 'Sources', + `No SoundCloud results for '${searchQuery}' (type: ${searchType})` + ) return { loadType: 'empty', data: {} } } - const tracks = this._processTracks(req.body.collection) - logger('debug', 'Sources', `Found ${tracks.length} tracks for '${query}'`) + const data = this._processSearchResults(req.body.collection, searchType) + logger( + 'debug', + 'Sources', + `Found ${data.length} SoundCloud results for '${searchQuery}' (type: ${searchType})` + ) - return { loadType: 'search', data: tracks } + return { loadType: 'search', data } } catch (err) { this._logError('Search failed', err) @@ -118,11 +235,213 @@ export default class SoundCloudSource { } } + _getSearchEndpoint(type) { + switch (type) { + case 'tracks': + return '/search/tracks' + case 'users': + return '/search/users' + case 'albums': + return '/search/albums' + case 'playlists': + return '/search/playlists' + case 'all': + return '/search' + default: + return '/search/tracks' + } + } + + _processSearchResults(collection, type) { + if (!Array.isArray(collection)) return [] + + switch (type) { + case 'users': + return this._processUsers(collection) + case 'albums': + return this._processAlbums(collection) + case 'playlists': + return this._processPlaylists(collection) + case 'all': + return this._processAll(collection) + default: + return this._processTracks(collection) + } + } + + _processUsers(collection) { + const max = this.nodelink.options.maxSearchResults + const users = [] + + for (let i = 0; i < collection.length && users.length < max; i++) { + const user = collection[i] + if (user?.kind === 'user' || user?.username) { + const info = { + title: user.username ?? 'Unknown', + author: 'SoundCloud', + length: 0, + identifier: String(user.id ?? ''), + isSeekable: false, + isStream: false, + uri: user.permalink_url ?? '', + artworkUrl: user.avatar_url ?? null, + sourceName: 'soundcloud', + position: 0 + } + + users.push({ + encoded: encodeTrack(info), + info, + pluginInfo: { + type: 'user', + followers: user.followers_count ?? 0, + trackCount: user.track_count ?? 0 + } + }) + } + } + + return users + } + + _processAlbums(collection) { + const max = this.nodelink.options.maxSearchResults + const albums = [] + + for (let i = 0; i < collection.length && albums.length < max; i++) { + const album = collection[i] + if (album?.kind === 'playlist' || album?.title) { + const info = { + title: album.title ?? 'Unknown', + author: album.user?.username ?? 'Unknown', + length: 0, + identifier: String(album.id ?? ''), + isSeekable: true, + isStream: false, + uri: album.permalink_url ?? 
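// search() above routes each result type to its own endpoint and only adds facet=model for
// the combined '/search' endpoint. A small sketch of the request URL that produces, with
// the endpoint table mirroring _getSearchEndpoint:
const buildSearchUrl = (baseUrl, clientId, type, query, limit) => {
  const endpoints = {
    tracks: '/search/tracks',
    users: '/search/users',
    albums: '/search/albums',
    playlists: '/search/playlists',
    all: '/search'
  }
  const params = new URLSearchParams({
    q: query,
    client_id: clientId,
    limit: String(limit),
    offset: '0',
    linked_partitioning: '1'
  })
  if (type === 'all') params.append('facet', 'model')
  return `${baseUrl}${endpoints[type] || '/search/tracks'}?${params}`
}
// buildSearchUrl('https://api-v2.soundcloud.com', 'CLIENT_ID', 'users', 'flume', 10)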
'', + artworkUrl: album.artwork_url ?? null, + sourceName: 'soundcloud', + position: 0 + } + + albums.push({ + encoded: encodeTrack(info), + info, + pluginInfo: { + type: 'album', + trackCount: album.track_count ?? 0 + } + }) + } + } + + return albums + } + + _processPlaylists(collection) { + const max = this.nodelink.options.maxSearchResults + const playlists = [] + + for (let i = 0; i < collection.length && playlists.length < max; i++) { + const playlist = collection[i] + if (playlist?.kind === 'playlist' || playlist?.title) { + const info = { + title: playlist.title ?? 'Unknown', + author: playlist.user?.username ?? 'Unknown', + length: 0, + identifier: String(playlist.id ?? ''), + isSeekable: true, + isStream: false, + uri: playlist.permalink_url ?? '', + artworkUrl: playlist.artwork_url ?? null, + sourceName: 'soundcloud', + position: 0 + } + + playlists.push({ + encoded: encodeTrack(info), + info, + pluginInfo: { + type: 'playlist', + trackCount: playlist.track_count ?? 0 + } + }) + } + } + + return playlists + } + + _processAll(collection) { + const max = this.nodelink.options.maxSearchResults + const results = [] + + for (let i = 0; i < collection.length && results.length < max; i++) { + const item = collection[i] + + if (item?.kind === 'track') { + results.push(this._buildTrack(item)) + } else if (item?.kind === 'user') { + const info = { + title: item.username ?? 'Unknown', + author: 'SoundCloud', + length: 0, + identifier: String(item.id ?? ''), + isSeekable: false, + isStream: false, + uri: item.permalink_url ?? '', + artworkUrl: item.avatar_url ?? null, + sourceName: 'soundcloud', + position: 0 + } + + results.push({ + encoded: encodeTrack(info), + info, + pluginInfo: { + type: 'user', + followers: item.followers_count ?? 0, + trackCount: item.track_count ?? 0 + } + }) + } else if (item?.kind === 'playlist') { + const info = { + title: item.title ?? 'Unknown', + author: item.user?.username ?? 'Unknown', + length: 0, + identifier: String(item.id ?? ''), + isSeekable: true, + isStream: false, + uri: item.permalink_url ?? '', + artworkUrl: item.artwork_url ?? null, + sourceName: 'soundcloud', + position: 0 + } + + results.push({ + encoded: encodeTrack(info), + info, + pluginInfo: { + type: item.is_album ? 'album' : 'playlist', + trackCount: item.track_count ?? 0 + } + }) + } + } + + return results + } + async resolve(url) { if (!this._isValidString(url)) { return this._buildError('Invalid URL') } + const searchMatch = url.match(SEARCH_URL_PATTERN) + if (searchMatch) { + return this._resolveSearchUrl(url, searchMatch[1]) + } + try { const reqUrl = `${BASE_URL}/resolve?${new URLSearchParams({ url, client_id: this.clientId })}` const req = await http1makeRequest(reqUrl) @@ -130,7 +449,9 @@ export default class SoundCloudSource { if (req.statusCode === 404) return { loadType: 'empty', data: {} } if (req.error || req.statusCode !== 200) { - return this._buildError(req.error?.message ?? `Status: ${req.statusCode}`) + return this._buildError( + req.error?.message ?? 
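// _processUsers, _processAlbums and _processPlaylists above all emit the same shape: a
// zero-length, track-like entry whose pluginInfo carries the collection type and counters,
// so callers always receive track-shaped results. A condensed sketch of that shape, with
// buildEntry as a hypothetical helper and encodeTrack passed in from ../utils.js:
const buildEntry = (encodeTrack, partialInfo, type, extra = {}) => {
  const info = {
    length: 0,
    isStream: false,
    position: 0,
    sourceName: 'soundcloud',
    ...partialInfo
  }
  return { encoded: encodeTrack(info), info, pluginInfo: { type, ...extra } }
}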
`Status: ${req.statusCode}` + ) } const { body } = req @@ -153,6 +474,32 @@ export default class SoundCloudSource { } } + async _resolveSearchUrl(url, searchType) { + try { + const urlObj = new URL(url) + const query = urlObj.searchParams.get('q') + + if (!query) { + return { loadType: 'empty', data: {} } + } + + const typeMap = { + sounds: 'tracks', + people: 'users', + albums: 'albums', + sets: 'playlists' + } + + const type = typeMap[searchType] || 'all' + + return await this.search(query, type) + } catch (err) { + this._logError('Search URL resolve failed', err) + + return this._buildError(err.message) + } + } + async _resolvePlaylist(body) { const complete = [] const ids = [] @@ -262,7 +609,9 @@ export default class SoundCloudSource { if (req.error || req.statusCode !== 200) { this._logError('getTrackUrl failed', req.error) - return this._buildException(req.error?.message ?? `Status: ${req.statusCode}`) + return this._buildException( + req.error?.message ?? `Status: ${req.statusCode}` + ) } if (req.body?.errors?.[0]) { @@ -320,14 +669,18 @@ export default class SoundCloudSource { const hlsAacHigh = transcodings.find( (t) => t.format?.protocol === 'hls' && - (t.format?.mime_type?.includes('aac') || t.format?.mime_type?.includes('mp4')) && - (t.quality === 'hq' || t.preset?.includes('160') || t.url.includes('160')) + (t.format?.mime_type?.includes('aac') || + t.format?.mime_type?.includes('mp4')) && + (t.quality === 'hq' || + t.preset?.includes('160') || + t.url.includes('160')) ) const hlsAacStandard = transcodings.find( (t) => t.format?.protocol === 'hls' && - (t.format?.mime_type?.includes('aac') || t.format?.mime_type?.includes('mp4')) + (t.format?.mime_type?.includes('aac') || + t.format?.mime_type?.includes('mp4')) ) const anyHls = transcodings.find((t) => t.format?.protocol === 'hls') @@ -360,7 +713,11 @@ export default class SoundCloudSource { finalUrl = urlReq.url } else if (urlReq.statusCode === 302 || urlReq.statusCode === 301) { finalUrl = urlReq.headers?.location - } else if (urlReq.body && typeof urlReq.body === 'object' && urlReq.body.url) { + } else if ( + urlReq.body && + typeof urlReq.body === 'object' && + urlReq.body.url + ) { finalUrl = urlReq.body.url } else if (urlReq.statusCode === 200) { finalUrl = streamAuthUrl @@ -393,7 +750,7 @@ export default class SoundCloudSource { } } - async loadStream(track, url, protocol, additionalData) { + async loadStream(_track, url, protocol, _additionalData) { const stream = new PassThrough() if (protocol === 'progressive') { @@ -421,7 +778,11 @@ export default class SoundCloudSource { pipeline(res.stream, stream, (err) => { if (err) { - logger('error', 'Sources', `Progressive pipeline error: ${err.message}`) + logger( + 'error', + 'Sources', + `Progressive pipeline error: ${err.message}` + ) if (!stream.destroyed) stream.destroy(err) } else { stream.emit('finishBuffering') diff --git a/src/sources/spotify.js b/src/sources/spotify.js index ada63fe..450b6f5 100644 --- a/src/sources/spotify.js +++ b/src/sources/spotify.js @@ -1,15 +1,46 @@ -import { encodeTrack, http1makeRequest, logger } from '../utils.js' +import { + encodeTrack, + getBestMatch, + http1makeRequest, + logger +} from '../utils.js' const SPOTIFY_API_BASE_URL = 'https://api.spotify.com/v1' +const SPOTIFY_CLIENT_API_URL = 'https://spclient.wg.spotify.com' +const SPOTIFY_INTERNAL_API_URL = + 'https://api-partner.spotify.com/pathfinder/v2/query' const TOKEN_REFRESH_MARGIN = 300000 -const DURATION_TOLERANCE = 0.15 const BATCH_SIZE_DEFAULT = 5 +const QUERIES = { + 
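// getTrackUrl in soundcloud.js above builds its transcoding candidates in what reads as a
// preference order: HLS AAC around the 160kbps preset, then any HLS AAC, then any HLS
// stream. A condensed sketch of selecting by that order, with a progressive fallback added
// here as an assumption rather than something the hunk shows explicitly:
const pickTranscoding = (transcodings) => {
  const isHls = (t) => t.format?.protocol === 'hls'
  const isAac = (t) =>
    t.format?.mime_type?.includes('aac') || t.format?.mime_type?.includes('mp4')
  const isHq = (t) =>
    t.quality === 'hq' || t.preset?.includes('160') || t.url?.includes('160')

  return (
    transcodings.find((t) => isHls(t) && isAac(t) && isHq(t)) ||
    transcodings.find((t) => isHls(t) && isAac(t)) ||
    transcodings.find(isHls) ||
    transcodings.find((t) => t.format?.protocol === 'progressive') ||
    null
  )
}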
getTrack: { + name: 'getTrack', + hash: '612585ae06ba435ad26369870deaae23b5c8800a256cd8a57e08eddc25a37294' + }, + getAlbum: { + name: 'getAlbum', + hash: 'b9bfabef66ed756e5e13f68a942deb60bd4125ec1f1be8cc42769dc0259b4b10' + }, + getPlaylist: { + name: 'fetchPlaylist', + hash: 'bb67e0af06e8d6f52b531f97468ee4acd44cd0f82b988e15c2ea47b1148efc77' + }, + getArtist: { + name: 'queryArtistOverview', + hash: '35648a112beb1794e39ab931365f6ae4a8d45e65396d641eeda94e4003d41497' + }, + searchDesktop: { + name: 'searchDesktop', + hash: 'fcad5a3e0d5af727fb76966f06971c19cfa2275e6ff7671196753e008611873c' + } +} + export default class SpotifySource { constructor(nodelink) { this.nodelink = nodelink this.config = nodelink.options this.searchTerms = ['spsearch'] + this.recommendationTerm = ['sprec'] this.patterns = [ /https?:\/\/(?:open\.)?spotify\.com\/(?:intl-[a-zA-Z]{2}\/)?(track|album|playlist|artist|episode|show)\/([a-zA-Z0-9]+)/ ] @@ -19,6 +50,7 @@ export default class SpotifySource { this.tokenExpiry = null this.clientId = null this.clientSecret = null + this.externalAuthUrl = null this.playlistLoadLimit = 0 this.playlistPageLoadConcurrency = BATCH_SIZE_DEFAULT this.albumLoadLimit = 0 @@ -26,14 +58,36 @@ export default class SpotifySource { this.market = 'US' this.tokenInitialized = false this.allowExplicit = true + this.anonymousToken = null } async setup() { - if (this.tokenInitialized && this._isTokenValid()) return true + this.accessToken = this.nodelink.credentialManager.get( + 'spotify_access_token' + ) + this.anonymousToken = this.nodelink.credentialManager.get( + 'spotify_anonymous_token' + ) + + const hasOfficialConfig = + this.config.sources.spotify?.clientId && + this.config.sources.spotify?.clientSecret + const hasAnonymousConfig = this.config.sources.spotify?.externalAuthUrl + + const missingOfficial = hasOfficialConfig && !this.accessToken + const missingAnonymous = hasAnonymousConfig && !this.anonymousToken + + if (!missingOfficial && !missingAnonymous) { + if (this.accessToken || this.anonymousToken) { + this.tokenInitialized = true + return true + } + } try { this.clientId = this.config.sources.spotify?.clientId this.clientSecret = this.config.sources.spotify?.clientSecret + this.externalAuthUrl = this.config.sources.spotify?.externalAuthUrl this.playlistLoadLimit = this.config.sources.spotify?.playlistLoadLimit ?? 0 this.playlistPageLoadConcurrency = @@ -46,11 +100,11 @@ export default class SpotifySource { this.market = this.config.sources.spotify?.market || 'US' this.allowExplicit = this.config.sources.spotify?.allowExplicit ?? true - if (!this.clientId || !this.clientSecret) { + if (!this.externalAuthUrl && (!this.clientId || !this.clientSecret)) { logger( 'warn', 'Spotify', - 'Client ID or Client Secret not provided. Disabling source.' + 'Neither externalAuthUrl nor Client ID/Secret provided. Disabling source.' ) return false } @@ -60,7 +114,7 @@ export default class SpotifySource { logger( 'info', 'Spotify', - `Tokens initialized successfully (playlistLoadLimit: ${this._formatLimit(this.playlistLoadLimit, 100)}, albumLoadLimit: ${this._formatLimit(this.albumLoadLimit, 50)})` + `Tokens initialized successfully. 
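// setup() above only refreshes what is actually missing: an official client-credentials
// token when clientId/clientSecret are configured, and an anonymous web-player token when
// externalAuthUrl is configured; the source stays disabled when neither is present. A
// boiled-down sketch of that decision (names are illustrative):
const tokensToRefresh = (spotifyConfig, cached) => {
  const wantsOfficial = Boolean(spotifyConfig.clientId && spotifyConfig.clientSecret)
  const wantsAnonymous = Boolean(spotifyConfig.externalAuthUrl)
  return {
    enabled: wantsOfficial || wantsAnonymous,
    official: wantsOfficial && !cached.accessToken,
    anonymous: wantsAnonymous && !cached.anonymousToken
  }
}
// tokensToRefresh({ externalAuthUrl: 'https://auth.example/token' }, {})
//   -> { enabled: true, official: false, anonymous: true }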
Official: ${!!this.accessToken}, Anonymous: ${!!this.anonymousToken}` ) } return success @@ -85,63 +139,135 @@ export default class SpotifySource { } async _refreshToken() { - try { - const auth = Buffer.from( - `${this.clientId}:${this.clientSecret}` - ).toString('base64') - - const { - body: tokenData, - error, - statusCode - } = await http1makeRequest('https://accounts.spotify.com/api/token', { - method: 'POST', - headers: { - Authorization: `Basic ${auth}`, - 'Content-Type': 'application/x-www-form-urlencoded' - }, - body: 'grant_type=client_credentials', - disableBodyCompression: true - }) + let success = false + + if (this.externalAuthUrl && !this.anonymousToken) { + try { + const response = await http1makeRequest(this.externalAuthUrl, { + headers: { Accept: 'application/json' }, + disableBodyCompression: true + }) + + const { body: tokenData, error, statusCode } = response + + if (!error && statusCode === 200 && tokenData?.accessToken) { + this.anonymousToken = tokenData.accessToken + const expiresMs = tokenData.accessTokenExpirationTimestampMs + ? tokenData.accessTokenExpirationTimestampMs - Date.now() + : 3600000 + + if (!this.accessToken) { + this.tokenExpiry = Date.now() + Math.max(expiresMs, 60000) + } + + this.nodelink.credentialManager.set( + 'spotify_anonymous_token', + this.anonymousToken, + Math.max(expiresMs, 60000) + ) + success = true + } else { + logger( + 'warn', + 'Spotify', + `Failed to fetch anonymous token: ${statusCode}` + ) + } + } catch (e) { + logger( + 'error', + 'Spotify', + `Anonymous token refresh failed: ${e.message}` + ) + } + } else if (this.anonymousToken) { + success = true + } - if (error || statusCode !== 200) { + if (this.clientId && this.clientSecret && !this.accessToken) { + try { + const auth = Buffer.from( + `${this.clientId}:${this.clientSecret}` + ).toString('base64') + + const { + body: tokenData, + error, + statusCode + } = await http1makeRequest('https://accounts.spotify.com/api/token', { + method: 'POST', + headers: { + Authorization: `Basic ${auth}`, + 'Content-Type': 'application/x-www-form-urlencoded' + }, + body: 'grant_type=client_credentials', + disableBodyCompression: true + }) + + if (!error && statusCode === 200) { + this.accessToken = tokenData.access_token + this.tokenExpiry = Date.now() + tokenData.expires_in * 1000 + this.nodelink.credentialManager.set( + 'spotify_access_token', + this.accessToken, + tokenData.expires_in * 1000 + ) + success = true + } else { + logger( + 'error', + 'Spotify', + `Failed to refresh official token: ${statusCode}` + ) + } + } catch (e) { logger( 'error', 'Spotify', - `Error refreshing token: ${statusCode} - ${error?.message || 'Unknown error'}` + `Official token refresh failed: ${e.message}` ) - return false } + } else if (this.accessToken) { + success = true + } - this.accessToken = tokenData.access_token - this.tokenExpiry = Date.now() + tokenData.expires_in * 1000 - this.tokenInitialized = true - return true - } catch (e) { - logger('error', 'Spotify', `Token refresh failed: ${e.message}`) - return false + if (!this.accessToken && this.anonymousToken) { + this.accessToken = this.anonymousToken } + + this.tokenInitialized = success + return success } async _apiRequest(path) { if (!this.tokenInitialized || !this._isTokenValid()) { - const success = await this.setup() - if (!success) - throw new Error('Failed to initialize Spotify for API request.') + await this.setup() } try { const url = path.startsWith('http') ? 
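// The anonymous-token branch above expects the externalAuthUrl endpoint to answer with
// { accessToken, accessTokenExpirationTimestampMs } and clamps the cache TTL to at least a
// minute, defaulting to an hour when no expiry is returned. A sketch of that arithmetic:
const anonymousTokenTtl = (tokenData, now = Date.now()) => {
  const expiresMs = tokenData.accessTokenExpirationTimestampMs
    ? tokenData.accessTokenExpirationTimestampMs - now
    : 3600000
  return Math.max(expiresMs, 60000)
}
// anonymousTokenTtl({ accessTokenExpirationTimestampMs: Date.now() + 30000 }) -> 60000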
path : `${SPOTIFY_API_BASE_URL}${path}` - - const { body, statusCode } = await http1makeRequest(url, { + const { body, statusCode, headers } = await http1makeRequest(url, { headers: { Authorization: `Bearer ${this.accessToken}`, Accept: 'application/json' } }) + if (statusCode === 429) { + const retryAfter = headers['retry-after'] + ? parseInt(headers['retry-after'], 10) + : 5 + logger( + 'warn', + 'Spotify', + `Rate limited. Retrying after ${retryAfter} seconds.` + ) + await new Promise((resolve) => setTimeout(resolve, retryAfter * 1000)) + return this._apiRequest(path) + } + if (statusCode === 401) { this.tokenInitialized = false return this._apiRequest(path) @@ -159,14 +285,193 @@ export default class SpotifySource { } } + async _internalApiRequest(operation, variables) { + if (!this.tokenInitialized || !this._isTokenValid()) { + await this.setup() + } + + const token = this.anonymousToken || this.accessToken + + if (!token) { + throw new Error('No token available for internal API request.') + } + + try { + const { body, statusCode, headers } = await http1makeRequest( + SPOTIFY_INTERNAL_API_URL, + { + method: 'POST', + headers: { + Authorization: `Bearer ${token}`, + 'App-Platform': 'WebPlayer', + 'Spotify-App-Version': '1.2.81.104.g225ec0e6', + 'Content-Type': 'application/json; charset=utf-8' + }, + body: { + variables, + operationName: operation.name, + extensions: { + persistedQuery: { + version: 1, + sha256Hash: operation.hash + } + } + }, + disableBodyCompression: true + } + ) + + if (statusCode === 429) { + const retryAfter = headers['retry-after'] + ? parseInt(headers['retry-after'], 10) + : 5 + logger( + 'warn', + 'Spotify', + `Internal API Rate limited. Retrying after ${retryAfter} seconds.` + ) + await new Promise((resolve) => setTimeout(resolve, retryAfter * 1000)) + return this._internalApiRequest(operation, variables) + } + + if (statusCode === 401) { + this.tokenInitialized = false + return this._internalApiRequest(operation, variables) + } + + if (statusCode !== 200 || body.errors) { + logger( + 'error', + 'Spotify', + `Internal API error: ${statusCode} - ${JSON.stringify(body.errors || body)}` + ) + return null + } + + return body.data + } catch (e) { + logger( + 'error', + 'Spotify', + `Error in Spotify internalApiRequest: ${e.message}` + ) + return null + } + } + + async _fetchInternalPaginatedData( + operation, + uri, + totalItems, + limit, + maxPages, + concurrency, + extraVars = {} + ) { + const allItems = [] + let pagesToFetch = Math.ceil(totalItems / limit) + + if (maxPages > 0) { + pagesToFetch = Math.min(pagesToFetch, maxPages) + } + + const requests = [] + for (let i = 1; i < pagesToFetch; i++) { + requests.push({ + ...extraVars, + uri, + offset: i * limit, + limit + }) + } + + if (requests.length === 0) return allItems + + for (let i = 0; i < requests.length; i += concurrency) { + const batch = requests.slice(i, i + concurrency) + let attempts = 0 + while (attempts < 3) { + try { + this.nodelink.sendHeartbeat?.() + const results = await Promise.all( + batch.map((vars) => this._internalApiRequest(operation, vars)) + ) + for (const data of results) { + const items = + data?.playlistV2?.content?.items || + data?.albumUnion?.tracksV2?.items + if (items) { + allItems.push(...items) + } + } + break + } catch (e) { + attempts++ + if (attempts >= 3) { + logger( + 'warn', + 'Spotify', + `Failed to fetch a batch of internal pages after 3 attempts: ${e.message}` + ) + } else { + await new Promise((r) => setTimeout(r, 1500)) + } + } + } + } + + return allItems + } + + 
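// _internalApiRequest above talks to the pathfinder endpoint with GraphQL persisted
// queries: no query text is sent, only the operation name, its sha256 hash from the
// QUERIES table, and the variables. A sketch of the request body it builds:
const persistedQueryBody = (operation, variables) => ({
  variables,
  operationName: operation.name,
  extensions: {
    persistedQuery: { version: 1, sha256Hash: operation.hash }
  }
})
// persistedQueryBody(QUERIES.getTrack, { uri: 'spotify:track:TRACK_ID' })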
_buildTrackFromInternal(item, artworkUrl = null) { + if (!item?.uri) return null + + const id = item.uri.split(':').pop() + const isExplicit = + item.contentRating?.label === 'EXPLICIT' || item.explicit === true + + let trackUri = `https://open.spotify.com/track/${id}` + trackUri += `?explicit=${isExplicit}` + + const trackInfo = { + identifier: id, + isSeekable: true, + author: + item.artists?.items?.map((a) => a.profile?.name || a.name).join(', ') || + item.firstArtist?.items[0]?.profile?.name || + item.otherArtists.items.map((a) => a.profile.name).join(', ') || + 'Unknown', + length: + item.duration?.totalMilliseconds || + item.trackDuration?.totalMilliseconds || + 0, + isStream: false, + position: 0, + title: item.name, + uri: trackUri, + artworkUrl: + artworkUrl || + item.albumOfTrack?.coverArt?.sources?.[0]?.url || + item.album?.images?.[0]?.url || + null, + isrc: item.externalIds?.isrc || null, + sourceName: 'spotify' + } + + return { + encoded: encodeTrack(trackInfo), + info: trackInfo, + pluginInfo: {} + } + } + _buildTrack(item, artworkUrl = null) { if (!item?.id) return null const isExplicit = item.explicit || false let trackUri = item.external_urls?.spotify || '' if (trackUri) { - trackUri += - (trackUri.includes('?') ? '&' : '?') + `explicit=${isExplicit}` + trackUri += `${trackUri.includes('?') ? '&' : '?'}explicit=${isExplicit}` } const trackInfo = { @@ -211,30 +516,76 @@ export default class SpotifySource { const batchSize = concurrency for (let i = 0; i < promises.length; i += batchSize) { const batch = promises.slice(i, i + batchSize) - try { - const results = await Promise.all(batch) - for (const page of results) { - if (page?.items) { - allItems.push(...page.items) + let attempts = 0 + while (attempts < 3) { + try { + const results = await Promise.all(batch) + for (const page of results) { + if (page?.items) { + allItems.push(...page.items) + } + } + break + } catch (e) { + attempts++ + if (attempts >= 3) { + logger( + 'warn', + 'Spotify', + `Failed to fetch a batch of pages after 3 attempts: ${e.message}` + ) + } else { + await new Promise((r) => setTimeout(r, 1500)) } } - } catch (e) { - logger( - 'warn', - 'Spotify', - `Failed to fetch a batch of pages: ${e.message}` - ) } } return allItems } - async search(query) { + async search(query, sourceTerm, searchType = 'track') { + if (this.recommendationTerm.includes(sourceTerm)) { + return this.getRecommendations(query) + } + try { const limit = this.config.maxSearchResults || 10 + + if (this.externalAuthUrl) { + const data = await this._internalApiRequest(QUERIES.searchDesktop, { + searchTerm: query, + offset: 0, + limit, + numberOfTopResults: 5, + includeAudiobooks: false, + includeArtistHasConcertsField: false, + includePreReleases: false + }) + + if (!data?.searchV2) { + return { loadType: 'empty', data: {} } + } + + const results = this._processInternalSearchResults( + data.searchV2, + searchType + ) + return results.length === 0 + ? 
{ loadType: 'empty', data: {} } + : { loadType: 'search', data: results } + } + + const typeMap = { + track: 'track', + album: 'album', + playlist: 'playlist', + artist: 'artist' + } + const spotifyType = typeMap[searchType] || 'track' + const data = await this._apiRequest( - `/search?q=${encodeURIComponent(query)}&type=track&limit=${limit}&market=${this.market}` + `/search?q=${encodeURIComponent(query)}&type=${spotifyType}&limit=${limit}&market=${this.market}` ) if (!data || data.error) { @@ -246,24 +597,258 @@ export default class SpotifySource { } } - if (!data.tracks || data.tracks.items.length === 0) { + const results = this._processOfficialSearchResults(data, spotifyType) + return results.length === 0 + ? { loadType: 'empty', data: {} } + : { loadType: 'search', data: results } + } catch (e) { + return { + exception: { message: e.message, severity: 'fault' } + } + } + } + + async getRecommendations(query) { + try { + if (query.startsWith('mix:') || !query.includes('=')) { + let seedType = 'track' + let seed = query + + if (query.startsWith('mix:')) { + const mixMatch = query.match( + /^mix:(track|artist|album|isrc):([^:]+)$/ + ) + if (mixMatch) { + seedType = mixMatch[1] + seed = mixMatch[2] + } + } + + if ( + seedType === 'isrc' || + (seedType === 'track' && + (seed.includes(' ') || !/^[a-zA-Z0-9]{22}$/.test(seed))) + ) { + const searchResult = await this.search( + seedType === 'isrc' ? `isrc:${seed}` : seed, + 'spsearch', + 'track' + ) + if ( + searchResult.loadType === 'search' && + searchResult.data.length > 0 + ) { + seed = searchResult.data[0].info.identifier + seedType = 'track' + } else { + return { loadType: 'empty', data: {} } + } + } + + const token = this.anonymousToken || this.accessToken + if (!token) throw new Error('No token available for mix request') + + const { body: rjson, statusCode } = await http1makeRequest( + `${SPOTIFY_CLIENT_API_URL}/inspiredby-mix/v2/seed_to_playlist/spotify:${seedType}:${seed}?response-format=json`, + { + headers: { Authorization: `Bearer ${token}` }, + disableBodyCompression: true + } + ) + + if (statusCode === 200 && rjson?.mediaItems?.length > 0) { + const playlistId = rjson.mediaItems[0].uri.split(':')[2] + return this._resolvePlaylist(playlistId) + } + + if (query.startsWith('mix:')) return { loadType: 'empty', data: {} } + } + + const data = await this._apiRequest( + `/recommendations?${query.includes('=') ? 
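// getRecommendations above accepts plain seeds as well as 'mix:<kind>:<value>' identifiers,
// where kind is track, artist, album or isrc, and falls back to a track search whenever the
// value is not a 22-character Spotify id. A condensed sketch of the seed parsing alone:
const parseMixSeed = (query) => {
  const mixMatch = query.match(/^mix:(track|artist|album|isrc):([^:]+)$/)
  if (mixMatch) return { seedType: mixMatch[1], seed: mixMatch[2] }
  return { seedType: 'track', seed: query }
}
// parseMixSeed('mix:isrc:USABC2500001') -> { seedType: 'isrc', seed: 'USABC2500001' }
// parseMixSeed('some song name')        -> { seedType: 'track', seed: 'some song name' }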
query : `seed_tracks=${query}`}` + ) + if (!data || !data.tracks || data.tracks.length === 0) { return { loadType: 'empty', data: {} } } - const tracks = data.tracks.items + const tracks = data.tracks .map((item) => this._buildTrack(item)) .filter(Boolean) + return { + loadType: 'playlist', + data: { + info: { name: 'Spotify Recommendations', selectedTrack: 0 }, + pluginInfo: { type: 'recommendations' }, + tracks + } + } + } catch (e) { + return { exception: { message: e.message, severity: 'fault' } } + } + } - if (tracks.length === 0) { - return { loadType: 'empty', data: {} } + _processInternalSearchResults(searchV2, searchType) { + const results = [] + + if (searchType === 'track' && searchV2.tracksV2?.items) { + for (const item of searchV2.tracksV2.items) { + const track = this._buildTrackFromInternal(item.item.data) + if (track) results.push(track) } + } else if (searchType === 'album' && searchV2.albumsV2?.items) { + for (const item of searchV2.albumsV2.items) { + const album = item.data + const info = { + title: album.name, + author: album.artists.items.map((a) => a.profile.name).join(', '), + length: 0, + identifier: album.uri.split(':').pop(), + isSeekable: true, + isStream: false, + uri: `https://open.spotify.com/album/${album.uri.split(':').pop()}`, + artworkUrl: album.coverArt?.sources?.[0]?.url || null, + isrc: null, + sourceName: 'spotify', + position: 0 + } + results.push({ + encoded: encodeTrack(info), + info, + pluginInfo: { type: 'album' } + }) + } + } else if (searchType === 'playlist' && searchV2.playlists?.items) { + for (const item of searchV2.playlists.items) { + const playlist = item.data + const info = { + title: playlist.name, + author: playlist.ownerV2?.data?.name || 'Unknown', + length: 0, + identifier: playlist.uri.split(':').pop(), + isSeekable: true, + isStream: false, + uri: `https://open.spotify.com/playlist/${playlist.uri.split(':').pop()}`, + artworkUrl: playlist.images?.items?.[0]?.sources?.[0]?.url || null, + isrc: null, + sourceName: 'spotify', + position: 0 + } + results.push({ + encoded: encodeTrack(info), + info, + pluginInfo: { type: 'playlist' } + }) + } + } else if (searchType === 'artist' && searchV2.artists?.items) { + for (const item of searchV2.artists.items) { + const artist = item.data + const info = { + title: artist.profile.name, + author: 'Spotify', + length: 0, + identifier: artist.uri.split(':').pop(), + isSeekable: false, + isStream: false, + uri: `https://open.spotify.com/artist/${artist.uri.split(':').pop()}`, + artworkUrl: artist.visuals?.avatarImage?.sources?.[0]?.url || null, + isrc: null, + sourceName: 'spotify', + position: 0 + } + results.push({ + encoded: encodeTrack(info), + info, + pluginInfo: { type: 'artist' } + }) + } + } - return { loadType: 'search', data: tracks } - } catch (e) { - return { - exception: { message: e.message, severity: 'fault' } + return results + } + + _processOfficialSearchResults(data, spotifyType) { + const results = [] + + if (spotifyType === 'track' && data.tracks?.items) { + for (const item of data.tracks.items) { + const track = this._buildTrack(item) + if (track) results.push(track) + } + } else if (spotifyType === 'album' && data.albums?.items) { + for (const item of data.albums.items) { + if (!item) continue + const info = { + title: item.name, + author: item.artists.map((a) => a.name).join(', '), + length: 0, + identifier: item.id, + isSeekable: true, + isStream: false, + uri: + item.external_urls?.spotify || + `https://open.spotify.com/album/${item.id}`, + artworkUrl: item.images?.[0]?.url 
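// The internal search results above carry canonical URIs such as 'spotify:album:<id>', and
// the code repeatedly derives both the identifier and a public open.spotify.com URL from
// them. A small illustrative helper for that conversion:
const fromSpotifyUri = (uri) => {
  const [, kind, id] = uri.split(':')
  return { kind, id, url: `https://open.spotify.com/${kind}/${id}` }
}
// fromSpotifyUri('spotify:playlist:PLAYLIST_ID')
//   -> { kind: 'playlist', id: 'PLAYLIST_ID', url: 'https://open.spotify.com/playlist/PLAYLIST_ID' }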
|| null, + isrc: null, + sourceName: 'spotify', + position: 0 + } + results.push({ + encoded: encodeTrack(info), + info, + pluginInfo: { type: 'album' } + }) + } + } else if (spotifyType === 'playlist' && data.playlists?.items) { + for (const item of data.playlists.items) { + if (!item) continue + const info = { + title: item.name, + author: item.owner?.display_name || 'Unknown', + length: 0, + identifier: item.id, + isSeekable: true, + isStream: false, + uri: + item.external_urls?.spotify || + `https://open.spotify.com/playlist/${item.id}`, + artworkUrl: item.images?.[0]?.url || null, + isrc: null, + sourceName: 'spotify', + position: 0 + } + results.push({ + encoded: encodeTrack(info), + info, + pluginInfo: { type: 'playlist' } + }) + } + } else if (spotifyType === 'artist' && data.artists?.items) { + for (const item of data.artists.items) { + if (!item) continue + const info = { + title: item.name, + author: 'Spotify', + length: 0, + identifier: item.id, + isSeekable: false, + isStream: false, + uri: + item.external_urls?.spotify || + `https://open.spotify.com/artist/${item.id}`, + artworkUrl: item.images?.[0]?.url || null, + isrc: null, + sourceName: 'spotify', + position: 0 + } + results.push({ + encoded: encodeTrack(info), + info, + pluginInfo: { type: 'artist' } + }) } } + + return results } async resolve(url) { @@ -301,6 +886,21 @@ export default class SpotifySource { } async _resolveTrack(id) { + if (this.externalAuthUrl) { + const data = await this._internalApiRequest(QUERIES.getTrack, { + uri: `spotify:track:${id}` + }) + if (!data?.trackUnion || data.trackUnion.__typename === 'NotFound') { + return { + exception: { message: 'Track not found.', severity: 'common' } + } + } + return { + loadType: 'track', + data: this._buildTrackFromInternal(data.trackUnion) + } + } + const data = await this._apiRequest(`/tracks/${id}?market=${this.market}`) if (!data) { return { @@ -311,6 +911,53 @@ export default class SpotifySource { } async _resolveAlbum(id) { + if (this.externalAuthUrl) { + const data = await this._internalApiRequest(QUERIES.getAlbum, { + uri: `spotify:album:${id}`, + locale: 'en', + offset: 0, + limit: 300 + }) + + if (!data?.albumUnion || data.albumUnion.__typename === 'NotFound') { + return { + exception: { message: 'Album not found.', severity: 'common' } + } + } + + const allItems = [...data.albumUnion.tracksV2.items] + const totalTracks = data.albumUnion.tracksV2.totalCount + if (totalTracks > 300) { + const additionalItems = await this._fetchInternalPaginatedData( + QUERIES.getAlbum, + `spotify:album:${id}`, + totalTracks, + 300, + this.albumLoadLimit, + this.albumPageLoadConcurrency, + { locale: 'en' } + ) + allItems.push(...additionalItems) + } + + const tracks = allItems + .map((item) => + this._buildTrackFromInternal( + item.track, + data.albumUnion.coverArt.sources[0].url + ) + ) + .filter(Boolean) + + return { + loadType: 'playlist', + data: { + info: { name: data.albumUnion.name, selectedTrack: 0 }, + tracks + } + } + } + const albumData = await this._apiRequest( `/albums/${id}?market=${this.market}` ) @@ -362,6 +1009,46 @@ export default class SpotifySource { } async _resolvePlaylist(id) { + if (this.externalAuthUrl) { + const data = await this._internalApiRequest(QUERIES.getPlaylist, { + uri: `spotify:playlist:${id}`, + offset: 0, + limit: 100, + enableWatchFeedEntrypoint: false + }) + + if (!data?.playlistV2 || data.playlistV2.__typename === 'NotFound') { + return { + exception: { message: 'Playlist not found.', severity: 'common' } + } + } + + const 
allItems = [...data.playlistV2.content.items] + const totalTracks = data.playlistV2.content.totalCount + const additionalItems = await this._fetchInternalPaginatedData( + QUERIES.getPlaylist, + `spotify:playlist:${id}`, + totalTracks, + 100, + this.playlistLoadLimit, + this.playlistPageLoadConcurrency, + { enableWatchFeedEntrypoint: false } + ) + allItems.push(...additionalItems) + + const tracks = allItems + .map((item) => this._buildTrackFromInternal(item.itemV2.data)) + .filter(Boolean) + + return { + loadType: 'playlist', + data: { + info: { name: data.playlistV2.name, selectedTrack: 0 }, + tracks + } + } + } + const fields = 'name,tracks(items(track(id,name,artists,duration_ms,external_urls,external_ids,explicit,album(images))),total)' const playlistData = await this._apiRequest( @@ -414,6 +1101,35 @@ export default class SpotifySource { } async _resolveArtist(id) { + if (this.externalAuthUrl) { + const data = await this._internalApiRequest(QUERIES.getArtist, { + uri: `spotify:artist:${id}`, + locale: 'en', + includePrerelease: true + }) + + if (!data?.artistUnion || data.artistUnion.__typename === 'NotFound') { + return { + exception: { message: 'Artist not found.', severity: 'common' } + } + } + + const tracks = data.artistUnion.discography.topTracks.items + .map((item) => this._buildTrackFromInternal(item.track)) + .filter(Boolean) + + return { + loadType: 'playlist', + data: { + info: { + name: `${data.artistUnion.profile.name}'s Top Tracks`, + selectedTrack: 0 + }, + tracks + } + } + } + const artist = await this._apiRequest(`/artists/${id}`) if (!artist) { return { @@ -447,22 +1163,68 @@ export default class SpotifySource { } async getTrackUrl(decodedTrack) { + if (!decodedTrack.isrc && this.accessToken) { + try { + const trackData = await this._apiRequest( + `/tracks/${decodedTrack.identifier}?market=${this.market}` + ) + if (trackData?.external_ids?.isrc) { + decodedTrack.isrc = trackData.external_ids.isrc + } + } catch (e) { + logger( + 'debug', + 'Spotify', + `Failed to fetch ISRC for ${decodedTrack.identifier} via API: ${e.message}` + ) + } + } + let isExplicit = false if (decodedTrack.uri) { try { const url = new URL(decodedTrack.uri) isExplicit = url.searchParams.get('explicit') === 'true' - } catch (e) { + } catch (_e) { // Ignore malformed URI } } - const spotifyDuration = decodedTrack.length - - const query = this._buildSearchQuery(decodedTrack, isExplicit) + const searchQuery = this._buildSearchQuery(decodedTrack, isExplicit) try { - const searchResult = await this.nodelink.sources.searchWithDefault(query) + let searchResult + if (decodedTrack.isrc) { + searchResult = await this.nodelink.sources.search( + 'youtube', + `"${decodedTrack.isrc}"`, + 'ytmsearch' + ) + if ( + searchResult.loadType !== 'search' || + searchResult.data.length === 0 + ) { + searchResult = await this.nodelink.sources.search( + 'youtube', + searchQuery, + 'ytmsearch' + ) + } + } else { + searchResult = await this.nodelink.sources.search( + 'youtube', + searchQuery, + 'ytmsearch' + ) + } + + if ( + searchResult.loadType !== 'search' || + searchResult.data.length === 0 + ) { + searchResult = + await this.nodelink.sources.searchWithDefault(searchQuery) + } if ( searchResult.loadType !== 'search' || @@ -476,13 +1238,9 @@ export default class SpotifySource { } } - const bestMatch = await this._findBestMatch( - searchResult.data, - spotifyDuration, - decodedTrack, - isExplicit, - this.allowExplicit - ) + const bestMatch = getBestMatch(searchResult.data, decodedTrack, { + allowExplicit: 
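// getTrackUrl above mirrors a Spotify track onto another source in three passes: a quoted
// ISRC query on YouTube Music, then a title/author query on YouTube Music, then the
// configured default search. A condensed sketch of that chain, using the same sources.*
// calls that appear elsewhere in this patch:
async function findMirror(sources, decodedTrack, fallbackQuery) {
  const ok = (r) => r?.loadType === 'search' && r.data?.length > 0

  let result = decodedTrack.isrc
    ? await sources.search('youtube', `"${decodedTrack.isrc}"`, 'ytmsearch')
    : null
  if (!ok(result)) result = await sources.search('youtube', fallbackQuery, 'ytmsearch')
  if (!ok(result)) result = await sources.searchWithDefault(fallbackQuery)

  return ok(result) ? result : null
}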
this.allowExplicit + }) if (!bestMatch) { return { @@ -496,7 +1254,11 @@ export default class SpotifySource { const streamInfo = await this.nodelink.sources.getTrackUrl(bestMatch.info) return { newTrack: bestMatch, ...streamInfo } } catch (e) { - logger('warn', 'Spotify', `Search for "${query}" failed: ${e.message}`) + logger( + 'warn', + 'Spotify', + `Search for "${searchQuery}" failed: ${e.message}` + ) return { exception: { message: e.message, severity: 'fault' } } } } @@ -508,165 +1270,4 @@ export default class SpotifySource { } return searchQuery } - - async _findBestMatch( - list, - target, - original, - isExplicit, - allowExplicit, - retried = false - ) { - const allowedDurationDiff = target * DURATION_TOLERANCE - const normalizedOriginalTitle = this._normalize(original.title) - const normalizedOriginalAuthor = this._normalize(original.author) - - const scoredCandidates = list - .filter( - (item) => Math.abs(item.info.length - target) <= allowedDurationDiff - ) - .map((item) => { - const normalizedItemTitle = this._normalize(item.info.title) - const normalizedItemAuthor = this._normalize(item.info.author) - let score = 0 - - const originalTitleWords = new Set( - normalizedOriginalTitle.split(' ').filter((w) => w.length > 0) - ) - const itemTitleWords = new Set( - normalizedItemTitle.split(' ').filter((w) => w.length > 0) - ) - - let titleScore = 0 - for (const word of originalTitleWords) { - if (itemTitleWords.has(word)) { - titleScore++ - } - } - score += titleScore * 100 - - const originalArtists = normalizedOriginalAuthor - .split(/,\s*|\s+&\s+/) - .map((a) => a.trim()) - .filter(Boolean) - let authorMatchScore = 0 - for (const artist of originalArtists) { - if (normalizedItemAuthor.includes(artist)) { - authorMatchScore += 100 - } - } - if (authorMatchScore > 0) { - score += authorMatchScore - } else { - const authorSimilarity = this._calculateSimilarity( - normalizedOriginalAuthor, - normalizedItemAuthor - ) - score += authorSimilarity * 50 - } - - const titleWords = new Set(normalizedItemTitle.split(' ')) - const originalTitleWordsSet = new Set( - normalizedOriginalTitle.split(' ') - ) - const extraWords = [...titleWords].filter( - (word) => !originalTitleWordsSet.has(word) - ) - score -= extraWords.length * 5 - - const isCleanOrRadio = - normalizedItemTitle.includes('clean') || - normalizedItemTitle.includes('radio') - - if (isExplicit && !allowExplicit) { - if (isCleanOrRadio) { - score += 500 - } - } else if (!isExplicit) { - if (isCleanOrRadio) { - score -= 200 - } - } else { - if (isCleanOrRadio) { - score -= 200 - } - } - - return { item, score } - }) - .filter((c) => c.score >= 0) - - if (scoredCandidates.length === 0 && !retried) { - const newSearch = await this.nodelink.sources.searchWithDefault( - `${original.title} ${original.author} official video` - ) - if (newSearch.loadType !== 'search' || newSearch.data.length === 0) { - return null - } - - return await this._findBestMatch( - newSearch.data, - target, - original, - isExplicit, - allowExplicit, - true - ) - } - - if (scoredCandidates.length === 0) { - return null - } - - scoredCandidates.sort((a, b) => b.score - a.score) - - return scoredCandidates[0].item - } - - _normalize(str) { - return str - .toLowerCase() - .replace(/feat\.?/g, '') - .replace(/ft\.?/g, '') - .replace(/[^\w\s]/g, '') - .trim() - } - - _calculateSimilarity(str1, str2) { - const longer = str1.length > str2.length ? str1 : str2 - const shorter = str1.length > str2.length ? 
str2 : str1 - - if (longer.length === 0) return 1.0 - - const editDistance = this._levenshteinDistance(longer, shorter) - return (longer.length - editDistance) / longer.length - } - - _levenshteinDistance(str1, str2) { - const matrix = [] - - for (let i = 0; i <= str2.length; i++) { - matrix[i] = [i] - } - - for (let j = 0; j <= str1.length; j++) { - matrix[0][j] = j - } - - for (let i = 1; i <= str2.length; i++) { - for (let j = 1; j <= str1.length; j++) { - if (str2.charAt(i - 1) === str1.charAt(j - 1)) { - matrix[i][j] = matrix[i - 1][j - 1] - } else { - matrix[i][j] = Math.min( - matrix[i - 1][j - 1] + 1, - matrix[i][j - 1] + 1, - matrix[i - 1][j] + 1 - ) - } - } - } - - return matrix[str2.length][str1.length] - } } diff --git a/src/sources/telegram.js b/src/sources/telegram.js new file mode 100644 index 0000000..e348549 --- /dev/null +++ b/src/sources/telegram.js @@ -0,0 +1,210 @@ +import { encodeTrack, http1makeRequest, logger } from '../utils.js' + +export default class TelegramSource { + constructor(nodelink) { + this.nodelink = nodelink + this.patterns = [ + /https?:\/\/(?:t\.me|telegram\.me|telegram\.dog)\/([^/]+)\/(\d+)/ + ] + this.searchTerms = [] + this.priority = 80 + } + + async setup() { + logger('info', 'Sources', 'Loaded Telegram source.') + return true + } + + async search(_query) { + return { loadType: 'empty', data: {} } + } + + async resolve(url) { + const match = url.match(this.patterns[0]) + if (!match) return { loadType: 'empty', data: {} } + + const [, channelId, msgId] = match + const embedUrl = new URL(url) + embedUrl.searchParams.set('embed', '1') + + try { + const { body, error, statusCode } = await http1makeRequest( + embedUrl.toString(), + { + method: 'GET', + headers: { + 'User-Agent': + 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36', + 'Accept-Encoding': 'identity' + } + } + ) + + if (error || statusCode !== 200) { + return { + exception: { + message: error?.message || `Status code ${statusCode}`, + severity: 'fault' + } + } + } + + const authorMatch = body.match( + /class="tgme_widget_message_author[^>]*>[\s\S]*?([^<]+)<\/span>/ + ) + const author = authorMatch ? authorMatch[1].trim() : 'Telegram Channel' + + const textMatch = body.match( + /class="tgme_widget_message_text[^>]*>([\s\S]*?)<\/div>/ + ) + const description = textMatch + ? textMatch[1] + .replace(//gi, '\n') + .replace(/<[^>]*>/g, '') + .trim() + : '' + const title = description.split('\n')[0] || `Telegram Video ${msgId}` + + const videoRegex = + /]+duration[^>]*>([\d:]+)<\/time>/) || + content.match( + /class="tgme_widget_message_video_duration">([\d:]+)<\/time>/ + ) + + if (durationMatch) { + const durationStr = durationMatch[1] + const durationParts = durationStr.split(':').map(Number) + if (durationParts.length === 2) + durationMs = (durationParts[0] * 60 + durationParts[1]) * 1000 + else if (durationParts.length === 3) + durationMs = + (durationParts[0] * 3600 + + durationParts[1] * 60 + + durationParts[2]) * + 1000 + } + + const thumbMatch = content.match( + /tgme_widget_message_video_thumb"[^>]+background-image:url\('([^']+)'\)/ + ) + const artworkUrl = thumbMatch ? thumbMatch[1] : null + + const trackInfo = { + identifier: `${channelId}/${msgId}/${tracks.length}`, + isSeekable: true, + author, + length: durationMs, + isStream: false, + position: 0, + title: + tracks.length === 0 + ? 
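// The Telegram embed only exposes durations as 'm:ss' or 'h:mm:ss' strings, which the code
// above converts to milliseconds. A standalone sketch of that conversion:
const durationToMs = (durationStr) => {
  const parts = durationStr.split(':').map(Number)
  if (parts.length === 2) return (parts[0] * 60 + parts[1]) * 1000
  if (parts.length === 3) return (parts[0] * 3600 + parts[1] * 60 + parts[2]) * 1000
  return 0
}
// durationToMs('3:07')    -> 187000
// durationToMs('1:02:03') -> 3723000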
title + : `${title} (Video ${tracks.length + 1})`, + uri: url, + artworkUrl, + isrc: null, + sourceName: 'telegram' + } + + tracks.push({ + encoded: encodeTrack(trackInfo), + info: trackInfo, + pluginInfo: { + directUrl: videoUrl + } + }) + } + + if (tracks.length === 0) return { loadType: 'empty', data: {} } + + const isSingle = url.includes('?single') || url.includes('&single') + if (isSingle && tracks.length > 0) { + return { loadType: 'track', data: tracks[0] } + } + + if (tracks.length === 1) return { loadType: 'track', data: tracks[0] } + + return { + loadType: 'playlist', + data: { + info: { + name: title, + selectedTrack: 0 + }, + pluginInfo: {}, + tracks + } + } + } catch (e) { + return { exception: { message: e.message, severity: 'fault' } } + } + } + + async getTrackUrl(track) { + const result = await this.resolve(track.uri) + if (result.loadType === 'track') { + return { + url: result.data.pluginInfo.directUrl, + protocol: 'https', + format: 'mp4' + } + } + if (result.loadType === 'playlist') { + const parts = track.identifier.split('/') + const index = parseInt(parts[parts.length - 1], 10) + const selectedTrack = result.data.tracks[index] || result.data.tracks[0] + return { + url: selectedTrack.pluginInfo.directUrl, + protocol: 'https', + format: 'mp4' + } + } + return { + exception: { message: 'Failed to get track URL', severity: 'fault' } + } + } + + async loadStream(_decodedTrack, url) { + try { + const response = await http1makeRequest(url, { + method: 'GET', + streamOnly: true + }) + + if (response.error || !response.stream) { + throw response.error || new Error('Failed to get stream') + } + + const stream = new PassThrough() + + response.stream.on('data', (chunk) => stream.write(chunk)) + response.stream.on('end', () => stream.emit('finishBuffering')) + response.stream.on('error', (err) => stream.destroy(err)) + + return { stream: stream, type: 'video/mp4' } + } catch (err) { + return { exception: { message: err.message, severity: 'common' } } + } + } + + async search(_query) { + return { loadType: 'empty', data: {} } + } +} diff --git a/src/sources/tidal.js b/src/sources/tidal.js index d56c551..3ce4d0d 100644 --- a/src/sources/tidal.js +++ b/src/sources/tidal.js @@ -1,6 +1,10 @@ -import { encodeTrack, http1makeRequest, logger } from '../utils.js' -import fs from 'node:fs/promises' import path from 'node:path' +import { + encodeTrack, + getBestMatch, + http1makeRequest, + logger +} from '../utils.js' const API_BASE = 'https://api.tidal.com/v1/' const CACHE_VALIDITY_DAYS = 7 @@ -23,8 +27,9 @@ export default class TidalSource { this.nodelink = nodelink this.config = nodelink.options.sources.tidal this.searchTerms = ['tdsearch'] + this.recommendationTerm = ['tdrec'] this.patterns = [ - /^https?:\/\/(?:(?:listen|www)\.)?tidal\.com\/(?:browse\/)?(?album|track|playlist|mix)\/(?[a-zA-Z0-9\-]+)/ + /^https?:\/\/(?:(?:listen|www)\.)?tidal\.com\/(?:browse\/)?(?album|track|playlist|mix)\/(?[a-zA-Z0-9-]+)/ ] this.priority = 90 this.token = this.config?.token @@ -38,10 +43,10 @@ export default class TidalSource { async setup() { if (this.token && this.token !== 'token_here') return true - const cachedToken = await this._loadTokenFromCache().catch(() => null) + const cachedToken = this.nodelink.credentialManager.get('tidal_token') if (cachedToken) { this.token = cachedToken - logger('info', 'Tidal', 'Loaded valid token from cache.') + logger('info', 'Tidal', 'Loaded valid token from CredentialManager.') return true } @@ -54,8 +59,10 @@ export default class TidalSource { if (token) { 
this.token = token logger('info', 'Tidal', 'Fetched new token.') - await this._saveTokenToCache(token).catch((err) => - logger('warn', 'Tidal', `Cache save failed: ${err.message}`) + this.nodelink.credentialManager.set( + 'tidal_token', + token, + CACHE_VALIDITY_DAYS * 24 * 60 * 60 * 1000 ) } else { logger('warn', 'Tidal', 'No clientId found in remote asset') @@ -67,53 +74,6 @@ export default class TidalSource { return true } - async _loadTokenFromCache() { - try { - await fs.mkdir(path.dirname(this.tokenCachePath), { recursive: true }) - const data = await fs.readFile(this.tokenCachePath, 'utf-8') - const { token, timestamp } = JSON.parse(data) - - if (!token || !timestamp) return null - - const cacheAge = Date.now() - timestamp - const maxAge = CACHE_VALIDITY_DAYS * 24 * 60 * 60 * 1000 - - if (cacheAge > maxAge) { - logger('info', 'Tidal', 'Cached token has expired.') - return null - } - - return token - } catch (error) { - if (error.code !== 'ENOENT') { - logger('warn', 'Tidal', `Could not read token cache: ${error.message}`) - } - return null - } - } - - async _saveTokenToCache(token) { - try { - await fs.mkdir(path.dirname(this.tokenCachePath), { recursive: true }) - const dataToCache = { - token: token, - timestamp: Date.now() - } - await fs.writeFile( - this.tokenCachePath, - JSON.stringify(dataToCache), - 'utf-8' - ) - logger('info', 'Tidal', 'Saved new token to cache file.') - } catch (error) { - logger( - 'error', - 'Tidal', - `Failed to save token to cache: ${error.message}` - ) - } - } - async _getJson(endpoint, params = {}) { const url = new URL(`${API_BASE}${endpoint}`) params.countryCode = this.countryCode @@ -138,7 +98,11 @@ export default class TidalSource { return body } - async search(query) { + async search(query, sourceTerm) { + if (this.recommendationTerm.includes(sourceTerm)) { + return this.getRecommendations(query) + } + try { const limit = this.nodelink.options.maxSearchResults || 10 const data = await this._getJson('search', { @@ -189,6 +153,8 @@ export default class TidalSource { data: { info: { name: albumData.title, selectedTrack: 0 }, tracks } } } + case 'mix': + return this.getMix(id) case 'playlist': { const playlistData = await this._getJson(`playlists/${id}`) const totalTracks = playlistData.numberOfTracks @@ -270,6 +236,48 @@ export default class TidalSource { } } + async getRecommendations(query) { + let trackId = query + if (!/^[0-9]+$/.test(query)) { + const searchRes = await this.search(query, 'tdsearch') + if (searchRes.loadType === 'search' && searchRes.data.length > 0) { + trackId = searchRes.data[0].info.identifier + } else { + return { loadType: 'empty', data: {} } + } + } + + try { + const data = await this._getJson(`tracks/${trackId}`) + if (!data?.mixes?.TRACK_MIX) return { loadType: 'empty', data: {} } + + return this.getMix(data.mixes.TRACK_MIX) + } catch (e) { + return { exception: { message: e.message, severity: 'fault' } } + } + } + + async getMix(mixId) { + try { + const data = await this._getJson(`mixes/${mixId}/items`, { limit: 100 }) + if (!data?.items?.length) return { loadType: 'empty', data: {} } + + const tracks = data.items + .map((item) => this._parseTrack(item.item || item)) + .filter(Boolean) + return { + loadType: 'playlist', + data: { + info: { name: `Mix: ${mixId}`, selectedTrack: 0 }, + pluginInfo: { type: 'recommendations' }, + tracks + } + } + } catch (e) { + return { exception: { message: e.message, severity: 'fault' } } + } + } + _parseTrack(item) { if (!item || !item.id) return null const trackInfo = { @@ -297,7 
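// Tidal recommendations ('tdrec') above piggyback on track mixes: resolve the query to a
// numeric track id (searching first when it is not one), read mixes.TRACK_MIX from
// tracks/{id}, then load mixes/{mixId}/items. A condensed sketch of that flow, with getJson
// standing in for the _getJson helper defined in this file:
async function trackMixItems(getJson, trackId) {
  const track = await getJson(`tracks/${trackId}`)
  const mixId = track?.mixes?.TRACK_MIX
  if (!mixId) return []

  const mix = await getJson(`mixes/${mixId}/items`, { limit: 100 })
  return (mix?.items ?? []).map((item) => item.item || item)
}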
+305,36 @@ export default class TidalSource { const query = `${decodedTrack.title} ${decodedTrack.author}` try { - const searchResult = await this.nodelink.sources.searchWithDefault(query) + let searchResult + + if (decodedTrack.isrc) { + searchResult = await this.nodelink.sources.search( + 'youtube', + `"${decodedTrack.isrc}"`, + 'ytmsearch' + ) + if ( + searchResult.loadType !== 'search' || + searchResult.data.length === 0 + ) { + searchResult = null + } + } + + if (!searchResult) { + searchResult = await this.nodelink.sources.search( + 'youtube', + query, + 'ytmsearch' + ) + } + + if ( + searchResult.loadType !== 'search' || + searchResult.data.length === 0 + ) { + searchResult = await this.nodelink.sources.searchWithDefault(query) + } if ( searchResult.loadType !== 'search' || @@ -311,27 +348,16 @@ export default class TidalSource { } } - const tidalDuration = decodedTrack.length - let bestMatch = null - let minDurationDiff = Infinity - - for (const track of searchResult.data) { - const durationDiff = Math.abs(track.info.length - tidalDuration) - if (durationDiff < minDurationDiff) { - minDurationDiff = durationDiff - bestMatch = track + const bestMatch = getBestMatch(searchResult.data, decodedTrack) + if (!bestMatch) { + return { + exception: { + message: 'No suitable alternative found after filtering.', + severity: 'common' + } } } - if (!bestMatch || minDurationDiff > 5000) { - logger( - 'warn', - 'Tidal', - `No close match found for "${query}". Closest diff: ${minDurationDiff}ms` - ) - bestMatch = searchResult.data[0] - } - const streamInfo = await this.nodelink.sources.getTrackUrl(bestMatch.info) return { newTrack: bestMatch, ...streamInfo } } catch (e) { @@ -340,7 +366,7 @@ export default class TidalSource { } } - async loadStream(track, url, protocol, additionalData) { + async loadStream(_track, _url, _protocol, _additionalData) { throw new Error( 'Tidal source uses mirroring and does not load streams directly.' ) diff --git a/src/sources/twitch.js b/src/sources/twitch.js index 3c9c5c5..9fb06c5 100644 --- a/src/sources/twitch.js +++ b/src/sources/twitch.js @@ -119,6 +119,20 @@ export default class TwitchSource { } async setup() { + const cachedId = this.nodelink.credentialManager.get('twitch_client_id') + const cachedDevice = this.nodelink.credentialManager.get('twitch_device_id') + + if (cachedId && cachedDevice) { + this.clientId = cachedId + this.deviceId = cachedDevice + logger( + 'info', + 'Sources', + 'Loaded Twitch parameters from CredentialManager.' + ) + return true + } + try { const { body, headers, error, statusCode } = await http1makeRequest( 'https://www.twitch.tv/', @@ -173,6 +187,20 @@ export default class TwitchSource { ) } + if (this.deviceId) { + this.nodelink.credentialManager.set( + 'twitch_device_id', + this.deviceId, + 7 * 24 * 60 * 60 * 1000 + ) + } + + this.nodelink.credentialManager.set( + 'twitch_client_id', + this.clientId, + 7 * 24 * 60 * 60 * 1000 + ) + logger( 'info', 'Sources', @@ -517,8 +545,8 @@ export default class TwitchSource { for (const quality of clipData.videoQualities) { if ( !bestQuality || - Number.parseInt(quality.quality) > - Number.parseInt(bestQuality.quality) + Number.parseInt(quality.quality, 10) > + Number.parseInt(bestQuality.quality, 10) ) { bestQuality = quality } @@ -626,7 +654,7 @@ export default class TwitchSource { return bestUrl ? 
{ url: bestUrl } : null } - async loadStream(track, url, protocol) { + async loadStream(_track, url, protocol) { if (protocol === 'hls') { const stream = new PassThrough() manageHlsStream(url, stream) @@ -647,7 +675,7 @@ export default class TwitchSource { return { stream, type: 'mp4' } } - search(query) { + search(_query) { return { exception: { message: 'Search is not supported for Twitch', diff --git a/src/sources/vimeo.js b/src/sources/vimeo.js index 388b428..77173bb 100644 --- a/src/sources/vimeo.js +++ b/src/sources/vimeo.js @@ -1,9 +1,9 @@ -import { PassThrough } from 'node:stream' import { Buffer } from 'node:buffer' -import https from 'node:https' +import { spawn } from 'node:child_process' import http from 'node:http' +import https from 'node:https' +import { PassThrough } from 'node:stream' import zlib from 'node:zlib' -import { spawn } from 'node:child_process' import { encodeTrack, logger } from '../utils.js' const VIMEO_PATTERNS = [ @@ -779,7 +779,7 @@ export default class VimeoSource { identifier: videoId, isSeekable: true, isStream: false, - uri: `https://vimeo.com/${videoId}${hashParam ? '?h=' + hashParam : ''}`, + uri: `https://vimeo.com/${videoId}${hashParam ? `?h=${hashParam}` : ''}`, artworkUrl: metadata.artworkUrl || null, isrc: null, sourceName: 'vimeo', @@ -1262,8 +1262,7 @@ export default class VimeoSource { ) } if (!playlistUrl.includes('omit=')) { - playlistUrl += - (playlistUrl.includes('?') ? '&' : '?') + 'omit=av1-hevc' + playlistUrl += `${playlistUrl.includes('?') ? '&' : '?'}omit=av1-hevc` } try { return await this._fetchPlaylist(playlistUrl, videoId) @@ -1315,7 +1314,7 @@ export default class VimeoSource { throw new Error('No playable streams in config') } - async _fetchPlaylist(playlistUrl, videoId) { + async _fetchPlaylist(playlistUrl, _videoId) { const response = await _functions.httpRequest(playlistUrl, { headers: { Accept: '*/*', diff --git a/src/sources/vkmusic.js b/src/sources/vkmusic.js new file mode 100644 index 0000000..9cc9aca --- /dev/null +++ b/src/sources/vkmusic.js @@ -0,0 +1,740 @@ +import { Buffer } from 'node:buffer' +import crypto from 'node:crypto' +import { PassThrough } from 'node:stream' +import { encodeTrack, http1makeRequest, logger, makeRequest } from '../utils.js' + +const API_BASE = 'https://api.vk.com/method/' +const API_VERSION = '5.131' +const BASE64_CHARS = + 'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMN0PQRSTUVWXYZO123456789+/=' +const USER_AGENT = + 'Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:146.0) Gecko/20100101 Firefox/146.0' + +async function manageVkHlsStream(url, outputStream, cookie, localAddress) { + const headers = { + 'User-Agent': USER_AGENT, + Referer: 'https://vk.com/', + Origin: 'https://vk.com', + Cookie: cookie || '' + } + + try { + const { + body: manifest, + error, + statusCode + } = await http1makeRequest(url, { headers, localAddress }) + if (error || statusCode !== 200) + throw new Error(`Failed to fetch manifest: ${statusCode}`) + + const lines = manifest.split('\n').map((l) => l.trim()) + const segments = [] + let currentKey = null + let mediaSequence = 0 + + const mediaSeqLine = lines.find((l) => + l.startsWith('#EXT-X-MEDIA-SEQUENCE:') + ) + if (mediaSeqLine) mediaSequence = parseInt(mediaSeqLine.split(':')[1], 10) + + const keyMap = new Map() + + for (let i = 0; i < lines.length; i++) { + const line = lines[i] + if (line.startsWith('#EXT-X-KEY:')) { + const methodMatch = line.match(/METHOD=([^,]+)/) + const method = methodMatch ? 
methodMatch[1] : 'NONE' + + if (method === 'NONE') { + currentKey = null + } else { + const uriMatch = line.match(/URI="([^"]+)"/) + if (uriMatch) { + const keyUrl = new URL(uriMatch[1], url).toString() + if (!keyMap.has(keyUrl)) { + const { body: keyData, error: keyError } = await http1makeRequest( + keyUrl, + { + headers, + localAddress, + responseType: 'buffer' + } + ) + if (!keyError) keyMap.set(keyUrl, keyData) + } + currentKey = { method, data: keyMap.get(keyUrl) } + } + } + } else if (line.startsWith('#EXTINF:')) { + const segmentUrl = lines[++i] + if (segmentUrl && !segmentUrl.startsWith('#')) { + segments.push({ + url: new URL(segmentUrl, url).toString(), + key: currentKey ? { ...currentKey } : null, + sequence: mediaSequence++ + }) + } + } + } + + for (const segment of segments) { + if (outputStream.destroyed) break + + try { + const { body: encryptedData, error: segError } = await http1makeRequest( + segment.url, + { + headers, + localAddress, + responseType: 'buffer', + timeout: 10000 + } + ) + + if (segError || !encryptedData) continue + + let data = encryptedData + if ( + segment.key && + segment.key.method === 'AES-128' && + segment.key.data + ) { + try { + const iv = Buffer.alloc(16) + iv.writeUInt32BE(segment.sequence, 12) + + const decipher = crypto.createDecipheriv( + 'aes-128-cbc', + segment.key.data, + iv + ) + decipher.setAutoPadding(false) + data = Buffer.concat([ + decipher.update(encryptedData), + decipher.final() + ]) + } catch (decErr) { + logger( + 'error', + 'VKMusic-HLS', + `Decryption failed for segment ${segment.sequence}: ${decErr.message}` + ) + continue + } + } + + const mp3Payloads = [] + for (let i = 0; i <= data.length - 188; i += 188) { + if (data[i] !== 0x47) continue + const pid = ((data[i + 1] & 0x1f) << 8) | data[i + 2] + if (pid === 0x100) { + const adaptationField = (data[i + 3] & 0x20) >> 5 + const payloadExists = (data[i + 3] & 0x10) >> 4 + if (payloadExists) { + let offset = 4 + if (adaptationField) offset += data[i + 4] + 1 + if (offset < 188) { + let payload = data.slice(i + offset, i + 188) + if ( + payload[0] === 0x00 && + payload[1] === 0x00 && + payload[2] === 0x01 + ) { + const headerLen = payload[8] + payload = payload.slice(9 + headerLen) + } + mp3Payloads.push(payload) + } + } + } + } + + const mp3Buffer = Buffer.concat(mp3Payloads) + if (mp3Buffer.length > 0) { + if (!outputStream.write(mp3Buffer)) { + await new Promise((resolve) => outputStream.once('drain', resolve)) + } + } + } catch (e) { + logger( + 'error', + 'VKMusic-HLS', + `Segment processing error (Seq ${segment.sequence}): ${e.message}` + ) + } + } + + if (!outputStream.destroyed) { + outputStream.emit('finishBuffering') + outputStream.end() + } + } catch (e) { + logger('error', 'VKMusic-HLS', `HLS management failed: ${e.message}`) + if (!outputStream.destroyed) outputStream.destroy(e) + } +} + +export default class VKMusicSource { + constructor(nodelink) { + this.nodelink = nodelink + this.config = nodelink.options.sources?.vkmusic || {} + this.searchTerms = ['vksearch'] + this.recommendationTerm = ['vkrec'] + this.patterns = [ + /vk\.(?:com|ru)\/.*?[?&]z=audio_playlist(?-?\d+)_(?\d+)(?:(?:%2F|_|\/|(?:\?|&)access_hash=)(?[a-z0-9]+))?/i, + /vk\.(?:com|ru)\/(?:music\/(?:playlist|album)\/)(?-?\d+)_(?\d+)(?:(?:%2F|_|\/|(?:\?|&)access_hash=)(?[a-z0-9]+))?/i, + /vk\.(?:com|ru)\/audio(?-?\d+)_(?\d+)(?:(?:%2F|_|\/)(?[a-z0-9]+))?/i, + /vk\.(?:com|ru)\/artist\/(?[^/?#\s&]+)/i, + /vk\.(?:com|ru)\/audios(?-?\d+)/i + ] + this.priority = 80 + this.userId = 0 + this.hasToken = false + 
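
For context on the decryption step in manageVkHlsStream above: HLS (RFC 8216) specifies that when an EXT-X-KEY tag carries no IV attribute, the IV is the segment's media sequence number as a 128-bit big-endian integer, left-padded with zeros. A minimal standalone sketch of that step, assuming a 16-byte key buffer (decryptHlsSegment is a hypothetical helper name, not part of this patch):

import { Buffer } from 'node:buffer'
import crypto from 'node:crypto'

// Sketch only: decrypt one AES-128-CBC HLS segment whose IV is implied by
// its media sequence number, mirroring the convention used above.
function decryptHlsSegment(encrypted, keyData, sequence) {
  const iv = Buffer.alloc(16)            // 16 zero bytes
  iv.writeUInt32BE(sequence, 12)         // sequence number in the low 32 bits (big-endian)
  const decipher = crypto.createDecipheriv('aes-128-cbc', keyData, iv)
  decipher.setAutoPadding(false)         // as in the patch: leave any block padding in place
  return Buffer.concat([decipher.update(encrypted), decipher.final()])
}
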
this.accessToken = this.config.userToken || null + this.tokenExpiry = 0 + this.cookie = this.config.userCookie || '' + } + + async setup() { + const cachedToken = this.nodelink.credentialManager.get('vk_access_token') + if (cachedToken) { + this.accessToken = cachedToken + this.hasToken = true + logger('info', 'VKMusic', 'Loaded access token from CredentialManager.') + return true + } + + if (this.accessToken || this.cookie) { + try { + if (!this.accessToken && this.cookie) { + await this._refreshAccessToken() + } + const response = await this._apiRequest('users.get', {}) + if (response?.[0]) { + this.userId = response[0].id + this.hasToken = true + logger( + 'info', + 'VKMusic', + `Loaded VKMusic source. Logged in as: ${response[0].first_name} ${response[0].last_name} (${this.userId})` + ) + return true + } + } catch (e) { + logger( + 'warn', + 'VKMusic', + `Auth failed: ${e.message}. Falling back to scraping mode.` + ) + } + } else { + logger('warn', 'VKMusic', 'No auth provided. Running in scraping mode.') + } + return true + } + + async _refreshAccessToken() { + if (!this.cookie) throw new Error('No cookie provided for token refresh') + const { body, error, statusCode } = await http1makeRequest( + 'https://login.vk.ru/?act=web_token', + { + method: 'POST', + headers: { + Host: 'login.vk.ru', + 'User-Agent': USER_AGENT, + Accept: '*/*', + 'Accept-Language': 'en-US,en;q=0.5', + Referer: 'https://vk.ru/', + 'Content-Type': 'application/x-www-form-urlencoded', + Origin: 'https://vk.ru', + Cookie: this.cookie, + Connection: 'keep-alive' + }, + body: 'version=1&app_id=6287487', + disableBodyCompression: true, + localAddress: this.nodelink.routePlanner?.getIP() + } + ) + if (error || statusCode !== 200) + throw new Error( + `Failed to refresh token: ${error?.message || statusCode}` + ) + if (body.type === 'okay' && body.data?.access_token) { + this.accessToken = body.data.access_token + this.tokenExpiry = body.data.expires * 1000 + this.userId = body.data.user_id + this.nodelink.credentialManager.set( + 'vk_access_token', + this.accessToken, + body.data.expires * 1000 - Date.now() + ) + return this.accessToken + } + throw new Error(`Invalid act=web_token response: ${JSON.stringify(body)}`) + } + + async search(query, sourceTerm) { + if (this.recommendationTerm.includes(sourceTerm)) { + return this.getRecommendations(query) + } + + if (!this.hasToken) + return { + exception: { + message: 'VKMusic search requires valid auth.', + severity: 'common' + } + } + try { + const res = await this._apiRequest('audio.search', { + q: query, + count: this.nodelink.options.maxSearchResults || 10, + sort: 2, + auto_complete: 1 + }) + if (!res || !res.items || res.items.length === 0) + return { loadType: 'empty', data: {} } + const tracks = res.items.map((item) => this.buildTrack(item)) + return { loadType: 'search', data: tracks } + } catch (e) { + return { exception: { message: e.message, severity: 'fault' } } + } + } + + async getRecommendations(query) { + if (!this.hasToken) + return { + exception: { + message: 'VKMusic recommendations require valid auth.', + severity: 'common' + } + } + + let audioId = query + if (!/^-?\d+_\d+$/.test(query)) { + const searchRes = await this.search(query, 'vksearch') + if (searchRes.loadType === 'search' && searchRes.data.length > 0) { + audioId = searchRes.data[0].info.identifier + } else { + return { loadType: 'empty', data: {} } + } + } + + try { + const res = await this._apiRequest('audio.getRecommendations', { + target_audio: audioId, + count: 
this.config.recommendationsLoadLimit || 10 + }) + if (!res || !res.items || res.items.length === 0) + return { loadType: 'empty', data: {} } + const tracks = res.items.map((item) => this.buildTrack(item)) + return { + loadType: 'playlist', + data: { + info: { name: 'VK Music Recommendations', selectedTrack: 0 }, + pluginInfo: { type: 'recommendations' }, + tracks + } + } + } catch (e) { + return { exception: { message: e.message, severity: 'fault' } } + } + } + + async resolve(url) { + const playlistMatch = + url.match(this.patterns[0]) || url.match(this.patterns[1]) + if (playlistMatch) { + const { owner, id, hash } = playlistMatch.groups + return this._resolvePlaylist(owner, id, hash, url) + } + + const trackMatch = url.match(this.patterns[2]) + if (trackMatch) return this._resolveTrack(url) + + const artistMatch = url.match(this.patterns[3]) + if (artistMatch) { + const artistId = artistMatch.groups.id + if (this.hasToken) { + const res = await this.search(decodeURIComponent(artistId)) + if (res.loadType === 'search') { + return { + loadType: 'playlist', + data: { + info: { + name: `Artist: ${decodeURIComponent(artistId)}`, + selectedTrack: 0 + }, + tracks: res.data + } + } + } + return res + } + return this._scrapePlaylist(url) + } + + const audiosMatch = url.match(this.patterns[4]) + if (audiosMatch) + return this._resolvePlaylist(audiosMatch.groups.id, '-1', null, url) + + return { loadType: 'empty', data: {} } + } + async _resolvePlaylist(ownerId, playlistId, accessKey, url) { + if (this.hasToken) { + try { + const params = { + owner_id: ownerId, + album_id: playlistId, + count: this.nodelink.options.maxAlbumPlaylistLength || 100 + } + if (accessKey) params.access_key = accessKey + const res = await this._apiRequest('audio.get', params) + if (res?.items && res.items.length > 0) { + let playlistTitle = `VK Playlist ${ownerId}_${playlistId}` + try { + const plList = await this._apiRequest('audio.getPlaylists', { + owner_id: ownerId, + count: 50 + }) + const pl = plList?.items?.find((p) => p.id === playlistId) + if (pl) playlistTitle = pl.title + } catch {} + const tracks = res.items.map((item) => this.buildTrack(item)) + return { + loadType: 'playlist', + data: { info: { name: playlistTitle, selectedTrack: 0 }, tracks } + } + } + } catch (e) { + logger( + 'debug', + 'VKMusic', + `API playlist resolution failed: ${e.message}. Falling back to scraping.` + ) + } + } + return this._scrapePlaylist(url) + } + + async _resolveTrack(url) { + if (this.hasToken) { + try { + const trackMatch = url.match(this.patterns[1]) + const { owner, id, hash } = trackMatch.groups + const audios = `${owner}_${id}${hash ? `_${hash}` : ''}` + const res = await this._apiRequest('audio.getById', { audios }) + if (res?.[0]) + return { loadType: 'track', data: this.buildTrack(res[0]) } + } catch (e) { + logger( + 'debug', + 'VKMusic', + `API track resolution failed: ${e.message}. 
Falling back to scraping.` + ) + } + } + return this._scrapeTrack(url) + } + + async _scrapeTrack(url) { + try { + const { body, statusCode } = await http1makeRequest(url, { + headers: { 'User-Agent': USER_AGENT } + }) + if (statusCode !== 200) throw new Error(`Status ${statusCode}`) + const dataAudioMatch = body.match(/data-audio="([^"]+)"/) + if (!dataAudioMatch) { + const execMatch = body.match( + /class="AudioPlayerBlock__root"[^>]+data-exec="([^"]+)"/ + ) + if (execMatch) { + const escapedJson = execMatch[1].replace(/"/g, '"') + const execData = JSON.parse(escapedJson) + const meta = execData?.['AudioPlayerBlock/init']?.firstAudio + if (meta) return { loadType: 'track', data: this._parseMeta(meta) } + } + throw new Error('Could not find track data in page') + } + const data = JSON.parse(dataAudioMatch[1].replace(/"/g, '"')) + return { loadType: 'track', data: this._parseMeta(data) } + } catch (e) { + return { + exception: { + message: `Scraping failed: ${e.message}`, + severity: 'fault' + } + } + } + } + + async _scrapePlaylist(url) { + try { + const { body, statusCode } = await http1makeRequest(url, { + headers: { 'User-Agent': USER_AGENT } + }) + if (statusCode !== 200) throw new Error(`Status ${statusCode}`) + const playlistTitleMatch = + body.match(/]*>([^<]+)<\/h1>/) || + body.match(/class="AudioPlaylistSnippet__title"[^>]*>([^<]+)<\/div>/) + const playlistTitle = playlistTitleMatch + ? playlistTitleMatch[1].trim() + : 'VK Playlist' + const tracks = [] + const audioMatches = body.matchAll(/data-audio="([^"]+)"/g) + for (const match of audioMatches) { + try { + const data = JSON.parse(match[1].replace(/"/g, '"')) + const parsed = this._parseMeta(data) + if (parsed) tracks.push(parsed) + } catch (_e) {} + } + if (tracks.length === 0) { + const execMatch = body.match(/data-exec="([^"]+)"/) + if (execMatch) { + try { + const execData = JSON.parse(execMatch[1].replace(/"/g, '"')) + const list = + execData?.['AudioPlaylistSnippet/init']?.playlist?.list || + (execData?.['AudioPlayerBlock/init']?.firstAudio + ? [execData['AudioPlayerBlock/init'].firstAudio] + : []) + for (const meta of list) { + const parsed = this._parseMeta(meta) + if (parsed) tracks.push(parsed) + } + } catch (_e) {} + } + } + if (tracks.length === 0) return { loadType: 'empty', data: {} } + return { + loadType: 'playlist', + data: { info: { name: playlistTitle, selectedTrack: 0 }, tracks } + } + } catch (e) { + return { + exception: { + message: `Playlist scraping failed: ${e.message}`, + severity: 'fault' + } + } + } + } + + _parseMeta(data) { + if (!Array.isArray(data) || data.length < 6) return null + const trackId = data[0], + ownerId = data[1], + title = data[3], + artist = data[4], + duration = data[5] + let rawUrl = data[2] + const coverUrl = data[14] ? 
data[14].split(',')[0] : null + if (rawUrl?.includes('audio_api_unavailable')) + rawUrl = this._unmask_url(rawUrl, this.userId) + const id = `${ownerId}_${trackId}` + const trackInfo = { + identifier: id, + isSeekable: true, + author: artist, + length: duration * 1000, + isStream: false, + position: 0, + title, + uri: `https://vk.com/audio${id}`, + artworkUrl: coverUrl, + isrc: null, + sourceName: 'vkmusic' + } + return { + encoded: encodeTrack(trackInfo), + info: trackInfo, + pluginInfo: { streamUrl: rawUrl } + } + } + + buildTrack(item) { + const artist = item.artist, + title = item.title, + duration = item.duration * 1000, + id = `${item.owner_id}_${item.id}` + let uri = item.url || '' + if (uri?.includes('audio_api_unavailable')) + uri = this._unmask_url(uri, this.userId) + const trackInfo = { + identifier: id, + isSeekable: true, + author: artist, + length: duration, + isStream: false, + position: 0, + title, + uri: `https://vk.com/audio${id}`, + artworkUrl: + item.album?.thumb?.photo_600 || item.album?.thumb?.photo_300 || null, + isrc: null, + sourceName: 'vkmusic' + } + return { + encoded: encodeTrack(trackInfo), + info: trackInfo, + pluginInfo: { access_key: item.access_key, streamUrl: uri } + } + } + + async getTrackUrl(decodedTrack) { + let url = decodedTrack.pluginInfo?.streamUrl + if (!url && this.hasToken) { + try { + let audios = decodedTrack.identifier + if (decodedTrack.pluginInfo?.access_key) + audios += `_${decodedTrack.pluginInfo.access_key}` + const res = await this._apiRequest('audio.getById', { audios }) + if (res && res.length > 0) { + url = res[0].url + if (url?.includes('audio_api_unavailable')) + url = this._unmask_url(url, this.userId) + } + } catch (_e) {} + } + if (!url) { + const scrapeRes = await this._scrapeTrack( + `https://vk.com/audio${decodedTrack.identifier}` + ) + if ( + scrapeRes.loadType === 'track' && + scrapeRes.data.pluginInfo?.streamUrl + ) + url = scrapeRes.data.pluginInfo.streamUrl + } + if (url) { + const isHls = url.includes('.m3u8') + return { + url, + protocol: isHls ? 'hls' : 'https', + format: isHls ? 
'mpegts' : 'mp3' + } + } + const query = `${decodedTrack.title} ${decodedTrack.author}` + const searchRes = await this.nodelink.sources.searchWithDefault(query) + if (searchRes.loadType === 'search' && searchRes.data.length > 0) { + const best = searchRes.data[0] + const streamInfo = await this.nodelink.sources.getTrackUrl(best.info) + return { newTrack: best, ...streamInfo } + } + return { + exception: { message: 'Failed to retrieve track URL.', severity: 'fault' } + } + } + + async loadStream(_track, url, _protocol) { + try { + if (url.includes('.m3u8')) { + const stream = new PassThrough() + manageVkHlsStream( + url, + stream, + this.cookie, + this.nodelink.routePlanner?.getIP() + ) + return { stream, type: 'mp3' } + } + const { stream, error } = await http1makeRequest(url, { + method: 'GET', + streamOnly: true, + headers: { 'User-Agent': USER_AGENT, Cookie: this.cookie } + }) + if (error) throw error + return { stream, type: 'mp3' } + } catch (e) { + return { exception: { message: e.message, severity: 'fault' } } + } + } + + async _apiRequest(method, params) { + if ( + this.cookie && + (!this.accessToken || + (this.tokenExpiry && Date.now() >= this.tokenExpiry - 60000)) + ) + await this._refreshAccessToken() + const url = new URL(API_BASE + method) + params.access_token = this.accessToken + params.v = API_VERSION + Object.keys(params).forEach((key) => + url.searchParams.append(key, params[key]) + ) + const { body, error, statusCode } = await makeRequest(url.toString(), { + method: 'GET', + headers: { + 'User-Agent': + 'KateMobileAndroid/56 lite-460 (Android 4.4.2; SDK 19; x86; unknown Android SDK built for x86; en)' + }, + localAddress: this.nodelink.routePlanner?.getIP() + }) + if (error || statusCode !== 200) { + if ( + (statusCode === 401 || (body && body.error?.error_code === 5)) && + this.cookie + ) { + await this._refreshAccessToken() + return this._apiRequest(method, params) + } + throw new Error(error?.message || `HTTP ${statusCode}`) + } + if (body.error) { + if (body.error.error_code === 5 && this.cookie) { + await this._refreshAccessToken() + return this._apiRequest(method, params) + } + throw new Error( + `VK API Error ${body.error.error_code}: ${body.error.error_msg}` + ) + } + return body.response + } + + _b64_decode(enc) { + let dec = '', + e = 0, + n = 0 + for (let i = 0; i < enc.length; i++) { + const c = enc[i], + r = BASE64_CHARS.indexOf(c) + if (r === -1) continue + const cond = n % 4 + e = cond ? 
64 * e + r : r + n++ + if (cond) dec += String.fromCharCode(255 & (e >> ((-2 * n) & 6))) + } + return dec + } + + _unmask_url(mask_url, vk_id) { + if (!mask_url.includes('audio_api_unavailable')) return mask_url + try { + const parts = mask_url.split('?extra=')[1].split('#'), + extra0 = parts[0], + extra1 = parts[1] + const split1 = this._b64_decode(extra1).split(String.fromCharCode(11)), + base = split1[1] + const maskUrlArr = this._b64_decode(extra0).split('') + const urlLen = maskUrlArr.length, + indexes = new Array(urlLen) + let index = parseInt(base, 10) ^ vk_id + for (let n = urlLen - 1; n >= 0; n--) { + index = ((urlLen * (n + 1)) ^ (index + n)) % urlLen + indexes[n] = index + } + for (let n = 1; n < urlLen; n++) { + const c = maskUrlArr[n], + idx = indexes[urlLen - 1 - n] + ;(maskUrlArr[n] = maskUrlArr[idx]), (maskUrlArr[idx] = c) + } + return maskUrlArr.join('') + } catch (e) { + logger('error', 'VKMusic', `Failed to unmask URL: ${e.message}`) + return null + } + } +} diff --git a/src/sources/youtube/CipherManager.js b/src/sources/youtube/CipherManager.js index f6017dc..442467e 100644 --- a/src/sources/youtube/CipherManager.js +++ b/src/sources/youtube/CipherManager.js @@ -1,5 +1,9 @@ -import { URLSearchParams } from 'node:url' -import { http1makeRequest, logger, makeRequest, getVersion } from '../../utils.js' +import { + getVersion, + http1makeRequest, + logger, + makeRequest +} from '../../utils.js' const CACHE_DURATION_MS = 12 * 60 * 60 * 1000 const VERSION = getVersion() @@ -23,11 +27,14 @@ export default class CipherManager { this.explicitPlayerScriptUrl = null this.userAgent = `nodelink/${VERSION} (https://github.com/PerformanC/NodeLink)` this.stsCache = new Map() - - setInterval(() => { - this.stsCache.clear() - logger('debug', 'YouTube-Cipher', 'Cleared STS cache (12h interval)') - }, 12 * 60 * 60 * 1000).unref() + + setInterval( + () => { + this.stsCache.clear() + logger('debug', 'YouTube-Cipher', 'Cleared STS cache (12h interval)') + }, + 12 * 60 * 60 * 1000 + ).unref() } setPlayerScriptUrl(url) { @@ -45,6 +52,14 @@ export default class CipherManager { return this.getCachedPlayerScript() } + const cachedUrl = this.nodelink.credentialManager.get( + 'yt_player_script_url' + ) + if (cachedUrl && !this.explicitPlayerScriptUrl) { + this.cachedPlayerScript = new CachedPlayerScript(cachedUrl) + return this.cachedPlayerScript + } + this.cipherLoadLock = true try { if ( @@ -106,6 +121,12 @@ export default class CipherManager { return this.stsCache.get(playerUrl) } + const cachedSts = this.nodelink.credentialManager.get(`yt_sts_${playerUrl}`) + if (cachedSts) { + this.stsCache.set(playerUrl, cachedSts) + return cachedSts + } + if (!this.config.url) { const { body: scriptContent, @@ -145,6 +166,11 @@ export default class CipherManager { ) this.stsCache.set(playerUrl, sts) + this.nodelink.credentialManager.set( + `yt_sts_${playerUrl}`, + sts, + 12 * 60 * 60 * 1000 + ) return sts } @@ -224,7 +250,7 @@ export default class CipherManager { `Cipher server at ${this.config.url} is online.` ) return true - } catch (e) { + } catch (_e) { logger( 'warn', 'YouTube-Cipher', @@ -240,7 +266,7 @@ export default class CipherManager { nParam, signatureKey, playerScript, - context + _context ) { if (!this.config.url) { throw new Error('Remote cipher URL is not configured.') diff --git a/src/sources/youtube/OAuth.js b/src/sources/youtube/OAuth.js index d542ca1..4392574 100644 --- a/src/sources/youtube/OAuth.js +++ b/src/sources/youtube/OAuth.js @@ -22,19 +22,21 @@ export default class OAuth { } } - 
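
The caching added across this patch goes through this.nodelink.credentialManager.get(key) and .set(key, value, ttlMs). The CredentialManager itself is not part of this diff, so the following is only a sketch of the minimal TTL-store interface those calls imply (TtlStore is a hypothetical name, not the project's implementation):

// Sketch only: an in-memory key/value store whose entries expire after a
// millisecond TTL, matching the get/set shape used by the patch.
class TtlStore {
  constructor() {
    this.entries = new Map()
  }

  set(key, value, ttlMs) {
    this.entries.set(key, { value, expiresAt: Date.now() + ttlMs })
  }

  get(key) {
    const entry = this.entries.get(key)
    if (!entry) return null
    if (Date.now() >= entry.expiresAt) {
      this.entries.delete(key)           // lazily drop expired entries
      return null
    }
    return entry.value
  }
}
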
this.refreshToken = foundToken ? (Array.isArray(foundToken) ? foundToken : [foundToken]) : [] + this.refreshToken = foundToken + ? Array.isArray(foundToken) + ? foundToken + : [foundToken] + : [] this.currentTokenIndex = 0 this.accessToken = null this.tokenExpiry = 0 } async getAccessToken() { - if (!this.refreshToken.length) { - logger( - 'debug', - 'YouTube-OAuth', - 'No refresh token configured. Skipping authentication.' - ) + if ( + !this.refreshToken.length || + (this.refreshToken.length === 1 && this.refreshToken[0] === '') + ) { return null } @@ -42,15 +44,27 @@ export default class OAuth { return this.accessToken } - logger('info', 'YouTube-OAuth', 'Refreshing access token...') + const cachedToken = this.nodelink.credentialManager.get('yt_access_token') + if (cachedToken) { + this.accessToken = cachedToken + this.tokenExpiry = Date.now() + 3500000 // Assume ~1h from now + return this.accessToken + } const maxTokenAttempts = this.refreshToken.length let tokensTried = 0 while (tokensTried < maxTokenAttempts) { const currentToken = this.refreshToken[this.currentTokenIndex] + if (!currentToken) { + this.currentTokenIndex = + (this.currentTokenIndex + 1) % this.refreshToken.length + tokensTried++ + continue + } + let attempts = 0 - + while (attempts < 3) { attempts++ try { @@ -70,133 +84,210 @@ export default class OAuth { if (!error && statusCode === 200 && body.access_token) { this.accessToken = body.access_token this.tokenExpiry = Date.now() + body.expires_in * 1000 - 30000 - logger('info', 'YouTube-OAuth', `Successfully refreshed access token using token index ${this.currentTokenIndex}.`) + this.nodelink.credentialManager.set( + 'yt_access_token', + this.accessToken, + body.expires_in * 1000 - 30000 + ) return this.accessToken } + } catch (_e) {} - logger('warn', 'YouTube-OAuth', `Token refresh failed (Attempt ${attempts}/3, Token Index ${this.currentTokenIndex}): ${error?.message || body?.error_description || statusCode}`) - } catch (e) { - logger('warn', 'YouTube-OAuth', `Token refresh exception (Attempt ${attempts}/3, Token Index ${this.currentTokenIndex}): ${e.message}`) - } - - await new Promise(r => setTimeout(r, 2000)) + await new Promise((r) => setTimeout(r, 2000)) } - logger('warn', 'YouTube-OAuth', `Failed to refresh access token with token index ${this.currentTokenIndex}. Trying next token if available.`) - this.currentTokenIndex = (this.currentTokenIndex + 1) % this.refreshToken.length + this.currentTokenIndex = + (this.currentTokenIndex + 1) % this.refreshToken.length tokensTried++ } - logger('error', 'YouTube-OAuth', 'All refresh tokens failed.') this.accessToken = null this.tokenExpiry = 0 return null } - async getAuthHeaders() { - const token = await this.getAccessToken() - if (!token) return {} - - return { - Authorization: `Bearer ${token}` - } - } - - static async acquireRefreshToken() { - logger( - 'info', - 'YouTube-OAuth', - 'Step 1: Requesting device code from Google...' 
- ) - const data = { - client_id: CLIENT_ID, - scope: SCOPES + async validateCurrentTokens() { + if ( + !this.refreshToken.length || + (this.refreshToken.length === 1 && this.refreshToken[0] === '') + ) { + return false } - try { - const { - body: response, - error, - statusCode - } = await makeRequest('https://www.youtube.com/o/oauth2/device/code', { - method: 'POST', - body: data - }) - - if (error || statusCode !== 200 || response.error) { - throw new Error( - `Error obtaining device code: ${error?.message || response.error_description || 'Invalid response'}` - ) - } - - logger( - 'info', - 'YouTube-OAuth', - '==================================================================' - ) - logger( - 'info', - 'YouTube-OAuth', - '🚨 ALERT: DO NOT USE YOUR MAIN GOOGLE ACCOUNT! USE A SECONDARY OR BURNER ACCOUNT ONLY!' - ) - logger( - 'info', - 'YouTube-OAuth', - 'To authorize, visit the following URL in your browser:' - ) - logger('info', 'YouTube-OAuth', `URL: ${response.verification_url}`) - logger( - 'info', - 'YouTube-OAuth', - `And enter the code: ${response.user_code}` - ) + const token = await this.getAccessToken() + if (token) { logger( 'info', - 'YouTube-OAuth', - '==================================================================' - ) - logger('info', 'YouTube-OAuth', 'Waiting for authorization...') - - const refreshToken = await OAuth.pollForToken( - response.device_code, - response.interval + 'OAuth', + '\x1b[33m==================================================================\x1b[0m' ) - logger( 'info', - 'YouTube-OAuth', - '==================================================================' + 'OAuth', + '\x1b[1m\x1b[32mYOUR refreshtoken IS VALID :)\x1b[0m' ) - logger('info', 'YouTube-OAuth', 'Authorization granted successfully!') logger( 'info', - 'YouTube-OAuth', - '==================================================================' + 'OAuth', + '\x1b[37mPlease disable the \x1b[33mgetOAuthToken\x1b[37m option if you restarted by accident\x1b[0m' ) logger( 'info', - 'YouTube-OAuth', - 'Refresh Token (use this to obtain new Access Tokens in the future):' + 'OAuth', + "\x1b[37mand didn't change it to \x1b[31mfalse\x1b[37m. If you want to get a second token\x1b[0m" ) - logger('info', 'YouTube-OAuth', refreshToken) logger( 'info', - 'YouTube-OAuth', - 'Save your Refresh Token in a secure place!' 
+ 'OAuth', + '\x1b[37mfor fallback, follow the same steps and add \x1b[32m, ""\x1b[37m for this new token below.\x1b[0m' ) logger( 'info', - 'YouTube-OAuth', - '==================================================================' + 'OAuth', + '\x1b[33m==================================================================\x1b[0m' ) + return true + } + return false + } - return refreshToken - } catch (error) { - logger('error', 'YouTube-OAuth', `Failed in Step 1: ${error.message}`) - throw error + async getAuthHeaders() { + const token = await this.getAccessToken() + if (!token) return {} + + return { + Authorization: `Bearer ${token}` } } + static async acquireRefreshToken() { + const data = { + client_id: CLIENT_ID, + scope: SCOPES + } + const { + body: response, + error, + statusCode + } = await makeRequest('https://www.youtube.com/o/oauth2/device/code', { + method: 'POST', + body: data + }) + + if (error || statusCode !== 200 || response.error) { + throw new Error( + `Error obtaining device code: ${error?.message || response.error_description || 'Invalid response'}` + ) + } + + logger( + 'info', + 'OAuth', + '\x1b[33m==================================================================\x1b[0m' + ) + logger( + 'info', + 'OAuth', + '\x1b[1m\x1b[31m🚨 ALERT: DO NOT USE YOUR MAIN GOOGLE ACCOUNT! USE A SECONDARY OR BURNER ACCOUNT ONLY!\x1b[0m' + ) + logger( + 'info', + 'OAuth', + '\x1b[36mTo authorize, visit the following URL in your browser:\x1b[0m' + ) + logger( + 'info', + 'OAuth', + `\x1b[1m\x1b[32mURL: ${response.verification_url}\x1b[0m` + ) + logger( + 'info', + 'OAuth', + `\x1b[36mAnd enter the code: \x1b[1m\x1b[37m${response.user_code}\x1b[0m` + ) + logger( + 'info', + 'OAuth', + '\x1b[33m==================================================================\x1b[0m' + ) + + const refreshToken = await OAuth.pollForToken( + response.device_code, + response.interval + ) + + logger( + 'info', + 'OAuth', + '\x1b[33m==================================================================\x1b[0m' + ) + logger( + 'info', + 'OAuth', + '\x1b[1m\x1b[32mAuthorization granted successfully! 
:)\x1b[0m' + ) + logger( + 'info', + 'OAuth', + '\x1b[33m==================================================================\x1b[0m' + ) + logger( + 'info', + 'OAuth', + '\x1b[36mCopy your Refresh Token and paste it in your \x1b[1mconfig.js\x1b[36m:\x1b[0m' + ) + logger('info', 'OAuth', `\x1b[1m\x1b[37m${refreshToken}\x1b[0m`) + logger( + 'info', + 'OAuth', + '\x1b[33m==================================================================\x1b[0m' + ) + logger('info', 'OAuth', '\x1b[1m\x1b[31mIMPORTANT:\x1b[0m') + logger( + 'info', + 'OAuth', + '\x1b[37mAfter pasting the token, you \x1b[1mMUST\x1b[37m set \x1b[33mgetOAuthToken\x1b[37m to \x1b[31mfalse\x1b[0m' + ) + logger( + 'info', + 'OAuth', + '\x1b[37motherwise the server will keep trying to obtain a new token on every restart.\x1b[0m' + ) + logger( + 'info', + 'OAuth', + '\x1b[33mExample JSON structure for your config.js:\x1b[0m' + ) + + const exampleJson = JSON.stringify( + { + sources: { + youtube: { + getOAuthToken: false, + clients: { + settings: { + TV: { + refreshToken: [refreshToken] + } + } + } + } + } + }, + null, + 2 + ) + + logger('info', 'OAuth', `\x1b[32m${exampleJson}\x1b[0m`) + logger( + 'info', + 'OAuth', + '\x1b[33m==================================================================\x1b[0m\n' + ) + + return refreshToken + } + static async pollForToken(deviceCode, interval) { const data = { client_id: CLIENT_ID, @@ -207,6 +298,11 @@ export default class OAuth { return new Promise((resolve, reject) => { const poll = async () => { + logger( + 'info', + 'OAuth', + '\x1b[35m>>> AWAITING...\x1b[0m waiting for token :P' + ) try { const { body: response, @@ -223,43 +319,24 @@ export default class OAuth { } else if (response.error === 'slow_down') { setTimeout(poll, (interval + 5) * 1000) } else if (response.error === 'expired_token') { - logger( - 'error', - 'YouTube-OAuth', - 'Authorization code expired. Please run the script again.' - ) reject(new Error('Authorization code expired.')) } else if (response.error === 'access_denied') { - logger( - 'error', - 'YouTube-OAuth', - 'Access denied. Authorization was cancelled.' 
- ) reject(new Error('Access denied.')) } else { - logger( - 'error', - 'YouTube-OAuth', - `Error during polling: ${response.error_description}` - ) reject( new Error(`Error during polling: ${response.error_description}`) ) } } else { + logger('info', 'OAuth', '>>> TOKEN RECEIVED :)') resolve(response.refresh_token) } - } catch (error) { - logger( - 'error', - 'YouTube-OAuth', - `Failed in Step 2 (Polling): ${error.message}` - ) + } catch (_error) { setTimeout(poll, interval * 1000) } } - setTimeout(poll, interval * 1000) + poll() }) } } diff --git a/src/sources/youtube/YouTube.js b/src/sources/youtube/YouTube.js index ab4f69c..27b27b4 100644 --- a/src/sources/youtube/YouTube.js +++ b/src/sources/youtube/YouTube.js @@ -26,21 +26,23 @@ async function _manageYoutubeHlsStream( ) { const segmentQueue = [] const processedSegments = new Set() - const processedOrder = [] + const MAX_PROCESSED_TRACK = 100 + const processedOrder = new Array(MAX_PROCESSED_TRACK) + let processedIndex = 0 let cleanedUp = false let playlistEnded = false const MAX_LIVE_QUEUE_SIZE = 15 - const MAX_PROCESSED_TRACK = 100 - const rememberSegment = (url) => { - if (processedSegments.has(url)) return false - processedSegments.add(url) - processedOrder.push(url) + const rememberSegment = (key) => { + if (processedSegments.has(key)) return false + + const old = processedOrder[processedIndex] + if (old !== undefined) processedSegments.delete(old) + + processedSegments.add(key) + processedOrder[processedIndex] = key + processedIndex = (processedIndex + 1) % MAX_PROCESSED_TRACK - if (processedOrder.length > MAX_PROCESSED_TRACK) { - const old = processedOrder.shift() - if (old) processedSegments.delete(old) - } return true } @@ -116,11 +118,14 @@ async function _manageYoutubeHlsStream( } const currentSegments = [] + let segIdx = 0 for (let i = 0; i < lines.length; i++) { if (lines[i].startsWith('#EXTINF:')) { const segmentUrl = lines[i + 1] if (segmentUrl && !segmentUrl.startsWith('#')) { - currentSegments.push(new URL(segmentUrl, playlistUrl).toString()) + const url = new URL(segmentUrl, playlistUrl).toString() + const seq = mediaSequence + segIdx++ + currentSegments.push({ url, seq }) } } } @@ -143,24 +148,24 @@ async function _manageYoutubeHlsStream( const segmentsToTake = isLive ? 3 : PLAYLIST_FALLBACK_SEGMENTS const startIdx = Math.max(0, currentSegments.length - segmentsToTake) for (let i = startIdx; i < currentSegments.length; i++) { - const url = currentSegments[i] - if (rememberSegment(url)) { - segmentQueue.push(url) + const seg = currentSegments[i] + const key = isLive ? seg.seq : seg.url + if (rememberSegment(key)) { + segmentQueue.push(seg) } } isFirstFetch = false } else { - for (const url of currentSegments) { - if (!processedSegments.has(url)) { + for (const seg of currentSegments) { + const key = isLive ? 
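
The rememberSegment rewrite above replaces an unbounded push/shift list with a Set for O(1) membership checks plus a fixed-size ring buffer that evicts the oldest key once MAX_PROCESSED_TRACK entries have been seen. A self-contained sketch of the same pattern (createBoundedSeen is a hypothetical name, not in the patch):

// Sketch only: a bounded "seen set" — returns true the first time a key is
// seen and false afterwards, evicting the oldest key once capacity is hit.
function createBoundedSeen(capacity) {
  const seen = new Set()
  const ring = new Array(capacity)
  let index = 0

  return (key) => {
    if (seen.has(key)) return false      // already processed
    const evicted = ring[index]
    if (evicted !== undefined) seen.delete(evicted)
    seen.add(key)
    ring[index] = key
    index = (index + 1) % capacity
    return true                          // first time this key is seen
  }
}

// const remember = createBoundedSeen(100)
// remember('seg-1')  // true
// remember('seg-1')  // false
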
seg.seq : seg.url + + if (!processedSegments.has(key)) { if (isLive && segmentQueue.length >= MAX_LIVE_QUEUE_SIZE) { - const oldUrl = segmentQueue.shift() - if (oldUrl) { - processedSegments.delete(oldUrl) - } + segmentQueue.shift() } - if (rememberSegment(url)) { - segmentQueue.push(url) + if (rememberSegment(key)) { + segmentQueue.push(seg) } } } @@ -185,11 +190,16 @@ async function _manageYoutubeHlsStream( } const segmentDownloader = async () => { + let nextSegmentPromise = null // { url, promise } + while (true) { - if (cancelSignal.aborted || (playlistEnded && segmentQueue.length === 0)) + if ( + cancelSignal.aborted || + (playlistEnded && segmentQueue.length === 0 && !nextSegmentPromise) + ) break - if (segmentQueue.length === 0) { + if (segmentQueue.length === 0 && !nextSegmentPromise) { await new Promise((resolve) => { const timeout = setTimeout(resolve, 50) if (typeof timeout.unref === 'function') timeout.unref() @@ -197,24 +207,62 @@ async function _manageYoutubeHlsStream( continue } - const segmentUrl = segmentQueue.shift() - if (processedSegments.has(segmentUrl)) { - processedSegments.delete(segmentUrl) - } + try { + let segmentUrl = null - if (cancelSignal.aborted) break + let res + if (nextSegmentPromise) { + segmentUrl = nextSegmentPromise.url + res = await nextSegmentPromise.promise + nextSegmentPromise = null + } else { + const seg = segmentQueue.shift() + if (!seg) continue + segmentUrl = seg.url + res = await http1makeRequest(segmentUrl, { streamOnly: true }) + } - try { - const res = await http1makeRequest(segmentUrl, { streamOnly: true }) + if ( + segmentQueue.length > 0 && + !nextSegmentPromise && + !cancelSignal.aborted + ) { + const nextSeg = segmentQueue.shift() + if (nextSeg) { + nextSegmentPromise = { + url: nextSeg.url, + promise: http1makeRequest(nextSeg.url, { streamOnly: true }) + } + } + } if (res.error || res.statusCode !== 200) { - logger( - 'warn', - 'YouTube-HLS-Downloader', - `Failed segment ${segmentUrl}: ${res.statusCode}` - ) if (res.stream) res.stream.destroy() - continue + + let retryCount = 0 + let success = false + while (retryCount < 3 && !cancelSignal.aborted) { + retryCount++ + const retryRes = await http1makeRequest(segmentUrl, { + streamOnly: true + }) + if (!retryRes.error && retryRes.statusCode === 200) { + res = retryRes + success = true + break + } + if (retryRes.stream) retryRes.stream.destroy() + await new Promise((r) => setTimeout(r, 500 * retryCount)) + } + + if (!success) { + logger( + 'warn', + 'YouTube-HLS-Downloader', + `Failed segment after retries: ${res.statusCode}` + ) + continue + } } if (outputStream.destroyed || cancelSignal.aborted) { @@ -238,7 +286,7 @@ async function _manageYoutubeHlsStream( logger( 'error', 'YouTube-HLS-Downloader', - `Error processing segment ${segmentUrl}: ${e.message}` + `Error processing segment: ${e.message}` ) } } @@ -268,7 +316,7 @@ async function _manageYoutubeHlsStream( let bestAudioOnlyUrl = null let bestBandwidth = 0 let bestAudioOnlyBandwidth = 0 - let isLive = + const isLive = masterPlaylistContent.includes('yt_live_broadcast') || masterPlaylistContent.includes('live/1') @@ -330,7 +378,9 @@ export default class YouTubeSource { constructor(nodelink) { this.nodelink = nodelink this.config = nodelink.options.sources.youtube - this.searchTerms = ['youtube', 'ytsearch', 'ytmsearch', 'ytmusic'] + this.additionalsSourceName = ['ytmusic'] + this.searchTerms = ['ytsearch', 'ytmsearch'] + this.recommendationTerm = ['ytrec'] this.patterns = [ 
/^https?:\/\/(?:www\.)?(?:youtube\.com\/(?:watch\?v=[\w-]+(?:&list=[\w-]+)?|playlist\?list=[\w-]+|live\/[\w-]+)|youtu\.be\/[\w-]+)/, /^https?:\/\/(?:www\.)?youtube\.com\/shorts\/[\w-]+/, @@ -418,6 +468,23 @@ export default class YouTubeSource { } async _fetchVisitorData() { + const cachedVisitorData = + this.nodelink.credentialManager.get('yt_visitor_data') + const cachedPlayerScript = this.nodelink.credentialManager.get( + 'yt_player_script_url' + ) + + if (cachedVisitorData && cachedPlayerScript) { + this.ytContext.client.visitorData = cachedVisitorData + this.cipherManager.setPlayerScriptUrl(cachedPlayerScript) + logger( + 'debug', + 'YouTube', + 'Context and player script loaded from cache. Skipping network request.' + ) + return + } + logger('debug', 'YouTube', 'Fetching visitor data...') let playerScriptUrl = null @@ -433,6 +500,11 @@ export default class YouTubeSource { const visitorMatch = data?.match(/"VISITOR_DATA":"([^"]+)"/) if (visitorMatch?.[1]) { this.ytContext.client.visitorData = visitorMatch[1] + this.nodelink.credentialManager.set( + 'yt_visitor_data', + visitorMatch[1], + 24 * 60 * 60 * 1000 + ) visitorFound = true } @@ -442,6 +514,11 @@ export default class YouTubeSource { /\/[a-z]{2}_[A-Z]{2}\//, '/en_US/' ) + this.nodelink.credentialManager.set( + 'yt_player_script_url', + playerScriptUrl, + 12 * 60 * 60 * 1000 + ) logger('debug', 'YouTube', `Player script URL: ${playerScriptUrl}`) } } @@ -479,7 +556,11 @@ export default class YouTubeSource { if (playerScriptUrl) this.cipherManager.setPlayerScriptUrl(playerScriptUrl) } - async search(query, type) { + async search(query, type, searchType = 'track') { + if (type === 'ytrec') { + return this.getRecommendations(query) + } + let clientList = this.config.clients.search if (type === 'ytmsearch') { @@ -496,9 +577,9 @@ export default class YouTubeSource { logger( 'debug', 'YouTube', - `Attempting search with client: ${clientName}` + `Attempting ${searchType} search with client: ${clientName}` ) - const result = await client.search(query, type, this.ytContext) + const result = await client.search(query, searchType, this.ytContext) if (result && result.loadType === 'search') { logger( @@ -542,6 +623,82 @@ export default class YouTubeSource { } } + async getRecommendations(query) { + let videoId = query + if (!/^[a-zA-Z0-9_-]{11}$/.test(query)) { + const searchRes = await this.search(query, 'ytmsearch') + if (searchRes.loadType !== 'search' || !searchRes.data.length) { + return { loadType: 'empty', data: {} } + } + videoId = searchRes.data[0].info.identifier + } + + try { + const automixId = `RD${videoId}` + let automixRes = null + + if (this.clients.Music) { + try { + automixRes = await this.clients.Music.resolve( + `https://music.youtube.com/playlist?list=${automixId}`, + 'ytmusic', + this.ytContext, + this.cipherManager + ) + } catch (e) { + logger( + 'debug', + 'YouTube', + `Music client failed for recommendations: ${e.message}` + ) + } + } + + if ( + (!automixRes || automixRes.loadType !== 'playlist') && + this.clients.TV + ) { + try { + automixRes = await this.clients.TV.resolve( + `https://www.youtube.com/playlist?list=${automixId}`, + 'youtube', + this.ytContext, + this.cipherManager + ) + } catch (e) { + logger( + 'debug', + 'YouTube', + `TV client failed for recommendations: ${e.message}` + ) + } + } + + if ( + automixRes && + automixRes.loadType === 'playlist' && + automixRes.data.tracks.length > 0 + ) { + const tracks = automixRes.data.tracks.filter( + (t) => t.info.identifier !== videoId + ) + return { + loadType: 
'playlist', + data: { + info: { name: 'YouTube Recommendations', selectedTrack: 0 }, + pluginInfo: { type: 'recommendations' }, + tracks + } + } + } + + return { loadType: 'empty', data: {} } + } catch (e) { + logger('error', 'YouTube', `Recommendations failed: ${e.message}`) + return { exception: { message: e.message, severity: 'fault' } } + } + } + async resolve(url, type) { const liveMatch = url.match( /^https?:\/\/(?:www\.)?youtube\.com\/live\/([\w-]+)/ @@ -1389,13 +1546,25 @@ export default class YouTubeSource { activeRequest = null fetching = false if (!destroyed) { - logger('warn', 'YouTube', `Range request error at pos ${position}: ${err.message}`) - if (++errors >= MAX_RETRIES) { + logger( + 'warn', + 'YouTube', + `Range request error at pos ${position}: ${err.message}` + ) + const isAborted = + err.message === 'aborted' || err.code === 'ECONNRESET' + if (++errors >= MAX_RETRIES || isAborted) { + if (isAborted) + logger( + 'warn', + 'YouTube', + 'Connection aborted, forcing immediate recovery with new URL.' + ) recover(err) } else { const timeout = setTimeout( fetchNext, - Math.min(1000 * Math.pow(2, errors - 1), 5000) + Math.min(1000 * 2 ** (errors - 1), 5000) ) if (typeof timeout.unref === 'function') timeout.unref() } @@ -1415,13 +1584,25 @@ export default class YouTubeSource { activeRequest = null fetching = false if (!destroyed) { - logger('warn', 'YouTube', `Range request exception at pos ${position}: ${err.message}`) - if (++errors >= MAX_RETRIES) { + logger( + 'warn', + 'YouTube', + `Range request exception at pos ${position}: ${err.message}` + ) + const isAborted = + err.message === 'aborted' || err.code === 'ECONNRESET' + if (++errors >= MAX_RETRIES || isAborted) { + if (isAborted) + logger( + 'warn', + 'YouTube', + 'Connection aborted, forcing immediate recovery with new URL.' 
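
The retry delay above is a capped exponential backoff: 1000 * 2 ** (errors - 1) milliseconds clamped to 5 seconds, so successive attempts wait 1 s, 2 s, 4 s, then at most 5 s. A quick standalone illustration (retryDelayMs is a hypothetical helper, not in the patch):

// Sketch only: the capped exponential backoff schedule used for range-request retries.
function retryDelayMs(errorCount, baseMs = 1000, capMs = 5000) {
  return Math.min(baseMs * 2 ** (errorCount - 1), capMs)
}

// retryDelayMs(1) === 1000, retryDelayMs(2) === 2000, retryDelayMs(4) === 5000
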
+ ) recover(err) } else { const timeout = setTimeout( fetchNext, - Math.min(1000 * Math.pow(2, errors - 1), 5000) + Math.min(1000 * 2 ** (errors - 1), 5000) ) if (typeof timeout.unref === 'function') timeout.unref() } @@ -1432,15 +1613,22 @@ export default class YouTubeSource { const recover = async (causeError) => { if (destroyed || cancelSignal.aborted) return - const isForbidden = causeError?.message?.includes('403') || causeError?.statusCode === 403 + const isForbidden = + causeError?.message?.includes('403') || causeError?.statusCode === 403 + const isAborted = + causeError?.message === 'aborted' || causeError?.code === 'ECONNRESET' - if (!isForbidden && refreshes === 0) { - logger('debug', 'YouTube', `Retrying same URL for recovery first (cause: ${causeError?.message})...`) - errors = 0 - fetching = false - fetchNext() - refreshes++ - return + if (!isForbidden && !isAborted && refreshes === 0) { + logger( + 'debug', + 'YouTube', + `Retrying same URL for recovery first (cause: ${causeError?.message})...` + ) + errors = 0 + fetching = false + fetchNext() + refreshes++ + return } if (++refreshes > MAX_URL_REFRESH) { @@ -1481,7 +1669,10 @@ export default class YouTubeSource { `Recovery failed (attempt ${refreshes}): ${error.message}` ) if (!destroyed && !cancelSignal.aborted) { - recoverTimeout = setTimeout(() => recover(causeError), 4000 + refreshes * 1000) + recoverTimeout = setTimeout( + () => recover(causeError), + 4000 + refreshes * 1000 + ) if (typeof recoverTimeout.unref === 'function') { recoverTimeout.unref() } @@ -1518,4 +1709,4 @@ export default class YouTubeSource { return [] } } -} \ No newline at end of file +} diff --git a/src/sources/youtube/clients/Android.js b/src/sources/youtube/clients/Android.js index 767e811..69dc547 100644 --- a/src/sources/youtube/clients/Android.js +++ b/src/sources/youtube/clients/Android.js @@ -1,9 +1,9 @@ import { logger, makeRequest } from '../../../utils.js' import { BaseClient, - YOUTUBE_CONSTANTS, buildTrack, - checkURLType + checkURLType, + YOUTUBE_CONSTANTS } from '../common.js' export default class Android extends BaseClient { @@ -15,9 +15,9 @@ export default class Android extends BaseClient { return { client: { clientName: 'ANDROID', - clientVersion: '20.38.37', + clientVersion: '21.02.35', userAgent: - 'com.google.android.youtube/20.38.37 (Linux; U; Android 14) gzip', + 'com.google.android.youtube/21.02.35 (Linux; U; Android 14) identity', deviceMake: 'Google', deviceModel: 'Pixel 6', osName: 'Android', @@ -39,10 +39,14 @@ export default class Android extends BaseClient { async search(query, type, context) { const sourceName = 'youtube' + let params = 'EgIQAQ%3D%3D' // Default to track (video) + if (type === 'playlist' || type === 'album') params = 'EgIQAw%3D%3D' + if (type === 'artist' || type === 'channel') params = 'EgIQAg%3D%3D' + const requestBody = { context: this.getClient(context), query: query, - params: 'EgIQAQ%3D%3D' + params } try { @@ -98,11 +102,27 @@ export default class Android extends BaseClient { } const tracks = [] - const allSections = searchResult.contents?.sectionListRenderer?.contents - const lastIdx = allSections?.length - 1 - let videos = allSections?.[lastIdx]?.itemSectionRenderer?.contents + const allSections = + searchResult.contents?.sectionListRenderer?.contents || [] + const items = [] + + for (const section of allSections) { + let contents = section.itemSectionRenderer?.contents + if (!contents) { + const shelf = section.shelfRenderer || section.richShelfRenderer + contents = + 
shelf?.content?.verticalListRenderer?.items || + shelf?.content?.richGridRenderer?.contents + } + + if (Array.isArray(contents)) { + for (const item of contents) { + items.push(item.richItemRenderer?.content || item) + } + } + } - if (!videos || videos.length === 0) { + if (items.length === 0) { logger( 'debug', 'YouTube-Android', @@ -112,21 +132,25 @@ export default class Android extends BaseClient { } const maxResults = this.config.maxSearchResults || 10 - if (videos.length > maxResults) { - let count = 0 - videos = videos.filter((video) => { - const isValid = video.videoRenderer || video.compactVideoRenderer - if (isValid && count < maxResults) { - count++ - return true - } - return false - }) - } + let count = 0 + const filteredItems = items.filter((item) => { + const isValid = + item.videoRenderer || + item.compactVideoRenderer || + item.playlistRenderer || + item.compactPlaylistRenderer || + item.channelRenderer || + item.elementRenderer + if (isValid && count < maxResults) { + count++ + return true + } + return false + }) - for (const videoData of videos) { + for (const itemData of filteredItems) { const track = await buildTrack( - videoData, + itemData, sourceName, null, null, @@ -159,7 +183,7 @@ export default class Android extends BaseClient { } } - async resolve(url, type, context, cipherManager) { + async resolve(url, _type, context, cipherManager) { const sourceName = 'youtube' const urlType = checkURLType(url, 'youtube') const apiEndpoint = 'https://youtubei.googleapis.com' diff --git a/src/sources/youtube/clients/AndroidVR.js b/src/sources/youtube/clients/AndroidVR.js index 34a72fd..80edc9b 100644 --- a/src/sources/youtube/clients/AndroidVR.js +++ b/src/sources/youtube/clients/AndroidVR.js @@ -1,9 +1,9 @@ import { logger, makeRequest } from '../../../utils.js' import { BaseClient, - YOUTUBE_CONSTANTS, buildTrack, - checkURLType + checkURLType, + YOUTUBE_CONSTANTS } from '../common.js' export default class AndroidVR extends BaseClient { @@ -15,12 +15,13 @@ export default class AndroidVR extends BaseClient { return { client: { clientName: 'ANDROID_VR', - clientVersion: '1.65.10', + clientVersion: '1.71.26', userAgent: - 'com.google.android.apps.youtube.vr.oculus/1.65.10 (Linux; U; Android 12L; eureka-user Build/SQ3A.220605.009.A1) gzip', + 'com.google.android.apps.youtube.vr.oculus/1.71.26 (Linux; U; Android 15; eureka-user Build/AP4A.250205.002) gzip', + deviceMake: 'Google', osName: 'Android', - osVersion: '12L', - androidSdkVersion: '32', + osVersion: '15', + androidSdkVersion: '35', hl: context.client.hl, gl: context.client.gl, visitorData: context.client.visitorData @@ -34,7 +35,7 @@ export default class AndroidVR extends BaseClient { return false } - async search(query, type, context) { + async search(query, _type, context) { const sourceName = 'youtube' const requestBody = { @@ -157,7 +158,7 @@ export default class AndroidVR extends BaseClient { } } - async resolve(url, type, context, cipherManager) { + async resolve(url, _type, context, cipherManager) { const sourceName = 'youtube' const urlType = checkURLType(url, 'youtube') const apiEndpoint = 'https://youtubei.googleapis.com' diff --git a/src/sources/youtube/clients/IOS.js b/src/sources/youtube/clients/IOS.js index 87ef3f1..8c1887e 100644 --- a/src/sources/youtube/clients/IOS.js +++ b/src/sources/youtube/clients/IOS.js @@ -1,10 +1,5 @@ import { logger, makeRequest } from '../../../utils.js' -import { - BaseClient, - YOUTUBE_CONSTANTS, - buildTrack, - checkURLType -} from '../common.js' +import { BaseClient, 
checkURLType, YOUTUBE_CONSTANTS } from '../common.js' export default class IOS extends BaseClient { constructor(nodelink, oauth) { @@ -15,13 +10,13 @@ export default class IOS extends BaseClient { return { client: { clientName: 'IOS', - clientVersion: '20.10.4', + clientVersion: '21.02.1', userAgent: - 'com.google.ios.youtube/20.10.4 (iPhone16,2; U; CPU iOS 18_3_2 like Mac OS X;)', + 'com.google.ios.youtube/21.02.1 (iPhone16,2; U; CPU iOS 18_2 like Mac OS X;)', deviceMake: 'Apple', deviceModel: 'iPhone16,2', osName: 'iPhone', - osVersion: '18.3.2.22D82', + osVersion: '18.2.22C152', utcOffsetMinutes: 0, hl: context.client.hl, gl: context.client.gl, @@ -44,7 +39,7 @@ export default class IOS extends BaseClient { return { loadType: 'empty', data: {} } } - async resolve(url, type, context, cipherManager) { + async resolve(url, _type, context, cipherManager) { const sourceName = 'youtube' const urlType = checkURLType(url, 'youtube') const apiEndpoint = 'https://youtubei.googleapis.com' diff --git a/src/sources/youtube/clients/Music.js b/src/sources/youtube/clients/Music.js index 9bd079a..b4b354f 100644 --- a/src/sources/youtube/clients/Music.js +++ b/src/sources/youtube/clients/Music.js @@ -1,9 +1,9 @@ import { logger, makeRequest } from '../../../utils.js' import { BaseClient, - YOUTUBE_CONSTANTS, buildTrack, - checkURLType + checkURLType, + YOUTUBE_CONSTANTS } from '../common.js' export default class Music extends BaseClient { @@ -34,10 +34,15 @@ export default class Music extends BaseClient { async search(query, type, context) { const sourceName = 'ytmusic' + let params = 'EgWKAQIIAWoQEAMQBBAJEAoQBRAREBAQFQ%3D%3D' // Default (Tracks) + if (type === 'playlist') params = 'EgWKAQIoAWoKEAMQBBAJEAoQBRAB' + if (type === 'album') params = 'EgWKAQIYAWoKEAMQBBAJEAoQBRAB' + if (type === 'artist') params = 'EgWKAQIYAWoKEAMQBBAJEAoQBRAB' + const requestBody = { context: this.getClient(context), query: query, - params: 'EgWKAQIIAWoQEAMQBBAJEAoQBRAREBAQFQ%3D%3D' + params } const { @@ -78,9 +83,11 @@ export default class Music extends BaseClient { } } - const tabContent = searchResult.contents?.tabbedSearchResultsRenderer?.tabs?.[0]?.tabRenderer?.content - - let loggedVideoData = false + const tabContent = + searchResult.contents?.tabbedSearchResultsRenderer?.tabs?.[0]?.tabRenderer + ?.content + + const _loggedVideoData = false const tracks = [] let videos = null @@ -98,10 +105,16 @@ export default class Music extends BaseClient { videos = findShelf(tabContent.sectionListRenderer.contents) } - if (!videos && tabContent?.musicSplitViewRenderer?.mainContent?.sectionListRenderer) { - videos = findShelf(tabContent.musicSplitViewRenderer.mainContent.sectionListRenderer.contents) + if ( + !videos && + tabContent?.musicSplitViewRenderer?.mainContent?.sectionListRenderer + ) { + videos = findShelf( + tabContent.musicSplitViewRenderer.mainContent.sectionListRenderer + .contents + ) } - + if (!videos || videos.length === 0) { logger( 'debug', @@ -112,7 +125,9 @@ export default class Music extends BaseClient { } for (const video of videos) { - const renderer = video.musicResponsiveListItemRenderer || video.musicTwoColumnItemRenderer + const renderer = + video.musicResponsiveListItemRenderer || + video.musicTwoColumnItemRenderer if (!renderer) { continue } @@ -126,10 +141,10 @@ export default class Music extends BaseClient { return { loadType: 'search', data: tracks } } - async resolve(url, type, context, cipherManager) { + async resolve(url, _type, context, cipherManager) { const sourceName = 'ytmusic' const urlType 
= checkURLType(url, sourceName) - const apiEndpoint = this.getApiEndpoint() + const _apiEndpoint = this.getApiEndpoint() switch (urlType) { case YOUTUBE_CONSTANTS.VIDEO: @@ -162,7 +177,7 @@ export default class Music extends BaseClient { exception: { message, severity: 'common', cause: 'Upstream' } } } - + return await this._handlePlayerResponse( playerResponse, sourceName, @@ -171,13 +186,43 @@ export default class Music extends BaseClient { } case YOUTUBE_CONSTANTS.PLAYLIST: { - return { - exception: { - message: 'Music client does not support playlists', - severity: 'common', - cause: 'UpstreamPlayability' + const listIdMatch = url.match(/[?&]list=([\w-]+)/) + if (!listIdMatch || !listIdMatch[1]) { + return { loadType: 'empty', data: {} } + } + const playlistId = listIdMatch[1] + + const body = { + context: this.getClient(context), + playlistId, + enablePersistentPlaylistPanel: true, + isAudioOnly: true + } + + const { body: res, statusCode } = await makeRequest( + 'https://music.youtube.com/youtubei/v1/next', + { + method: 'POST', + body, + headers: { + 'User-Agent': this.getClient(context).client.userAgent, + 'X-Goog-Api-Format-Version': '2' + }, + disableBodyCompression: true } + ) + + if (statusCode !== 200 || !res) { + return { loadType: 'empty', data: {} } } + + return await this._handlePlaylistResponse( + playlistId, + null, + res, + sourceName, + context + ) } default: @@ -185,7 +230,7 @@ export default class Music extends BaseClient { } } - async getTrackUrl(decodedTrack, context, cipherManager) { + async getTrackUrl(_decodedTrack, _context, _cipherManager) { return { exception: { message: 'Music client does not provide direct track URLs.', diff --git a/src/sources/youtube/clients/TV.js b/src/sources/youtube/clients/TV.js index ab7854e..42ddbf0 100644 --- a/src/sources/youtube/clients/TV.js +++ b/src/sources/youtube/clients/TV.js @@ -1,10 +1,5 @@ import { logger, makeRequest } from '../../../utils.js' -import { - BaseClient, - YOUTUBE_CONSTANTS, - buildTrack, - checkURLType -} from '../common.js' +import { BaseClient, checkURLType, YOUTUBE_CONSTANTS } from '../common.js' export default class TV extends BaseClient { constructor(nodelink, oauth) { @@ -15,8 +10,9 @@ export default class TV extends BaseClient { return { client: { clientName: 'TVHTML5', - clientVersion: '7.20250923.13.00', - userAgent: 'Mozilla/5.0 (ChromiumStylePlatform) Cobalt/Version', + clientVersion: '7.20260113.16.00', + userAgent: + 'Mozilla/5.0(SMART-TV; Linux; Tizen 4.0.0.2) AppleWebkit/605.1.15 (KHTML, like Gecko) SamsungBrowser/9.2 TV Safari/605.1.15', hl: context.client.hl, gl: context.client.gl }, @@ -51,7 +47,7 @@ export default class TV extends BaseClient { return {} } - async resolve(url, type, context, cipherManager) { + async resolve(url, _type, context, cipherManager) { const sourceName = 'youtube' const urlType = checkURLType(url, 'youtube') const apiEndpoint = this.getApiEndpoint() diff --git a/src/sources/youtube/clients/TVEmbedded.js b/src/sources/youtube/clients/TVEmbedded.js index b77197c..3870030 100644 --- a/src/sources/youtube/clients/TVEmbedded.js +++ b/src/sources/youtube/clients/TVEmbedded.js @@ -1,10 +1,5 @@ import { logger, makeRequest } from '../../../utils.js' -import { - BaseClient, - YOUTUBE_CONSTANTS, - buildTrack, - checkURLType -} from '../common.js' +import { BaseClient, checkURLType, YOUTUBE_CONSTANTS } from '../common.js' export default class TVEmbedded extends BaseClient { constructor(nodelink, oauth) { @@ -16,13 +11,14 @@ export default class TVEmbedded extends BaseClient 
{ client: { clientName: 'TVHTML5_SIMPLY_EMBEDDED_PLAYER', clientVersion: '2.0', - userAgent: 'Mozilla/5.0 (ChromiumStylePlatform) Cobalt/Version', + userAgent: + 'Mozilla/5.0 (Linux armeabi-v7a; Android 7.1.2; Fire OS 6.0) Cobalt/22.lts.3.306369-gold (unlike Gecko) v8/8.8.278.8-jit gles Starboard/13, Amazon_ATV_mediatek8695_2019/NS6294 (Amazon, AFTMM, Wireless) com.amazon.firetv.youtube/22.3.r2.v66.0', hl: context.client.hl, gl: context.client.gl }, user: { lockedSafetyMode: false }, request: { useSsl: true }, - thirdParty: { embedUrl: 'https://www.youtube.com' } + thirdParty: { embedUrl: 'https://www.youtube.com/tv' } } } @@ -60,7 +56,7 @@ export default class TVEmbedded extends BaseClient { return {} } - async resolve(url, type, context, cipherManager) { + async resolve(url, _type, context, cipherManager) { const sourceName = 'youtube' const urlType = checkURLType(url, 'youtube') const apiEndpoint = this.getApiEndpoint() diff --git a/src/sources/youtube/clients/Web.js b/src/sources/youtube/clients/Web.js index 9d6551d..8a0c342 100644 --- a/src/sources/youtube/clients/Web.js +++ b/src/sources/youtube/clients/Web.js @@ -1,9 +1,9 @@ import { logger, makeRequest } from '../../../utils.js' import { BaseClient, - YOUTUBE_CONSTANTS, buildTrack, - checkURLType + checkURLType, + YOUTUBE_CONSTANTS } from '../common.js' export default class Web extends BaseClient { @@ -15,10 +15,10 @@ export default class Web extends BaseClient { return { client: { clientName: 'WEB', - clientVersion: '2.20251030.01.00', + clientVersion: '2.20260114.01.00', platform: 'DESKTOP', userAgent: - 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/142.0.0.0 Safari/537.36,gzip(gfe)', + 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/143.0.0.0 Safari/537.36,gzip(gfe)', hl: context.client.hl, gl: context.client.gl }, @@ -31,7 +31,7 @@ export default class Web extends BaseClient { return true } - async search(query, type, context) { + async search(query, _type, context) { const sourceName = 'youtube' const requestBody = { @@ -129,7 +129,7 @@ export default class Web extends BaseClient { return { loadType: 'search', data: tracks } } - async resolve(url, type, context, cipherManager) { + async resolve(url, _type, context, cipherManager) { const sourceName = 'youtube' const urlType = checkURLType(url, 'youtube') const apiEndpoint = this.getApiEndpoint() @@ -283,17 +283,18 @@ export default class Web extends BaseClient { query: trackInfo.identifier } - const { body: searchResult, error, statusCode } = await makeRequest( - 'https://www.youtube.com/youtubei/v1/search', - { - method: 'POST', - headers: { - 'User-Agent': this.getClient(context).client.userAgent - }, - body: requestBody, - disableBodyCompression: true - } - ) + const { + body: searchResult, + error, + statusCode + } = await makeRequest('https://www.youtube.com/youtubei/v1/search', { + method: 'POST', + headers: { + 'User-Agent': this.getClient(context).client.userAgent + }, + body: requestBody, + disableBodyCompression: true + }) if (error || statusCode !== 200) { throw new Error( @@ -301,8 +302,9 @@ export default class Web extends BaseClient { ) } - const contents = searchResult.contents?.twoColumnSearchResultsRenderer - ?.primaryContents?.sectionListRenderer?.contents + const contents = + searchResult.contents?.twoColumnSearchResultsRenderer?.primaryContents + ?.sectionListRenderer?.contents if (!contents) return [] @@ -311,7 +313,10 @@ export default class Web extends BaseClient { for 
(const section of contents) { if (section.itemSectionRenderer) { for (const item of section.itemSectionRenderer.contents) { - if (item.videoRenderer && item.videoRenderer.videoId === trackInfo.identifier) { + if ( + item.videoRenderer && + item.videoRenderer.videoId === trackInfo.identifier + ) { videoRenderer = item.videoRenderer break } @@ -322,9 +327,9 @@ export default class Web extends BaseClient { if (!videoRenderer) return [] - const macroMarkersCards = videoRenderer.expandableMetadata - ?.expandableMetadataRenderer?.expandedContent - ?.horizontalCardListRenderer?.cards + const macroMarkersCards = + videoRenderer.expandableMetadata?.expandableMetadataRenderer + ?.expandedContent?.horizontalCardListRenderer?.cards if (!macroMarkersCards) return [] @@ -333,12 +338,15 @@ export default class Web extends BaseClient { for (const card of macroMarkersCards) { const renderer = card.macroMarkersListItemRenderer if (renderer) { - const title = renderer.title?.simpleText || renderer.title?.runs?.[0]?.text - const timeStr = renderer.timeDescription?.simpleText || renderer.timeDescription?.runs?.[0]?.text - + const title = + renderer.title?.simpleText || renderer.title?.runs?.[0]?.text + const timeStr = + renderer.timeDescription?.simpleText || + renderer.timeDescription?.runs?.[0]?.text + let thumbnails = [] - if (renderer.thumbnail && renderer.thumbnail.thumbnails) { - thumbnails = renderer.thumbnail.thumbnails + if (renderer.thumbnail?.thumbnails) { + thumbnails = renderer.thumbnail.thumbnails } if (title && timeStr) { diff --git a/src/sources/youtube/common.js b/src/sources/youtube/common.js index bda3ffc..f9b2573 100644 --- a/src/sources/youtube/common.js +++ b/src/sources/youtube/common.js @@ -57,19 +57,75 @@ function formatDuration(ms) { } function formatNumber(num) { - if (!num || isNaN(num)) return '0' + if (!num || Number.isNaN(num)) return '0' if (num >= 1000000000) return `${(num / 1000000000).toFixed(1)}B` if (num >= 1000000) return `${(num / 1000000).toFixed(1)}M` if (num >= 1000) return `${(num / 1000).toFixed(1)}K` return String(num) } +function _buildReadableTime(units) { + if (units.years > 0) + return `${units.years} year${units.years > 1 ? 's' : ''} ago` + if (units.months > 0) + return `${units.months} month${units.months > 1 ? 's' : ''} ago` + if (units.weeks > 0) + return `${units.weeks} week${units.weeks > 1 ? 's' : ''} ago` + if (units.days > 0) return `${units.days} day${units.days > 1 ? 's' : ''} ago` + if (units.hours > 0) + return `${units.hours} hour${units.hours > 1 ? 's' : ''} ago` + if (units.minutes > 0) + return `${units.minutes} minute${units.minutes > 1 ? 's' : ''} ago` + if (units.seconds > 0) + return `${units.seconds} second${units.seconds > 1 ? 
's' : ''} ago` + return 'just now' +} + +function _buildPublishedAtFromTimestamp(timestamp, originalText) { + const diff = Date.now() - timestamp + const diffAbs = Math.abs(diff) + const resultUnits = { + years: 0, + months: 0, + weeks: 0, + days: 0, + hours: 0, + minutes: 0, + seconds: 0 + } + + if (diffAbs >= TIME_UNIT_MULTIPLIERS.year) { + resultUnits.years = Math.floor(diffAbs / TIME_UNIT_MULTIPLIERS.year) + } else if (diffAbs >= TIME_UNIT_MULTIPLIERS.month) { + resultUnits.months = Math.floor(diffAbs / TIME_UNIT_MULTIPLIERS.month) + } else if (diffAbs >= TIME_UNIT_MULTIPLIERS.week) { + resultUnits.weeks = Math.floor(diffAbs / TIME_UNIT_MULTIPLIERS.week) + } else if (diffAbs >= TIME_UNIT_MULTIPLIERS.day) { + resultUnits.days = Math.floor(diffAbs / TIME_UNIT_MULTIPLIERS.day) + } else if (diffAbs >= TIME_UNIT_MULTIPLIERS.hour) { + resultUnits.hours = Math.floor(diffAbs / TIME_UNIT_MULTIPLIERS.hour) + } else if (diffAbs >= TIME_UNIT_MULTIPLIERS.minute) { + resultUnits.minutes = Math.floor(diffAbs / TIME_UNIT_MULTIPLIERS.minute) + } else { + resultUnits.seconds = Math.floor(diffAbs / TIME_UNIT_MULTIPLIERS.second) + } + + return { + original: originalText, + timestamp: Math.floor(timestamp), + date: new Date(timestamp).toISOString(), + readable: _buildReadableTime(resultUnits), + compact: `${resultUnits.years}y ${resultUnits.months}mo ${resultUnits.weeks}w ${resultUnits.days}d ${resultUnits.hours}h ${resultUnits.minutes}m ${resultUnits.seconds}s`, + ago: resultUnits + } +} + function parsePublishedAt(publishedText) { if (!publishedText) return null const date = new Date(publishedText) - if (!isNaN(date.getTime())) { + if (!Number.isNaN(date.getTime())) { return _buildPublishedAtFromTimestamp(date.getTime(), publishedText) } @@ -494,7 +550,7 @@ async function resolveExternalLinks(externalLinks, makeRequest) { followRedirects: true, maxRedirects: 5 }) - if (response.finalUrl && response.finalUrl.includes('spotify.com')) { + if (response.finalUrl?.includes('spotify.com')) { resolved.spotify = response.finalUrl const match = response.finalUrl.match( @@ -507,7 +563,7 @@ async function resolveExternalLinks(externalLinks, makeRequest) { } } } - } catch (e) { } + } catch (_e) {} } if ( @@ -521,10 +577,10 @@ async function resolveExternalLinks(externalLinks, makeRequest) { followRedirects: true, maxRedirects: 5 }) - if (response.finalUrl && response.finalUrl.includes('music.apple.com')) { + if (response.finalUrl?.includes('music.apple.com')) { resolved.appleMusic = response.finalUrl } - } catch (e) { } + } catch (_e) {} } return resolved @@ -532,8 +588,8 @@ async function resolveExternalLinks(externalLinks, makeRequest) { function extractExternalLinks( description, - resolve = false, - makeRequest = null + _resolve = false, + _makeRequest = null ) { if (!description) return null @@ -645,8 +701,8 @@ function extractVideoQualities(streamingData) { } return Array.from(qualityMap.values()).sort((a, b) => { - const resA = Number.parseInt(a.quality) || 0 - const resB = Number.parseInt(b.quality) || 0 + const resA = Number.parseInt(a.quality, 10) || 0 + const resB = Number.parseInt(b.quality, 10) || 0 return resA - resB }) } @@ -722,13 +778,15 @@ function extractAudioTracks(streamingData) { function extractCaptions(captionsData) { if (!captionsData?.playerCaptionsTracklistRenderer?.captionTracks) return [] - return captionsData.playerCaptionsTracklistRenderer.captionTracks.map((c) => ({ - languageCode: c.languageCode, - name: c.name?.simpleText, - isTranslatable: c.isTranslatable, - baseUrl: c.baseUrl, - 
kind: c.kind - })) + return captionsData.playerCaptionsTracklistRenderer.captionTracks.map( + (c) => ({ + languageCode: c.languageCode, + name: c.name?.simpleText, + isTranslatable: c.isTranslatable, + baseUrl: c.baseUrl, + kind: c.kind + }) + ) } function parseLengthAndStream(lengthText, lengthSeconds, isLive) { @@ -754,21 +812,47 @@ function getRendererFromItemData(itemData, itemType) { if (!itemData) return null if (itemType === 'ytmusic') { - return getItemValue(itemData, [ + const data = getItemValue(itemData, [ 'musicResponsiveListItemRenderer', 'playlistPanelVideoRenderer', 'musicTwoColumnItemRenderer' ]) + return data ? { _type: 'track', ...data } : null } - return ( - getItemValue(itemData, [ - 'videoRenderer', - 'compactVideoRenderer', - 'playlistPanelVideoRenderer', - 'gridVideoRenderer' - ]) || (itemData.videoId ? itemData : null) - ) + const rendererTypes = [ + { key: 'videoRenderer', type: 'track' }, + { key: 'compactVideoRenderer', type: 'track' }, + { key: 'playlistRenderer', type: 'playlist' }, + { key: 'compactPlaylistRenderer', type: 'playlist' }, + { key: 'channelRenderer', type: 'channel' }, + { key: 'playlistPanelVideoRenderer', type: 'track' }, + { key: 'gridVideoRenderer', type: 'track' } + ] + + for (const r of rendererTypes) { + if (itemData[r.key]) { + return { _type: r.type, ...itemData[r.key] } + } + } + + if (itemData.elementRenderer) { + const model = getItemValue(itemData.elementRenderer, [ + 'newElement.type.componentType.model' + ]) + const data = + model?.compactChannelModel?.compactChannelData || + model?.compactPlaylistModel?.compactPlaylistData + + if (data) { + return { + _type: model.compactChannelModel ? 'channel' : 'playlist', + ...data + } + } + } + + return itemData.videoId ? { _type: 'track', ...itemData } : null } export async function buildTrack( @@ -786,6 +870,93 @@ export async function buildTrack( const renderer = getRendererFromItemData(itemData, itemType) + if (renderer?._type === 'channel') { + const ch = renderer.channelRenderer || renderer + const channelId = + ch.channelId || + getItemValue(ch, ['onTap.innertubeCommand.browseEndpoint.browseId']) || + getItemValue(ch, ['endpoint.innertubeCommand.browseEndpoint.browseId']) + const title = + ch.attributedTitle?.content || + (typeof ch.title === 'string' + ? ch.title + : getRunsText(ch.title?.runs) || ch.title?.simpleText) || + getRunsText(ch.displayName?.runs) || + FALLBACK_TITLE + + if (!channelId) return null + + const trackInfo = { + identifier: channelId, + isSeekable: false, + author: title, + length: 0, + isStream: false, + position: 0, + title, + uri: `https://www.youtube.com/channel/${channelId}`, + artworkUrl: extractThumbnail(ch, null), + isrc: null, + sourceName: sourceNameOverride || 'youtube' + } + + return { + encoded: encodeTrack(trackInfo), + info: trackInfo, + pluginInfo: { + type: 'channel_result', + videoCount: + getRunsText(ch.videoCountText?.runs) || ch.videoCount || '0', + subscriberCount: + getRunsText(ch.subscriberCountText?.runs) || ch.subscriberCount, + handle: ch.handle + } + } + } + + if (renderer?._type === 'playlist') { + const pl = renderer + const playlistId = pl.playlistId + const title = + pl.attributedTitle?.content || + (typeof pl.title === 'string' + ? pl.title + : getRunsText(pl.title?.runs) || pl.title?.simpleText) || + FALLBACK_TITLE + const author = + (typeof pl.authorName === 'string' + ? 
pl.authorName + : getRunsText(pl.longBylineText?.runs) || + getRunsText(pl.shortBylineText?.runs)) || FALLBACK_AUTHOR + const videoCount = + getRunsText(pl.videoCountText?.runs) || pl.videoCount || '0' + + if (!playlistId) return null + + const trackInfo = { + identifier: playlistId, + isSeekable: false, + author, + length: 0, + isStream: false, + position: 0, + title, + uri: `https://www.youtube.com/playlist?list=${playlistId}`, + artworkUrl: extractThumbnail(pl, null), + isrc: null, + sourceName: sourceNameOverride || 'youtube' + } + + return { + encoded: encodeTrack(trackInfo), + info: trackInfo, + pluginInfo: { + type: 'playlist_result', + videoCount + } + } + } + const videoId = getItemValue(renderer, [ 'playlistItemData.videoId', @@ -809,13 +980,21 @@ export async function buildTrack( if (itemType === 'ytmusic') { title = safeString( - getRunsText(getItemValue(renderer, ['title.runs'])), + getRunsText(getItemValue(renderer, ['title.runs'])) || + getItemValue(renderer, ['title.simpleText']), FALLBACK_TITLE ) const subtitleRuns = getItemValue(renderer, ['subtitle.runs']) + const longBylineRuns = getItemValue(renderer, ['longBylineText.runs']) + const shortBylineRuns = getItemValue(renderer, ['shortBylineText.runs']) + if (Array.isArray(subtitleRuns) && subtitleRuns.length > 0) { author = safeString(subtitleRuns[0]?.text, FALLBACK_AUTHOR) + } else if (Array.isArray(longBylineRuns) && longBylineRuns.length > 0) { + author = safeString(longBylineRuns[0]?.text, FALLBACK_AUTHOR) + } else if (Array.isArray(shortBylineRuns) && shortBylineRuns.length > 0) { + author = safeString(shortBylineRuns[0]?.text, FALLBACK_AUTHOR) } let lengthText = null @@ -826,6 +1005,12 @@ export async function buildTrack( lengthText = lengthRun?.text } + if (!lengthText) { + lengthText = + getItemValue(renderer, ['lengthText.simpleText']) || + getRunsText(getItemValue(renderer, ['lengthText.runs'])) + } + const parsed = parseLengthAndStream( lengthText, itemData.lengthSeconds, @@ -996,7 +1181,7 @@ export async function buildHoloTrack( let thumbnails = {} let viewCount = null - let badges = [] + let _badges = [] let accessibilityLabel = `${trackInfo.title} by ${trackInfo.author}` let publishedAt = null let keywords = [] @@ -1097,7 +1282,7 @@ export async function buildHoloTrack( accessibilityLabel = accessibilityLabel || rendererAccessibility const ownerBadges = renderer.ownerBadges || [] - badges = ownerBadges + _badges = ownerBadges .map((b) => getItemValue(b, [ 'metadataBadgeRenderer.tooltip', @@ -1280,30 +1465,43 @@ export async function fetchEncryptedHostFlags(videoId) { const { body, statusCode, error } = await makeRequest(embedUrl, { method: 'GET', headers: { - 'Referer': 'https://www.google.com', - 'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36' + Referer: 'https://www.google.com', + 'User-Agent': + 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36' } }) if (error || statusCode !== 200 || !body) { - logger('warn', 'fetchEncryptedHostFlags', - `Failed to fetch embed page: ${statusCode} - ${error?.message}`) + logger( + 'warn', + 'fetchEncryptedHostFlags', + `Failed to fetch embed page: ${statusCode} - ${error?.message}` + ) return null } const match = body.match(/"encryptedHostFlags":"([^"]+)"/) - if (match && match[1]) { - logger('debug', 'fetchEncryptedHostFlags', - `Successfully extracted encryptedHostFlags for ${videoId}`) + if (match?.[1]) { + logger( + 'debug', + 
'fetchEncryptedHostFlags', + `Successfully extracted encryptedHostFlags for ${videoId}` + ) return match[1] } - logger('debug', 'fetchEncryptedHostFlags', - 'encryptedHostFlags not found in embed page') + logger( + 'debug', + 'fetchEncryptedHostFlags', + 'encryptedHostFlags not found in embed page' + ) return null } catch (e) { - logger('error', 'fetchEncryptedHostFlags', - `Error fetching encryptedHostFlags: ${e.message}`) + logger( + 'error', + 'fetchEncryptedHostFlags', + `Error fetching encryptedHostFlags: ${e.message}` + ) return null } } @@ -1340,7 +1538,7 @@ export class BaseClient { return {} } - async search(query, type) { + async search(_query, _type) { return { loadType: 'empty', data: {} } } @@ -1372,7 +1570,7 @@ export class BaseClient { if (this.requirePlayerScript() && cipherManager) { try { const playerScript = await cipherManager.getCachedPlayerScript() - if (playerScript && playerScript.url) { + if (playerScript?.url) { const signatureTimestamp = await cipherManager.getTimestamp( playerScript.url ) @@ -1383,8 +1581,11 @@ export class BaseClient { signatureTimestamp } } catch (e) { - logger('warn', `youtube-${this.name}`, - `Failed to get signature timestamp: ${e.message}`) + logger( + 'warn', + `youtube-${this.name}`, + `Failed to get signature timestamp: ${e.message}` + ) } } @@ -1395,7 +1596,9 @@ export class BaseClient { headers: { 'User-Agent': this.getClient(context).client.userAgent, ...(this.getClient(context).client.visitorData - ? { 'X-Goog-Visitor-Id': this.getClient(context).client.visitorData } + ? { + 'X-Goog-Visitor-Id': this.getClient(context).client.visitorData + } : {}), ...(this.isEmbedded() ? { Referer: 'https://www.youtube.com' } : {}), ...headers @@ -1414,7 +1617,7 @@ export class BaseClient { return response } - async _handlePlayerResponse(playerResponse, sourceName, videoId, context) { + async _handlePlayerResponse(playerResponse, sourceName, videoId, _context) { if (!playerResponse || typeof playerResponse !== 'object') { logger( 'error', @@ -1507,7 +1710,7 @@ export class BaseClient { currentVideoId, playlistResponse, sourceName, - context + _context ) { if (playlistResponse?.error) { const errMsg = @@ -1611,7 +1814,7 @@ export class BaseClient { playlistId, browseResponse, sourceName, - context + _context ) { if (browseResponse?.error) { const errMsg = @@ -1783,21 +1986,19 @@ export class BaseClient { } } } else { - const defaultFormats = formats.filter( - (f) => f.audioTrack && f.audioTrack.audioIsDefault - ) + const defaultFormats = formats.filter((f) => f.audioTrack?.audioIsDefault) if (defaultFormats.length > 0) { - logger( - 'debug', - `youtube-${this.name}`, - `Using default audio track.` - ) + logger('debug', `youtube-${this.name}`, `Using default audio track.`) formats = defaultFormats } } - const _attemptCipherResolution = async (formatToResolve, playerScript, context) => { + const _attemptCipherResolution = async ( + formatToResolve, + playerScript, + context + ) => { let currentStreamUrl = formatToResolve.url let currentEncryptedSignature let currentNParam @@ -1860,57 +2061,109 @@ export class BaseClient { exception: { message: 'Failed to obtain player script for deciphering.', severity: 'fault', - cause: 'Internal', - }, + cause: 'Internal' + } } } - logger('debug', `youtube-${this.name}`, `Initial target itags (from config/quality priority): ${targetItags.join(', ')}`) + logger( + 'debug', + `youtube-${this.name}`, + `Initial target itags (from config/quality priority): ${targetItags.join(', ')}` + ) const opusAudioCandidates = 
formats - .filter((format) => targetItags.includes(format.itag) && format.mimeType?.startsWith('audio/')) + .filter( + (format) => + targetItags.includes(format.itag) && + format.mimeType?.startsWith('audio/') + ) .sort((a, b) => targetItags.indexOf(a.itag) - targetItags.indexOf(b.itag)) - logger('debug', `youtube-${this.name}`, `Opus audio-only candidates: ${opusAudioCandidates.map(f => f.itag).join(', ')}`) + logger( + 'debug', + `youtube-${this.name}`, + `Opus audio-only candidates: ${opusAudioCandidates.map((f) => f.itag).join(', ')}` + ) for (const format of opusAudioCandidates) { - resolvedFormat = await _attemptCipherResolution(format, playerScript, context) + resolvedFormat = await _attemptCipherResolution( + format, + playerScript, + context + ) if (resolvedFormat) { - logger('debug', `youtube-${this.name}`, `Resolved format: itag ${resolvedFormat.itag}, mimeType ${resolvedFormat.mimeType}`) + logger( + 'debug', + `youtube-${this.name}`, + `Resolved format: itag ${resolvedFormat.itag}, mimeType ${resolvedFormat.mimeType}` + ) break } } if (!resolvedFormat) { - logger('debug', `youtube-${this.name}`, `Opus audio-only failed. Attempting fallback to itag 18.`) - const itag18Format = formats.find(format => format.itag === 18) + logger( + 'debug', + `youtube-${this.name}`, + `Opus audio-only failed. Attempting fallback to itag 18.` + ) + const itag18Format = formats.find((format) => format.itag === 18) if (itag18Format) { - resolvedFormat = await _attemptCipherResolution(itag18Format, playerScript, context) + resolvedFormat = await _attemptCipherResolution( + itag18Format, + playerScript, + context + ) if (resolvedFormat) { - logger('debug', `youtube-${this.name}`, `Resolved format from itag 18 fallback: itag ${resolvedFormat.itag}, mimeType ${resolvedFormat.mimeType}`) + logger( + 'debug', + `youtube-${this.name}`, + `Resolved format from itag 18 fallback: itag ${resolvedFormat.itag}, mimeType ${resolvedFormat.mimeType}` + ) } else { - logger('debug', `youtube-${this.name}`, `Itag 18 found but could not be resolved.`) + logger( + 'debug', + `youtube-${this.name}`, + `Itag 18 found but could not be resolved.` + ) } } else { - logger('debug', `youtube-${this.name}`, `Itag 18 not found in available formats.`) + logger( + 'debug', + `youtube-${this.name}`, + `Itag 18 not found in available formats.` + ) } } if (!resolvedFormat && !streamingData.hlsManifestUrl) { - logger('debug', `youtube-${this.name}`, 'No suitable stream found after all fallbacks, and no HLS manifest URL.') + logger( + 'debug', + `youtube-${this.name}`, + 'No suitable stream found after all fallbacks, and no HLS manifest URL.' + ) return { exception: { message: 'No suitable audio stream found after all fallbacks.', severity: 'common', - cause: 'Upstream', + cause: 'Upstream' }, - formats, + formats } } else if (!resolvedFormat && streamingData.hlsManifestUrl) { - logger('debug', `youtube-${this.name}`, 'No suitable stream found after all fallbacks, but HLS manifest URL is available. Proceeding with HLS.') + logger( + 'debug', + `youtube-${this.name}`, + 'No suitable stream found after all fallbacks, but HLS manifest URL is available. Proceeding with HLS.' 
+ ) } else { - logger('debug', `youtube-${this.name}`, `Final resolved format: itag ${resolvedFormat?.itag}, mimeType ${resolvedFormat?.mimeType}`) + logger( + 'debug', + `youtube-${this.name}`, + `Final resolved format: itag ${resolvedFormat?.itag}, mimeType ${resolvedFormat?.mimeType}` + ) } const directUrl = @@ -1919,17 +2172,21 @@ export class BaseClient { : undefined if (!directUrl && !streamingData.hlsManifestUrl) { - logger('debug', `youtube-${this.name}`, 'No direct URL resolved and no HLS manifest. Returning error.') + logger( + 'debug', + `youtube-${this.name}`, + 'No direct URL resolved and no HLS manifest. Returning error.' + ) return { exception: { message: 'No suitable audio stream found.', severity: 'common', - cause: 'Upstream', + cause: 'Upstream' }, - formats, + formats } } @@ -1970,7 +2227,7 @@ export class BaseClient { hlsUrl: streamingData.hlsManifestUrl || null, - formats, + formats } } @@ -1983,7 +2240,7 @@ export class BaseClient { } } - async resolve(url, type, context, cipherManager) { + async resolve(url, _type, context, cipherManager) { const sourceName = 'youtube' const urlType = checkURLType(url, 'youtube') const apiEndpoint = this.getApiEndpoint() @@ -2107,7 +2364,7 @@ export class BaseClient { } async getTrackUrl(decodedTrack, context, cipherManager) { - const sourceName = decodedTrack.sourceName || 'youtube' + const _sourceName = decodedTrack.sourceName || 'youtube' const headers = this.oauth ? await this.getAuthHeaders() : {} const { body: playerResponse, statusCode } = await this._makePlayerRequest( diff --git a/src/utils.js b/src/utils.js index 9513dec..a95d836 100644 --- a/src/utils.js +++ b/src/utils.js @@ -14,7 +14,6 @@ import packageJson from '../package.json' with { type: 'json' } import { DEFAULT_MAX_REDIRECTS, DISCORD_ID_REGEX, - HLS_SEGMENT_DOWNLOAD_CONCURRENCY_LIMIT, REDIRECT_STATUS_CODES, SEMVER_PATTERN } from './constants.js' @@ -203,19 +202,19 @@ function validateProperty(value, path, expected, validator) { if (value === undefined || value === null) { throw new Error( `Configuration error:\n` + - `- Property: ${path}\n` + - `- Problem: missing required value\n` + - `- Expected: ${expected}\n\n` + - `Please define ${path} in your config.js file.` + `- Property: ${path}\n` + + `- Problem: missing required value\n` + + `- Expected: ${expected}\n\n` + + `Please define ${path} in your config.js file.` ) } if (!validator(value)) { throw new Error( `Configuration error:\n` + - `- Property: ${path}\n` + - `- Received: ${JSON.stringify(value)} (${typeof value})\n` + - `- Expected: ${expected}` + `- Property: ${path}\n` + + `- Received: ${JSON.stringify(value)} (${typeof value})\n` + + `- Expected: ${expected}` ) } } @@ -503,119 +502,348 @@ function verifyMethod( } function decodeTrack(encoded) { + if (!encoded) throw new Error('Decode Error: Input string is null or empty') + const buffer = Buffer.from(encoded, 'base64') let position = 0 + let step = 'init' + + const ensure = (n) => { + if (position + n > buffer.length) + throw new Error(`Unexpected end of buffer (need ${n} bytes)`) + } + + const readModifiedUTF8From = (buf, pRef) => { + if (pRef.value + 2 > buf.length) + throw new Error('Unexpected end of buffer (need 2 bytes)') + const utflen = buf.readUInt16BE(pRef.value) + pRef.value += 2 + if (pRef.value + utflen > buf.length) + throw new Error(`Unexpected end of buffer (need ${utflen} bytes)`) + + const end = pRef.value + utflen + const chars = [] + let i = pRef.value + + while (i < end) { + const c = buf[i] & 0xff + + if (c < 0x80) { + i += 1 + 
chars.push(String.fromCharCode(c)) + continue + } + + if ((c & 0xe0) === 0xc0) { + if (i + 1 >= end) throw new Error('Malformed utf') + const c2 = buf[i + 1] & 0xff + if ((c2 & 0xc0) !== 0x80) throw new Error('Malformed utf') + const ch = ((c & 0x1f) << 6) | (c2 & 0x3f) + i += 2 + chars.push(String.fromCharCode(ch)) + continue + } + + if ((c & 0xf0) === 0xe0) { + if (i + 2 >= end) throw new Error('Malformed utf') + const c2 = buf[i + 1] & 0xff + const c3 = buf[i + 2] & 0xff + if ((c2 & 0xc0) !== 0x80 || (c3 & 0xc0) !== 0x80) + throw new Error('Malformed utf') + const ch = ((c & 0x0f) << 12) | ((c2 & 0x3f) << 6) | (c3 & 0x3f) + i += 3 + chars.push(String.fromCharCode(ch)) + continue + } - const read = { - byte: () => buffer[position++], - ushort: () => { - const value = buffer.readUInt16BE(position) - position += 2 - return value - }, - int: () => { - const value = buffer.readInt32BE(position) - position += 4 - return value - }, - long: () => { - const value = buffer.readBigInt64BE(position) - position += 8 - return value - }, - utf: () => { - const length = read.ushort() - const value = buffer.toString('utf8', position, position + length) - position += length - return value + throw new Error('Malformed utf') } + + pRef.value = end + return chars.join('') } - const firstInt = read.int() - const isVersioned = ((firstInt & 0xc0000000) >> 30) & 1 - const version = isVersioned ? read.byte() : 1 + const readNullableTextFrom = (buf, pRef) => { + if (pRef.value + 1 > buf.length) + throw new Error('Unexpected end of buffer (need 1 byte)') + const present = buf[pRef.value++] !== 0 + return present ? readModifiedUTF8From(buf, pRef) : null + } - return { - encoded: encoded, - info: { - title: read.utf(), - author: read.utf(), - length: Number(read.long()), - identifier: read.utf(), - isSeekable: !!read.byte(), - isStream: !!read.byte(), - uri: version >= 2 && read.byte() ? read.utf() : null, - artworkUrl: version === 3 && read.byte() ? read.utf() : null, - isrc: version === 3 && read.byte() ? read.utf() : null, - sourceName: read.utf(), - position: Number(read.long()) - }, - pluginInfo: {}, - userData: {} + const decodeDetailsAsList = (detailsBuf) => { + let p = 0 + const ensure2 = (n) => { + if (p + n > detailsBuf.length) + throw new Error('Unexpected end of details') + } + + const readUTF2 = () => { + ensure2(2) + const utflen = detailsBuf.readUInt16BE(p) + p += 2 + ensure2(utflen) + + const end = p + utflen + const chars = [] + let i = p + + while (i < end) { + const c = detailsBuf[i] & 0xff + + if (c < 0x80) { + i += 1 + chars.push(String.fromCharCode(c)) + continue + } + + if ((c & 0xe0) === 0xc0) { + if (i + 1 >= end) throw new Error('Malformed utf') + const c2 = detailsBuf[i + 1] & 0xff + if ((c2 & 0xc0) !== 0x80) throw new Error('Malformed utf') + const ch = ((c & 0x1f) << 6) | (c2 & 0x3f) + i += 2 + chars.push(String.fromCharCode(ch)) + continue + } + + if ((c & 0xf0) === 0xe0) { + if (i + 2 >= end) throw new Error('Malformed utf') + const c2 = detailsBuf[i + 1] & 0xff + const c3 = detailsBuf[i + 2] & 0xff + if ((c2 & 0xc0) !== 0x80 || (c3 & 0xc0) !== 0x80) + throw new Error('Malformed utf') + const ch = ((c & 0x0f) << 12) | ((c2 & 0x3f) << 6) | (c3 & 0x3f) + i += 3 + chars.push(String.fromCharCode(ch)) + continue + } + + throw new Error('Malformed utf') + } + + p = end + return chars.join('') + } + + const readNullable2 = () => { + ensure2(1) + const present = detailsBuf[p++] !== 0 + return present ? 
readUTF2() : null + } + + const out = [] + while (p < detailsBuf.length) out.push(readNullable2()) + while (out.length && out[out.length - 1] === null) out.pop() + return out } -} -function encodeTrack(track) { - const bufferArray = [] - - function write(type, value) { - if (type === 'byte') bufferArray.push(Buffer.from([value])) - if (type === 'ushort') { - const buf = Buffer.alloc(2) - buf.writeUInt16BE(value) - bufferArray.push(buf) + const tryParseSeekableTrailer = (buf) => { + let p = 0 + try { + if (buf.length < 1) return { ok: false } + const present = buf[p++] !== 0 + if (!present) return { ok: false } + const pRef = { value: p } + const s = readModifiedUTF8From(buf, pRef) + if (pRef.value !== buf.length) return { ok: false } + if (s === 'NLK:seekableY') return { ok: true, seekable: true } + if (s === 'NLK:seekableN') return { ok: true, seekable: false } + return { ok: false } + } catch { + return { ok: false } } - if (type === 'int') { - const buf = Buffer.alloc(4) - buf.writeInt32BE(value) - bufferArray.push(buf) + } + + try { + step = 'messageHeader' + ensure(4) + const header = buffer.readInt32BE(position) + position += 4 + + const flags = (header >>> 30) & 0x3 + const messageSize = header & 0x3fffffff + if (messageSize === 0) throw new Error('message size: 0') + + step = 'messageBody' + ensure(messageSize) + let messageBuf = buffer.subarray(position, position + messageSize) + position += messageSize + + let seekable + { + const tailTryMax = Math.min(messageBuf.length, 512) + for (let cut = 1; cut <= tailTryMax; cut++) { + const tail = messageBuf.subarray(messageBuf.length - cut) + const parsed = tryParseSeekableTrailer(tail) + if (parsed.ok) { + seekable = parsed.seekable + messageBuf = messageBuf.subarray(0, messageBuf.length - cut) + break + } + } } - if (type === 'long') { - const buf = Buffer.alloc(8) - buf.writeBigInt64BE(BigInt(value)) - bufferArray.push(buf) + + step = 'payload' + const pRef = { value: 0 } + + if (pRef.value + 1 > messageBuf.length) + throw new Error('Unexpected end of message (need 1 byte)') + const version = messageBuf[pRef.value++] & 0xff + + const title = readModifiedUTF8From(messageBuf, pRef) + const author = readModifiedUTF8From(messageBuf, pRef) + + if (pRef.value + 8 > messageBuf.length) + throw new Error('Unexpected end of message (need 8 bytes)') + const length = Number(messageBuf.readBigInt64BE(pRef.value)) + pRef.value += 8 + + const identifier = readModifiedUTF8From(messageBuf, pRef) + + if (pRef.value + 1 > messageBuf.length) + throw new Error('Unexpected end of message (need 1 byte)') + const isStream = messageBuf[pRef.value++] !== 0 + + const uri = version >= 2 ? readNullableTextFrom(messageBuf, pRef) : null + const artworkUrl = + version >= 3 ? readNullableTextFrom(messageBuf, pRef) : null + const isrc = version >= 3 ? 
readNullableTextFrom(messageBuf, pRef) : null + + const sourceName = readModifiedUTF8From(messageBuf, pRef) + + if (messageBuf.length - pRef.value < 8) + throw new Error('Unexpected end of message (need 8 bytes for position)') + const positionOffset = messageBuf.length - 8 + + const detailsBuf = messageBuf.subarray(pRef.value, positionOffset) + + const trackPosition = Number(messageBuf.readBigInt64BE(positionOffset)) + + let details = [] + if (detailsBuf.length > 0) { + try { + details = decodeDetailsAsList(detailsBuf) + } catch { + details = [] + } } - if (type === 'utf') { - const strBuf = Buffer.from(value, 'utf8') - write('ushort', strBuf.length) - bufferArray.push(strBuf) + + return { + encoded, + info: { + title, + author, + length, + identifier, + isSeekable: typeof seekable === 'boolean' ? seekable : !isStream, + isStream, + uri, + artworkUrl, + isrc, + sourceName, + position: trackPosition + }, + details, + pluginInfo: {}, + userData: {}, + messageFlags: flags } + } catch (err) { + throw new Error( + `Decode Error at [${step}]: ${err.message} (Buffer pos: ${position}/${buffer.length})` + ) } +} - const version = track.artworkUrl || track.isrc ? 3 : track.uri ? 2 : 1 +function encodeTrack(track) { + if (!track || typeof track !== 'object') { + throw new Error('Encode Error: Input track must be a valid object') + } - const isVersioned = version > 1 ? 1 : 0 - const firstInt = isVersioned << 30 - write('int', firstInt) + const encodeModifiedUTF8 = (value) => { + const str = String(value) + const bytes = [] - if (isVersioned) { - write('byte', version) + for (let i = 0; i < str.length; i++) { + const ch = str.charCodeAt(i) + + if (ch >= 0x0001 && ch <= 0x007f) { + bytes.push(ch) + } else if (ch === 0x0000 || (ch >= 0x0080 && ch <= 0x07ff)) { + bytes.push(0xc0 | ((ch >> 6) & 0x1f)) + bytes.push(0x80 | (ch & 0x3f)) + } else { + bytes.push(0xe0 | ((ch >> 12) & 0x0f)) + bytes.push(0x80 | ((ch >> 6) & 0x3f)) + bytes.push(0x80 | (ch & 0x3f)) + } + } + + if (bytes.length > 65535) + throw new Error('Encode Error: UTF string too long') + + const lenBuf = Buffer.alloc(2) + lenBuf.writeUInt16BE(bytes.length) + return Buffer.concat([lenBuf, Buffer.from(bytes)]) } - write('utf', track.title) - write('utf', track.author) - write('long', track.length) - write('utf', track.identifier) - write('byte', track.isSeekable ? 1 : 0) - write('byte', track.isStream ? 1 : 0) + const chunks = [] + const push = (b) => chunks.push(b) - if (version >= 2) { - write('byte', track.uri ? 1 : 0) - if (track.uri) write('utf', track.uri) + const writeByte = (v) => push(Buffer.from([v & 0xff])) + const writeLong = (v) => { + const b = Buffer.alloc(8) + b.writeBigInt64BE(BigInt(v)) + push(b) + } + const writeUTF = (v) => push(encodeModifiedUTF8(v)) + const writeNullableText = (v) => { + if (v === undefined || v === null) { + writeByte(0) + } else { + writeByte(1) + writeUTF(String(v)) + } } - if (version === 3) { - write('byte', track.artworkUrl ? 1 : 0) - if (track.artworkUrl) write('utf', track.artworkUrl) + const version = track.artworkUrl || track.isrc ? 3 : track.uri ? 2 : 1 + const flags = 1 + + const seekable = + typeof track.isSeekable === 'boolean' + ? track.isSeekable + : typeof track?.info?.isSeekable === 'boolean' + ? track.info.isSeekable + : undefined + + writeByte(version) + writeUTF(track.title) + writeUTF(track.author) + writeLong(track.length) + writeUTF(track.identifier) + writeByte(track.isStream ? 1 : 0) + + if (version >= 2) writeNullableText(track.uri ?? 
null) + if (version >= 3) { + writeNullableText(track.artworkUrl ?? null) + writeNullableText(track.isrc ?? null) + } + + writeUTF(track.sourceName) + + writeLong(track.position ?? 0) - write('byte', track.isrc ? 1 : 0) - if (track.isrc) write('utf', track.isrc) + if (typeof seekable === 'boolean') { + writeNullableText(seekable ? 'NLK:seekableY' : 'NLK:seekableN') } - write('utf', track.sourceName) - write('long', track.position) + const messageBuf = Buffer.concat(chunks) + const header = (messageBuf.length & 0x3fffffff) | ((flags & 0x3) << 30) - return Buffer.concat(bufferArray).toString('base64') + const headerBuf = Buffer.alloc(4) + headerBuf.writeInt32BE(header) + + return Buffer.concat([headerBuf, messageBuf]).toString('base64') } const generateRandomLetters = (l) => @@ -647,15 +875,28 @@ function parseClient(agent) { return info } -const httpAgent = new http.Agent({ keepAlive: true }) -const httpsAgent = new https.Agent({ keepAlive: true }) +const httpAgent = new http.Agent({ + keepAlive: true, + maxFreeSockets: 32, + maxSockets: Infinity, + timeout: 60000 +}) +const httpsAgent = new https.Agent({ + keepAlive: true, + maxFreeSockets: 32, + maxSockets: Infinity, + timeout: 60000 +}) const http2FailedHosts = new Set() -setInterval(() => { - if (http2FailedHosts.size > 0) { - http2FailedHosts.clear() - } -}, 6 * 60 * 60 * 1000).unref() +setInterval( + () => { + if (http2FailedHosts.size > 0) { + http2FailedHosts.clear() + } + }, + 6 * 60 * 60 * 1000 +).unref() async function _internalHttp1Request(urlString, options = {}) { const { @@ -671,6 +912,9 @@ async function _internalHttp1Request(urlString, options = {}) { _redirectsFollowed = 0 } = options + const actualLocalAddress = + localAddress || global.nodelink?.routePlanner?.getIP() + if (_redirectsFollowed >= maxRedirects) { throw new Error(`Too many redirects (${maxRedirects}) for ${urlString}`) } @@ -717,7 +961,7 @@ async function _internalHttp1Request(urlString, options = {}) { port: currentUrl.port || (isHttps ? 
443 : 80), path: currentUrl.pathname + currentUrl.search, headers: reqHeaders, - localAddress + localAddress: actualLocalAddress } return new Promise((resolve, reject) => { @@ -792,11 +1036,11 @@ async function _internalHttp1Request(urlString, options = {}) { }) req.on('error', (err) => reject(err)) - req.on('timeout', () => + req.on('timeout', () => { req.destroy( new Error(`Request timed out after ${timeout}ms for ${urlString}`) ) - ) + }) if (payloadBuffer) { req.end(payloadBuffer) @@ -834,7 +1078,7 @@ async function http1makeRequest(urlString, options = {}) { if (isRetryable && attempt < maxRetries) { attempt++ - const delay = 100 * Math.pow(2, attempt) + const delay = 100 * 2 ** attempt logger( 'warn', 'Network', @@ -860,6 +1104,7 @@ async function makeRequest(urlString, options, nodelink) { _redirectsFollowed = 0 } = options + const finalNodeLink = nodelink || global.nodelink const logId = crypto.randomBytes(4).toString('hex') if (loggingConfig.debug?.network) { logger('debug', 'Network', `[${logId}] Request: ${method} ${urlString}`) @@ -886,15 +1131,23 @@ async function makeRequest(urlString, options, nodelink) { new Error(`Too many redirects (${maxRedirects}) for ${urlString}`) ) } - const localAddress = nodelink?.routePlanner?.getIP() + const localAddress = finalNodeLink?.routePlanner?.getIP() try { const url = new URL(urlString) if (http2FailedHosts.has(url.host)) { - return http1makeRequest(urlString, { ...options, localAddress }, nodelink) + return http1makeRequest( + urlString, + { ...options, localAddress }, + finalNodeLink + ) } - } catch (e) { - return http1makeRequest(urlString, { ...options, localAddress }, nodelink) + } catch (_e) { + return http1makeRequest( + urlString, + { ...options, localAddress }, + finalNodeLink + ) } return new Promise((resolve, reject) => { @@ -910,9 +1163,9 @@ async function makeRequest(urlString, options, nodelink) { try { const url = new URL(urlString) http2FailedHosts.add(url.host) - } catch (e) {} + } catch (_e) {} resolve( - http1makeRequest(urlString, { ...options, localAddress }, nodelink) + http1makeRequest(urlString, { ...options, localAddress }, finalNodeLink) ) } @@ -982,7 +1235,7 @@ async function makeRequest(urlString, options, nodelink) { const statusCode = headers[':status'] if (statusCode === 429) { - nodelink?.routePlanner?.banIP(localAddress) + finalNodeLink?.routePlanner?.banIP(localAddress) } if (REDIRECT_STATUS_CODES.includes(statusCode) && headers.location) { @@ -1017,7 +1270,7 @@ async function makeRequest(urlString, options, nodelink) { ? 
disableBodyCompression : undefined }, - nodelink + finalNodeLink ) ) } @@ -1103,7 +1356,7 @@ async function makeRequest(urlString, options, nodelink) { } else { req.end() } - } catch (err) { + } catch (_err) { if (session && !session.closed && !session.destroyed && !sessionClosed) { session.close() } @@ -1112,7 +1365,7 @@ async function makeRequest(urlString, options, nodelink) { }) } -function loadHLS(url, stream, onceEnded = false, shouldEnd = true) { +function loadHLS(url, stream, _onceEnded = false, shouldEnd = true) { //biome-ignore lint: no-promise-executor-return return new Promise(async (resolve) => { try { @@ -1145,61 +1398,33 @@ function loadHLS(url, stream, onceEnded = false, shouldEnd = true) { if (lines[i].startsWith('#EXT-X-ENDLIST')) sawEnd = true } - const downloadPromises = [] - - const writeChunksToStream = async (chunks) => { - for (const chunk of chunks) { - if (!stream.write(chunk)) { - await new Promise((ok) => stream.once('drain', ok)) - } - } - } - for (const segUrl of segs) { if (stream.destroyed) break - const downloadPromise = http1makeRequest(segUrl, { - method: 'GET', - streamOnly: true - }) - .then((s) => { - return new Promise((res, rej) => { - const chunks = [] - s.stream.on('data', (chunk) => chunks.push(chunk)) - s.stream.on('end', () => res(chunks)) - s.stream.on('error', rej) - }) - }) - .catch((err) => { - if (!stream.destroyed) { - console.error( - '[HLS] Error downloading segment', - err.code || err.message - ) - stream.destroy(err) - } - return Promise.reject(err) + try { + const s = await http1makeRequest(segUrl, { + method: 'GET', + streamOnly: true }) - downloadPromises.push(downloadPromise) + if (!s.stream) continue - if (downloadPromises.length >= HLS_SEGMENT_DOWNLOAD_CONCURRENCY_LIMIT) { - if (stream.destroyed) break - try { - const chunks = await downloadPromises.shift() - await writeChunksToStream(chunks) - } catch (e) { - break + await new Promise((res, rej) => { + s.stream.pipe(stream, { end: false }) + s.stream.on('end', res) + s.stream.on('error', rej) + stream.on('error', () => { + s.stream.destroy() + rej(new Error('Destination stream destroyed')) + }) + }) + } catch (err) { + if (!stream.destroyed) { + console.error( + '[HLS] Error downloading segment', + err.code || err.message + ) } - } - } - - while (downloadPromises.length > 0) { - if (stream.destroyed) break - try { - const chunks = await downloadPromises.shift() - await writeChunksToStream(chunks) - } catch (e) { break } } @@ -1330,41 +1555,181 @@ export function cleanupHttpAgents() { function applyEnvOverrides(config, prefix = 'NODELINK') { for (const key in config) { - if (Object.prototype.hasOwnProperty.call(config, key)) { - const envVarName = `${prefix}_${key.toUpperCase()}`; - const envValue = process.env[envVarName]; + if (Object.hasOwn(config, key)) { + const envVarName = `${prefix}_${key.toUpperCase()}` + const envValue = process.env[envVarName] if (envValue !== undefined) { if (typeof config[key] === 'boolean') { - config[key] = envValue.toLowerCase() === 'true'; + config[key] = envValue.toLowerCase() === 'true' } else if (typeof config[key] === 'number') { - const numValue = Number(envValue); - if (!isNaN(numValue)) { - config[key] = numValue; + const numValue = Number(envValue) + if (!Number.isNaN(numValue)) { + config[key] = numValue } else { - logger('warn', 'Config', `Environment variable ${envVarName} has non-numeric value "${envValue}"; expected a number, keeping default.`) + logger( + 'warn', + 'Config', + `Environment variable ${envVarName} has non-numeric 
value "${envValue}"; expected a number, keeping default.` + ) } } else if (typeof config[key] === 'string') { - config[key] = envValue; + config[key] = envValue } else if (Array.isArray(config[key])) { + let newValue = null try { - const parsedArray = JSON.parse(envValue); - if (Array.isArray(parsedArray)) { - config[key] = parsedArray; - } else { - logger('warn', 'Config', `Environment variable ${envVarName} has non-array JSON value "${envValue}"; expected a JSON array, keeping default.`) - } - } catch (e) { - logger('warn', 'Config', `Environment variable ${envVarName} has non-JSON or invalid JSON value "${envValue}"; expected a JSON array, keeping default.`) + const parsedArray = JSON.parse(envValue) + if (Array.isArray(parsedArray)) newValue = parsedArray + } catch (_e) {} + + if (!newValue) { + const splitValue = envValue + .split(',') + .map((s) => s.trim()) + .filter(Boolean) + if (splitValue.length > 0) newValue = splitValue + } + + if (newValue) { + config[key] = newValue + } else { + logger( + 'warn', + 'Config', + `Environment variable ${envVarName} has invalid array value "${envValue}"; keeping default.` + ) } } - } else if (typeof config[key] === 'object' && config[key] !== null && !Array.isArray(config[key])) { - applyEnvOverrides(config[key], envVarName); + } else if ( + typeof config[key] === 'object' && + config[key] !== null && + !Array.isArray(config[key]) + ) { + applyEnvOverrides(config[key], envVarName) } } } } +function getBestMatch(list, original, options = {}) { + const { durationTolerance = 0.15, allowExplicit = true } = options + + const normalize = (str) => { + if (!str) return '' + return str + .toLowerCase() + .replace(/feat\.?/g, '') + .replace(/ft\.?/g, '') + .replace( + /\s*\([^)]*(official|video|audio|mv|visualizer|color\s*coded|hd|4k|prod\.)[^)]*\)/gi, + '' + ) + .replace( + /\s*\[[^\]]*(official|video|audio|mv|visualizer|color\s*coded|hd|4k|prod\.)[^\]]*\]/gi, + '' + ) + .replace(/[^\w\s]/g, '') + .trim() + } + + const specKeywords = [ + 'remix', + 'orchestral', + 'live', + 'cover', + 'acoustic', + 'instrumental', + 'karaoke', + 'radio', + 'edit', + 'extended', + 'slowed', + 'reverb' + ] + const findSpec = (str) => + specKeywords.filter((k) => str.toLowerCase().includes(k)) + + const originalTitle = original.title.toLowerCase() + const originalSpec = findSpec(originalTitle) + const isOriginalExplicit = + original.uri?.includes('explicit=true') || + originalTitle.includes('explicit') + + const targetDuration = original.length + const allowedDiff = targetDuration * durationTolerance + const normOriginalAuthor = normalize(original.author) + const originalWords = new Set( + normalize(original.title) + .split(' ') + .filter((w) => w.length > 1) + ) + + const scored = list.map((item) => { + const itemTitle = item.info.title.toLowerCase() + const normItemTitle = normalize(itemTitle) + const normItemAuthor = normalize(item.info.author) + const itemSpec = findSpec(itemTitle) + const isItemClean = + itemTitle.includes('clean') || itemTitle.includes('radio edit') + let score = 0 + + const itemWords = normItemTitle.split(' ').filter((w) => w.length > 1) + const itemWordsSet = new Set(itemWords) + + let overlap = 0 + for (const word of originalWords) { + if (itemWordsSet.has(word)) overlap++ + } + score += (overlap / Math.max(originalWords.size, 1)) * 300 + + for (const spec of specKeywords) { + const inOriginal = originalSpec.includes(spec) + const inItem = itemSpec.includes(spec) + if (inOriginal && inItem) score += 200 + if (inOriginal !== inItem) score -= 300 + } 
+
+    if (isOriginalExplicit && !allowExplicit) {
+      if (isItemClean) score += 500
+    }
+
+    if (
+      normItemAuthor.includes(normOriginalAuthor) ||
+      normOriginalAuthor.includes(normItemAuthor)
+    ) {
+      score += 150
+    } else {
+      const longer =
+        normOriginalAuthor.length > normItemAuthor.length
+          ? normOriginalAuthor
+          : normItemAuthor
+      const shorter =
+        normOriginalAuthor.length > normItemAuthor.length
+          ? normItemAuthor
+          : normOriginalAuthor
+      if (shorter.length > 2 && longer.includes(shorter)) score += 100
+    }
+
+    if (targetDuration > 0) {
+      const diff = Math.abs(item.info.length - targetDuration)
+      if (diff <= allowedDiff) {
+        score += (1 - diff / allowedDiff) * 100
+      } else {
+        score -= 100
+      }
+    }
+
+    if (itemTitle.includes('official audio') || itemTitle.includes('topic'))
+      score += 50
+
+    return { item, score }
+  })
+
+  scored.sort((a, b) => b.score - a.score)
+
+  return scored[0]?.item || list[0] || null
+}
+
 function cleanupLogger() {
   if (logRotationInterval) {
     clearInterval(logRotationInterval)
@@ -1404,5 +1769,6 @@ export {
   loadHLS,
   checkForUpdates,
   sendErrorResponse,
-  applyEnvOverrides
+  applyEnvOverrides,
+  getBestMatch
 }
diff --git a/src/voice/voiceFrames.js b/src/voice/voiceFrames.js
new file mode 100644
index 0000000..96f1fc0
--- /dev/null
+++ b/src/voice/voiceFrames.js
@@ -0,0 +1,118 @@
+const VOICE_FRAME_OPS = {
+  start: 1,
+  stop: 2,
+  data: 3
+}
+
+const VOICE_FORMATS = {
+  opus: 0,
+  ogg: 1,
+  pcm_s16le: 2
+}
+
+const EMPTY_BUFFER = Buffer.alloc(0)
+const SUPPORTED_FORMATS = new Set(['opus', 'pcm_s16le'])
+
+function resolveVoiceFormat(format, logger) {
+  const normalized = String(format || 'opus').toLowerCase()
+  if (SUPPORTED_FORMATS.has(normalized)) {
+    return { name: normalized, code: VOICE_FORMATS[normalized] }
+  }
+
+  if (logger) {
+    logger(
+      'warn',
+      'Voice',
+      `Unsupported voiceReceive.format "${format}", using "opus".`
+    )
+  }
+
+  return { name: 'opus', code: VOICE_FORMATS.opus }
+}
+
+function buildVoiceFrame(
+  op,
+  formatCode,
+  guildId,
+  userId,
+  ssrc,
+  timestamp,
+  payload = EMPTY_BUFFER
+) {
+  const guildBuf = Buffer.from(String(guildId || ''), 'utf8')
+  const userBuf = Buffer.from(String(userId || ''), 'utf8')
+
+  if (guildBuf.length > 255 || userBuf.length > 255) {
+    throw new Error('Voice frame id too long.')
+  }
+
+  const payloadBuf = payload?.length ? payload : EMPTY_BUFFER
+  const totalLength =
+    1 + 1 + 1 + guildBuf.length + 1 + userBuf.length + 4 + 4 + payloadBuf.length
+
+  const buf = Buffer.allocUnsafe(totalLength)
+  let offset = 0
+
+  buf.writeUInt8(op, offset++)
+  buf.writeUInt8(formatCode, offset++)
+  buf.writeUInt8(guildBuf.length, offset++)
+  guildBuf.copy(buf, offset)
+  offset += guildBuf.length
+  buf.writeUInt8(userBuf.length, offset++)
+  userBuf.copy(buf, offset)
+  offset += userBuf.length
+  buf.writeUInt32BE(ssrc >>> 0, offset)
+  offset += 4
+  buf.writeUInt32BE(timestamp >>> 0, offset)
+  offset += 4
+
+  if (payloadBuf.length > 0) {
+    payloadBuf.copy(buf, offset)
+  }
+
+  return buf
+}
+
+function parseVoiceFrameHeader(buf) {
+  if (!buf || buf.length < 8) return null
+  let offset = 0
+
+  const op = buf.readUInt8(offset++)
+  const format = buf.readUInt8(offset++)
+
+  if (offset >= buf.length) return null
+  const guildLen = buf.readUInt8(offset++)
+  if (offset + guildLen > buf.length) return null
+  const guildId = buf.toString('utf8', offset, offset + guildLen)
+  offset += guildLen
+
+  if (offset >= buf.length) return null
+  const userLen = buf.readUInt8(offset++)
+  if (offset + userLen > buf.length) return null
+  const userId = buf.toString('utf8', offset, offset + userLen)
+  offset += userLen
+
+  if (offset + 8 > buf.length) return null
+  const ssrc = buf.readUInt32BE(offset)
+  offset += 4
+  const timestamp = buf.readUInt32BE(offset)
+  offset += 4
+
+  return {
+    op,
+    format,
+    guildId,
+    userId,
+    ssrc,
+    timestamp,
+    payloadOffset: offset
+  }
+}
+
+export {
+  VOICE_FRAME_OPS,
+  VOICE_FORMATS,
+  resolveVoiceFormat,
+  buildVoiceFrame,
+  parseVoiceFrameHeader
+}
diff --git a/src/voice/voiceRelay.js b/src/voice/voiceRelay.js
new file mode 100644
index 0000000..7e16c9f
--- /dev/null
+++ b/src/voice/voiceRelay.js
@@ -0,0 +1,169 @@
+import discordVoice from '@performanc/voice'
+import { Decoder as OpusDecoder } from '../playback/opus/Opus.js'
+import {
+  buildVoiceFrame,
+  resolveVoiceFormat,
+  VOICE_FORMATS,
+  VOICE_FRAME_OPS
+} from './voiceFrames.js'
+
+const EMPTY_BUFFER = Buffer.alloc(0)
+
+export function createVoiceRelay({ enabled, format, sendFrame, logger }) {
+  if (!enabled || typeof sendFrame !== 'function') {
+    return null
+  }
+
+  const formatInfo = resolveVoiceFormat(format, logger)
+  const activeStreams = new Map()
+  let pcmEnabled = formatInfo.name === 'pcm_s16le'
+  let activeFormatCode = formatInfo.code
+
+  const now = () => Date.now() >>> 0
+
+  const safeSend = (frame) => {
+    try {
+      sendFrame(frame)
+    } catch (err) {
+      if (logger) {
+        logger('error', 'Voice', `Failed to send voice frame: ${err.message}`)
+      }
+    }
+  }
+
+  const cleanupStream = (key) => {
+    const entry = activeStreams.get(key)
+    if (!entry) return null
+
+    entry.dataStream.off('data', entry.onData)
+    entry.dataStream.off('end', entry.onEnd)
+    entry.dataStream.off('close', entry.onEnd)
+    entry.dataStream.off('error', entry.onError)
+
+    if (entry.decoder) {
+      try {
+        entry.stream.unpipe(entry.decoder)
+        entry.decoder.destroy()
+      } catch {}
+    }
+
+    activeStreams.delete(key)
+    return entry
+  }
+
+  const handleSpeakStart = (guildId, userId, ssrc) => {
+    const key = `${guildId}:${ssrc}`
+    if (activeStreams.has(key)) return
+
+    const stream = discordVoice.getSpeakStream(ssrc)
+    if (!stream) return
+
+    let decoder = null
+    let dataStream = stream
+    let formatCode = activeFormatCode
+
+    if (pcmEnabled) {
+      try {
+        decoder = new OpusDecoder({ rate: 48000, channels: 2 })
+        stream.pipe(decoder)
+        dataStream = decoder
+      } catch (err) {
+        pcmEnabled = false
+        activeFormatCode = VOICE_FORMATS.opus
+        formatCode = activeFormatCode
+        if (logger) {
+          logger(
+            'warn',
+            'Voice',
+            `PCM decode unavailable (${err.message}); sending opus instead.`
+          )
+        }
+      }
+    }
+
+    const startFrame = buildVoiceFrame(
+      VOICE_FRAME_OPS.start,
+      formatCode,
+      guildId,
+      userId,
+      ssrc,
+      now(),
+      EMPTY_BUFFER
+    )
+    safeSend(startFrame)
+
+    const onData = (chunk) => {
+      const frame = buildVoiceFrame(
+        VOICE_FRAME_OPS.data,
+        formatCode,
+        guildId,
+        userId,
+        ssrc,
+        now(),
+        chunk
+      )
+      safeSend(frame)
+    }
+
+    const onEnd = () => {
+      handleSpeakStop(guildId, userId, ssrc)
+    }
+
+    const onError = (err) => {
+      if (logger) {
+        logger('warn', 'Voice', `Voice stream error: ${err?.message || err}`)
+      }
+      handleSpeakStop(guildId, userId, ssrc)
+    }
+
+    dataStream.on('data', onData)
+    dataStream.once('end', onEnd)
+    dataStream.once('close', onEnd)
+    dataStream.once('error', onError)
+
+    activeStreams.set(key, {
+      stream,
+      dataStream,
+      decoder,
+      formatCode,
+      onData,
+      onEnd,
+      onError,
+      userId
+    })
+  }
+
+  const handleSpeakStop = (guildId, userId, ssrc) => {
+    const key = `${guildId}:${ssrc}`
+    const entry = cleanupStream(key)
+    const finalUserId = entry?.userId || userId
+    const formatCode = entry?.formatCode ?? activeFormatCode
+    if (!finalUserId) return
+
+    const stopFrame = buildVoiceFrame(
+      VOICE_FRAME_OPS.stop,
+      formatCode,
+      guildId,
+      finalUserId,
+      ssrc,
+      now(),
+      EMPTY_BUFFER
+    )
+    safeSend(stopFrame)
+  }
+
+  const attach = (connection, guildId) => {
+    if (!connection || connection._voiceRelayAttached) return
+    connection._voiceRelayAttached = true
+
+    connection.on('speakStart', (userId, ssrc) => {
+      handleSpeakStart(guildId, userId, ssrc)
+    })
+
+    connection.on('speakEnd', (userId, ssrc) => {
+      handleSpeakStop(guildId, userId, ssrc)
+    })
+  }
+
+  return { attach }
+}
diff --git a/src/worker.js b/src/worker.js
index 2953cf5..b735c19 100644
--- a/src/worker.js
+++ b/src/worker.js
@@ -1,16 +1,36 @@
+import net from 'node:net'
+import os from 'node:os'
+import { monitorEventLoopDelay } from 'node:perf_hooks'
+import v8 from 'node:v8'
 import { GatewayEvents } from './constants.js'
-
-let lastCpuUsage = process.cpuUsage()
-let lastCpuTime = Date.now()
-
 import ConnectionManager from './managers/connectionManager.js'
+import CredentialManager from './managers/credentialManager.js'
 import LyricsManager from './managers/lyricsManager.js'
 import PluginManager from './managers/pluginManager.js'
 import RoutePlannerManager from './managers/routePlannerManager.js'
 import SourceManager from './managers/sourceManager.js'
 import StatsManager from './managers/statsManager.js'
+import { bufferPool } from './playback/BufferPool.js'
 import { Player } from './playback/player.js'
-import { initLogger, logger } from './utils.js'
+import { createPCMStream } from './playback/streamProcessor.js'
+import { cleanupHttpAgents, initLogger, logger } from './utils.js'
+import { createVoiceRelay } from './voice/voiceRelay.js'
+
+let lastCpuUsage = process.cpuUsage()
+let lastCpuTime = Date.now()
+let lastActivityTime = Date.now()
+let isHibernating = false
+let playerUpdateTimer = null
+let statsUpdateTimer = null
+
+const hndl = monitorEventLoopDelay({ resolution: 10 })
+hndl.enable()
+
+try {
+  os.setPriority(os.constants.priority.PRIORITY_HIGH)
+} catch (_e) {
+  // Ignore errors
+}
 
 let config
 try {
@@ -19,16 +39,214 @@ try {
   config = (await import('../config.default.js')).default
 }
 
+const HIBERNATION_ENABLED = config.cluster?.hibernation?.enabled !== false
+
+const HIBERNATION_TIMEOUT =
+
config.cluster?.hibernation?.timeoutMs || 20 * 60 * 1000 + initLogger(config) const players = new Map() -const commandQueue = [] +const guildQueues = new Map() // guildId -> { queue: [], processing: false } +const activeStreams = new Map() + +let eventSocket = null +const eventSocketPath = process.env.EVENT_SOCKET_PATH + +if (eventSocketPath) { + const connect = () => { + const socket = net.createConnection(eventSocketPath, () => { + eventSocket = socket + logger('info', 'Worker', 'Connected to Master event socket') + }) + socket.on('error', () => { + eventSocket = null + setTimeout(connect, 1000) + }) + socket.on('close', () => { + eventSocket = null + setTimeout(connect, 1000) + }) + } + connect() +} + +let commandSocket = null +const commandSocketPath = process.env.COMMAND_SOCKET_PATH + +if (commandSocketPath) { + const connect = () => { + const socket = net.createConnection(commandSocketPath, () => { + commandSocket = socket + sendCommandHello() + logger('info', 'Worker', 'Connected to Master command socket') + }) + + let buffer = Buffer.alloc(0) + + socket.on('data', (chunk) => { + buffer = Buffer.concat([buffer, chunk]) + + while (buffer.length >= 6) { + const idSize = buffer.readUInt8(0) + const type = buffer.readUInt8(1) + const payloadSize = buffer.readUInt32BE(2) + const totalSize = 6 + idSize + payloadSize + + if (buffer.length < totalSize) break + + const id = buffer.toString('utf8', 6, 6 + idSize) + const payload = buffer.subarray(6 + idSize, totalSize) + buffer = buffer.subarray(totalSize) + + if (type === 1) { + try { + const data = v8.deserialize(payload) + enqueueCommand(data?.type, id, data?.payload) + } catch (e) { + logger( + 'error', + 'Worker', + `Command socket parse error: ${e.message}` + ) + } + } + } + }) + + socket.on('error', () => { + commandSocket = null + setTimeout(connect, 1000) + }) + socket.on('close', () => { + commandSocket = null + setTimeout(connect, 1000) + }) + } + connect() +} + +function sendEventFrame(type, data) { + if (!eventSocket || eventSocket.destroyed) return false + + const payload = JSON.stringify(data) + const payloadBuf = Buffer.from(payload, 'utf8') + + const header = Buffer.alloc(6) + header.writeUInt8(0, 0) // No ID needed for these events + header.writeUInt8(type, 1) + header.writeUInt32BE(payloadBuf.length, 2) + + return eventSocket.write(Buffer.concat([header, payloadBuf])) +} + +function sendEventBinaryFrame(type, payloadBuf) { + if (!eventSocket || eventSocket.destroyed) return false + + const header = Buffer.alloc(6) + header.writeUInt8(0, 0) + header.writeUInt8(type, 1) + header.writeUInt32BE(payloadBuf.length, 2) + + return eventSocket.write(Buffer.concat([header, payloadBuf])) +} + +function sendStreamFrame(streamId, type, payloadBuf) { + if (!eventSocket || eventSocket.destroyed) return false + + const idBuf = Buffer.from(streamId, 'utf8') + const header = Buffer.alloc(6) + header.writeUInt8(idBuf.length, 0) + header.writeUInt8(type, 1) + header.writeUInt32BE(payloadBuf.length, 2) + + return eventSocket.write(Buffer.concat([header, idBuf, payloadBuf])) +} + +function sendStreamChunk(streamId, chunk) { + const payload = Buffer.isBuffer(chunk) ? 
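// Illustrative sketch of the 6-byte frame layout used on the worker's event and
// command sockets above: [idSize u8][type u8][payloadSize u32BE][id utf8][payload].
// encodeFrame/createFrameDecoder are hypothetical helper names that only mirror the
// byte layout of the parser in the patch; they are not exports of the codebase.
function encodeFrame(type, id, payload) {
  const idBuf = Buffer.from(id || '', 'utf8')
  const header = Buffer.alloc(6)
  header.writeUInt8(idBuf.length, 0)
  header.writeUInt8(type, 1)
  header.writeUInt32BE(payload.length, 2)
  return Buffer.concat([header, idBuf, payload])
}

function createFrameDecoder(onFrame) {
  let buffer = Buffer.alloc(0)
  return (chunk) => {
    buffer = Buffer.concat([buffer, chunk])
    // A frame is complete once the fixed header plus id and payload have arrived.
    while (buffer.length >= 6) {
      const idSize = buffer.readUInt8(0)
      const type = buffer.readUInt8(1)
      const payloadSize = buffer.readUInt32BE(2)
      const totalSize = 6 + idSize + payloadSize
      if (buffer.length < totalSize) break
      const id = buffer.toString('utf8', 6, 6 + idSize)
      const payload = buffer.subarray(6 + idSize, totalSize)
      buffer = buffer.subarray(totalSize)
      onFrame({ type, id, payload })
    }
  }
}

// Round trip: frames survive arbitrary chunk boundaries on the socket.
const push = createFrameDecoder((f) => console.log(f.type, f.id, f.payload.length))
const wire = encodeFrame(1, 'req-1', Buffer.from('{"type":"ping"}'))
push(wire.subarray(0, 4)) // partial header
push(wire.subarray(4)) // remainder completes the frame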
chunk : Buffer.from(chunk) + sendStreamFrame(streamId, 5, payload) +} + +function sendStreamEnd(streamId) { + sendStreamFrame(streamId, 6, Buffer.alloc(0)) +} + +function sendStreamError(streamId, error) { + const payload = Buffer.from(String(error || 'Unknown error'), 'utf8') + sendStreamFrame(streamId, 7, payload) +} + +function sendCommandFrame(type, requestId, payloadBuf) { + if (!commandSocket || commandSocket.destroyed) return false + + const idBuf = Buffer.from(requestId || '', 'utf8') + const header = Buffer.alloc(6) + header.writeUInt8(idBuf.length, 0) + header.writeUInt8(type, 1) + header.writeUInt32BE(payloadBuf.length, 2) + + return commandSocket.write(Buffer.concat([header, idBuf, payloadBuf])) +} + +function sendCommandHello() { + if (!commandSocket || commandSocket.destroyed) return false + const payload = Buffer.from(JSON.stringify({ pid: process.pid }), 'utf8') + return sendCommandFrame(0, '', payload) +} + +function sendCommandResult(requestId, payload) { + const payloadBuf = v8.serialize(payload) + if (sendCommandFrame(2, requestId, payloadBuf)) return true + + if (process.connected) { + try { + process.send({ type: 'commandResult', requestId, payload }) + return true + } catch (e) { + logger( + 'error', + 'Worker-IPC', + `Failed to send commandResult for ${requestId}: ${e.message}` + ) + } + } + return false +} + +function sendCommandError(requestId, error) { + const payloadBuf = v8.serialize(String(error || 'Unknown error')) + if (sendCommandFrame(3, requestId, payloadBuf)) return true + + if (process.connected) { + try { + process.send({ type: 'commandResult', requestId, error: String(error) }) + return true + } catch (e) { + logger( + 'error', + 'Worker-IPC', + `Failed to send commandResult (error) for ${requestId}: ${e.message}` + ) + } + } + return false +} + const nodelink = { options: config, logger } +nodelink.voiceRelay = createVoiceRelay({ + enabled: config.voiceReceive?.enabled, + format: config.voiceReceive?.format, + sendFrame: (frame) => sendEventBinaryFrame(8, frame), + logger +}) + nodelink.statsManager = new StatsManager(nodelink) +nodelink.credentialManager = new CredentialManager(nodelink) nodelink.sources = new SourceManager(nodelink) nodelink.lyrics = new LyricsManager(nodelink) nodelink.routePlanner = new RoutePlannerManager(nodelink) @@ -48,6 +266,204 @@ nodelink.extensions = { audioInterceptors: [] } +function setEfficiencyMode(enabled) { + try { + os.setPriority( + process.pid, + enabled + ? os.constants.priority.PRIORITY_LOW + : os.constants.priority.PRIORITY_HIGH + ) + if (enabled) { + v8.setFlagsFromString('--optimize-for-size') + } else { + v8.setFlagsFromString('--no-optimize-for-size') + } + } catch (_e) {} +} + +function startTimers(hibernating = false) { + if (playerUpdateTimer) clearInterval(playerUpdateTimer) + if (statsUpdateTimer) clearInterval(statsUpdateTimer) + + const updateInterval = hibernating + ? 60000 + : (config?.playerUpdateInterval ?? 5000) + const statsInterval = hibernating + ? 120000 + : config?.metrics?.enabled + ? 5000 + : (config?.statsUpdateInterval ?? 30000) + const zombieThreshold = config?.zombieThresholdMs ?? 
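// Illustrative sketch of why the command socket above uses v8.serialize()/deserialize()
// rather than JSON for request and result payloads: the structured-clone format
// round-trips binary data and Maps that JSON.stringify would mangle or drop.
// The payload shape below is invented for the example.
import v8 from 'node:v8'

const result = {
  loadType: 'track',
  waveform: Buffer.from([0x01, 0x02, 0x03]), // binary survives as a typed-array view
  perGuild: new Map([['1234567890', { players: 1 }]]) // Maps survive as Maps
}

const wire = v8.serialize(result) // Buffer, ready for a length-prefixed frame
const restored = v8.deserialize(wire)

console.log(restored.waveform instanceof Uint8Array) // true – bytes preserved
console.log(restored.perGuild instanceof Map) // true – structure preserved
console.log(JSON.parse(JSON.stringify(result)).perGuild) // {} – the Map is lost with JSON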
60000 + + playerUpdateTimer = setInterval(() => { + if (!process.connected) return + + for (const player of players.values()) { + if (player?.track && !player.isPaused && player.connection) { + if ( + player._lastStreamDataTime > 0 && + Date.now() - player._lastStreamDataTime >= zombieThreshold + ) { + logger( + 'warn', + 'Player', + `Player for guild ${player.guildId} detected as zombie (no stream data).` + ) + player.emitEvent(GatewayEvents.TRACK_STUCK, { + guildId: player.guildId, + track: player.track, + reason: 'no_stream_data', + thresholdMs: zombieThreshold + }) + } + try { + player._sendUpdate() + } catch (updateError) { + logger( + 'error', + 'Worker', + `Error during player update for guild ${player.guildId}: ${updateError.message}`, + updateError + ) + } + } + } + }, updateInterval) + + statsUpdateTimer = setInterval(() => { + if (!process.connected) return + + let localPlayers = 0 + let localPlayingPlayers = 0 + const localFrameStats = { sent: 0, nulled: 0, deficit: 0, expected: 0 } + + for (const player of players.values()) { + localPlayers++ + if (!player.isPaused && player.track) { + localPlayingPlayers++ + } + + if (player?.track && !player.isPaused && player.connection) { + if (player.connection.statistics) { + localFrameStats.sent += player.connection.statistics.packetsSent || 0 + localFrameStats.nulled += + player.connection.statistics.packetsLost || 0 + localFrameStats.expected += + player.connection.statistics.packetsExpected || 0 + } + } + } + + localFrameStats.deficit += Math.max( + 0, + localFrameStats.expected - localFrameStats.sent + ) + + if (localPlayers === 0 && HIBERNATION_ENABLED) { + if ( + !isHibernating && + Date.now() - lastActivityTime > HIBERNATION_TIMEOUT + ) { + logger( + 'info', + 'Worker', + 'Worker entering hibernation mode (Efficiency Mode).' + ) + isHibernating = true + bufferPool.clear() + cleanupHttpAgents() + nodelink.connectionManager.stop() + setEfficiencyMode(true) + startTimers(true) + + if (global.gc) { + let cycles = 0 + const aggressiveGC = setInterval(() => { + try { + global.gc() + cycles++ + if (cycles >= 3) clearInterval(aggressiveGC) + } catch (_e) { + clearInterval(aggressiveGC) + } + }, 1000) + } + } + } else { + lastActivityTime = Date.now() + if (isHibernating) { + isHibernating = false + setEfficiencyMode(false) + nodelink.connectionManager.start() + startTimers(false) + } + } + + try { + const now = Date.now() + const elapsedMs = now - lastCpuTime + const cpuUsage = process.cpuUsage(lastCpuUsage) + lastCpuTime = now + lastCpuUsage = process.cpuUsage() + + const nodelinkLoad = + elapsedMs > 0 ? (cpuUsage.user + cpuUsage.system) / 1000 / elapsedMs : 0 + + const mem = process.memoryUsage() + + const stats = { + workerId: parseInt(process.env.NODE_UNIQUE_ID || 0, 10) + 1, + isHibernating, + players: localPlayers, + playingPlayers: localPlayingPlayers, + commandQueueLength: Array.from(guildQueues.values()).reduce( + (acc, curr) => acc + curr.queue.length, + 0 + ), + cpu: { nodelinkLoad }, + eventLoopLag: hndl.mean / 1e6, + memory: { + used: mem.heapUsed, + allocated: mem.heapTotal + }, + frameStats: localFrameStats + } + + if (eventSocket && !eventSocket.destroyed) { + sendEventFrame(4, stats) + } else if (process.connected) { + const success = process.send({ + type: 'workerStats', + pid: process.pid, + stats + }) + + if (!success) { + logger( + 'warn', + 'Worker-IPC', + 'IPC channel saturated, skipping non-critical workerStats update.' 
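// Illustrative sketch of the load figure computed in the stats timer above.
// process.cpuUsage() reports microseconds, so (user + system) / 1000 is CPU time in
// milliseconds; dividing by the wall-clock interval gives a fraction where values
// above 1 mean more than one core busy. createCpuSampler is a hypothetical helper,
// not part of the patch.
function createCpuSampler() {
  let lastCpuUsage = process.cpuUsage()
  let lastCpuTime = Date.now()

  return () => {
    const now = Date.now()
    const elapsedMs = now - lastCpuTime
    const delta = process.cpuUsage(lastCpuUsage) // µs spent since the last sample
    lastCpuTime = now
    lastCpuUsage = process.cpuUsage()

    return elapsedMs > 0 ? (delta.user + delta.system) / 1000 / elapsedMs : 0
  }
}

const sample = createCpuSampler()
setInterval(() => {
  console.log(`process load: ${(sample() * 100).toFixed(1)}%`)
}, 5000)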
+ ) + } + } + } catch (e) { + if (process.connected) { + logger( + 'error', + 'Worker-IPC', + `Failed to send workerStats: ${e.message}` + ) + } + } + }, statsInterval) +} + +nodelink.extensions = { + workerInterceptors: [], + audioInterceptors: [] +} + nodelink.registerWorkerInterceptor = (fn) => { nodelink.extensions.workerInterceptors.push(fn) logger('info', 'Worker', 'Registered worker command interceptor') @@ -55,7 +471,11 @@ nodelink.registerWorkerInterceptor = (fn) => { nodelink.registerSource = (name, source) => { if (!nodelink.sources) { - logger('warn', 'Worker', 'Cannot register source (sources manager not ready).') + logger( + 'warn', + 'Worker', + 'Cannot register source (sources manager not ready).' + ) return } nodelink.sources.sources.set(name, source) @@ -69,16 +489,21 @@ nodelink.registerFilter = (name, filter) => { } nodelink.registerAudioInterceptor = (interceptor) => { - if (!nodelink.extensions.audioInterceptors) nodelink.extensions.audioInterceptors = [] + if (!nodelink.extensions.audioInterceptors) + nodelink.extensions.audioInterceptors = [] nodelink.extensions.audioInterceptors.push(interceptor) logger('info', 'Worker', 'Registered custom audio interceptor') } async function initialize() { + await nodelink.credentialManager.load() await nodelink.sources.loadFolder() await nodelink.lyrics.loadFolder() await nodelink.statsManager.initialize() await nodelink.pluginManager.load('worker') + + lastActivityTime = Date.now() + logger( 'info', 'Worker', @@ -87,6 +512,7 @@ async function initialize() { } initialize() +startTimers(false) process.on('uncaughtException', (err) => { const isStreamAbort = @@ -104,7 +530,7 @@ process.on('uncaughtException', (err) => { 'Worker–Crash', `Uncaught Exception: ${err.stack || err.message}` ) - process.exit(1) + process.stderr.write('', () => process.exit(1)) }) process.on('unhandledRejection', (reason, promise) => { @@ -115,10 +541,109 @@ process.on('unhandledRejection', (reason, promise) => { ) }) -async function processQueue() { - if (commandQueue.length === 0) return +function cleanupActiveStream(streamId, entry) { + const current = entry || activeStreams.get(streamId) + if (!current) return + + if (current.pcmStream && !current.pcmStream.destroyed) { + current.pcmStream.destroy() + } + if (current.fetched?.stream && !current.fetched.stream.destroyed) { + current.fetched.stream.destroy() + } + + activeStreams.delete(streamId) +} + +async function startLoadStream(streamId, payload) { + if (!eventSocket || eventSocket.destroyed) { + throw new Error('Event socket unavailable') + } + + const trackInfo = payload?.decodedTrackInfo + if (!trackInfo) { + throw new Error('Invalid encoded track') + } + + const urlResult = await nodelink.sources.getTrackUrl(trackInfo) + if (urlResult.exception) { + throw new Error(urlResult.exception.message || 'Failed to get track URL') + } + + const additionalData = { + ...(urlResult.additionalData || {}), + startTime: payload?.position || 0 + } + + const fetched = await nodelink.sources.getTrackStream( + urlResult.newTrack?.info || trackInfo, + urlResult.url, + urlResult.protocol, + additionalData + ) + + if (fetched.exception) { + throw new Error(fetched.exception.message || 'Failed to load stream') + } + + const pcmStream = createPCMStream( + fetched.stream, + fetched.type || urlResult.format, + nodelink, + (payload?.volume ?? 
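// Illustrative sketch of the worker-interceptor hook registered above. It assumes a
// plugin is handed the worker's `nodelink` object (the exact plugin entry point is
// defined by PluginManager outside this hunk), and the blocking policy is invented.
// Returning `true` short-circuits the command: the caller receives { intercepted: true }
// instead of a normal result.
export default function register(nodelink) {
  nodelink.registerWorkerInterceptor(async (type, _payload) => {
    // Example policy: refuse runtime YouTube config updates on this worker.
    if (type === 'updateYoutubeConfig') {
      nodelink.logger('warn', 'ExamplePlugin', 'Blocking updateYoutubeConfig command')
      return true // block – worker replies with { intercepted: true }
    }
    return false // let every other command through
  })
}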
100) / 100, + payload?.filters || {} + ) + + const entry = { pcmStream, fetched, cancelled: false } + activeStreams.set(streamId, entry) + + const finish = (err) => { + if (entry.cancelled) { + cleanupActiveStream(streamId, entry) + return + } + + if (err) sendStreamError(streamId, err.message || err) + else sendStreamEnd(streamId) - const { type, requestId, payload } = commandQueue.shift() + cleanupActiveStream(streamId, entry) + } + + pcmStream.on('data', (chunk) => { + if (!entry.cancelled) sendStreamChunk(streamId, chunk) + }) + + pcmStream.once('end', () => finish()) + pcmStream.once('error', (err) => finish(err)) + pcmStream.once('close', () => finish()) +} + +function cancelStream(streamId) { + const entry = activeStreams.get(streamId) + if (!entry) return false + entry.cancelled = true + cleanupActiveStream(streamId, entry) + return true +} + +async function processQueue(queueKey) { + const queueEntry = guildQueues.get(queueKey) + if (!queueEntry || queueEntry.queue.length === 0) { + if (queueEntry) queueEntry.processing = false + return + } + + queueEntry.processing = true + const { type, requestId, payload } = queueEntry.queue.shift() + + lastActivityTime = Date.now() + if (isHibernating) { + logger('info', 'Worker', 'Worker waking up from hibernation.') + isHibernating = false + setEfficiencyMode(false) + nodelink.connectionManager.start() + startTimers(false) + } // Execute Worker Interceptors const interceptors = nodelink.extensions.workerInterceptors @@ -127,9 +652,7 @@ async function processQueue() { try { const shouldBlock = await interceptor(type, payload) if (shouldBlock === true) { - if (process.connected && requestId) { - process.send({ type: 'commandResult', requestId, payload: { intercepted: true } }) - } + if (requestId) sendCommandResult(requestId, { intercepted: true }) setImmediate(processQueue) return } @@ -155,7 +678,9 @@ async function processQueue() { userId: userId, socket: { send: (data) => { - if (process.connected) { + if (eventSocket && !eventSocket.destroyed) { + sendEventFrame(3, { sessionId, guildId, data }) + } else if (process.connected) { try { process.send({ type: 'playerEvent', @@ -243,7 +768,9 @@ async function processQueue() { userId: userId, socket: { send: (data) => { - if (process.connected) { + if (eventSocket && !eventSocket.destroyed) { + sendEventFrame(3, { sessionId, guildId, data }) + } else if (process.connected) { try { process.send({ type: 'playerEvent', @@ -317,14 +844,14 @@ async function processQueue() { } case 'loadLyrics': { - const { decodedTrack, language } = payload - result = await nodelink.lyrics.loadLyrics(decodedTrack, language) + const { decodedTrackInfo, language } = payload + result = await nodelink.lyrics.loadLyrics({ info: decodedTrackInfo }, language) break } case 'loadChapters': { - const { decodedTrack } = payload - result = await nodelink.sources.getChapters(decodedTrack) + const { decodedTrackInfo } = payload + result = await nodelink.sources.getChapters({ info: decodedTrackInfo }) break } case 'getSources': { @@ -337,13 +864,34 @@ async function processQueue() { break } + case 'loadStream': { + const streamId = payload?.streamId || requestId + try { + await startLoadStream(streamId, payload) + result = { streaming: true, streamId } + } catch (e) { + sendStreamError(streamId, e.message || e) + result = { streaming: false, error: e.message || String(e) } + } + break + } + + case 'cancelStream': { + const streamId = payload?.streamId || requestId + result = { cancelled: cancelStream(streamId) } + break + } + case 
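// Illustrative sketch of the per-guild ordering that guildQueues/enqueueCommand/
// processQueue implement above: commands for the same guild run strictly in order,
// while different guilds drain concurrently. createKeyedQueue is a hypothetical
// stand-alone helper with the same shape, not code from the patch.
function createKeyedQueue(handler) {
  const queues = new Map() // key -> { queue: [], processing: false }

  const drain = async (key) => {
    const entry = queues.get(key)
    if (!entry || entry.queue.length === 0) {
      if (entry) entry.processing = false
      return
    }
    entry.processing = true
    const job = entry.queue.shift()
    try {
      await handler(key, job)
    } finally {
      // Re-schedule instead of looping so one busy key cannot starve the event loop.
      if (entry.queue.length > 0) setImmediate(() => drain(key))
      else entry.processing = false
    }
  }

  return (key, job) => {
    if (!queues.has(key)) queues.set(key, { queue: [], processing: false })
    const entry = queues.get(key)
    entry.queue.push(job)
    if (!entry.processing) setImmediate(() => drain(key))
  }
}

// Usage: jobs for guild 'a' stay ordered even though guild 'b' interleaves.
const enqueue = createKeyedQueue(async (key, job) => console.log(key, job))
enqueue('a', 1)
enqueue('b', 1)
enqueue('a', 2)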
'updateYoutubeConfig': { try { const { refreshToken, visitorData } = payload const youtube = nodelink.sources.sources.get('youtube') if (!youtube) { - result = { success: false, reason: 'YouTube source not loaded on this worker' } + result = { + success: false, + reason: 'YouTube source not loaded on this worker' + } break } @@ -352,9 +900,17 @@ async function processQueue() { youtube.oauth.refreshToken = refreshToken youtube.oauth.accessToken = null youtube.oauth.tokenExpiry = 0 - logger('info', 'Worker', 'YouTube OAuth refresh token updated via API.') + logger( + 'info', + 'Worker', + 'YouTube OAuth refresh token updated via API.' + ) } else { - logger('warn', 'Worker', 'Cannot update refreshToken: youtube.oauth is undefined.') + logger( + 'warn', + 'Worker', + 'Cannot update refreshToken: youtube.oauth is undefined.' + ) } } @@ -363,13 +919,21 @@ async function processQueue() { youtube.ytContext.client.visitorData = visitorData logger('info', 'Worker', 'YouTube visitorData updated via API.') } else { - logger('warn', 'Worker', 'Cannot update visitorData: youtube.ytContext.client is undefined.') + logger( + 'warn', + 'Worker', + 'Cannot update visitorData: youtube.ytContext.client is undefined.' + ) } } result = { success: true } } catch (err) { - logger('error', 'Worker', `Error updating YouTube config: ${err.message}`) + logger( + 'error', + 'Worker', + `Error updating YouTube config: ${err.message}` + ) result = { success: false, error: err.message } } break @@ -379,36 +943,35 @@ async function processQueue() { throw new Error(`Unknown command type: ${type}`) } - if (process.connected) { - try { - process.send({ type: 'commandResult', requestId, payload: result }) - } catch (e) { - logger( - 'error', - 'Worker-IPC', - `Failed to send commandResult for ${requestId}: ${e.message}` - ) - } - } + if (requestId) sendCommandResult(requestId, result) } catch (e) { - if (process.connected) { - try { - process.send({ type: 'commandResult', requestId, error: e.message }) - } catch (e) { - logger( - 'error', - 'Worker-IPC', - `Failed to send commandResult (error) for ${requestId}: ${e.message}` - ) - } - } + if (requestId) sendCommandError(requestId, e.message) } finally { - if (commandQueue.length > 0) { - setImmediate(processQueue) + const queueEntry = guildQueues.get(queueKey) + if (queueEntry && queueEntry.queue.length > 0) { + setImmediate(() => processQueue(queueKey)) + } else { + if (queueEntry) queueEntry.processing = false } } } +function enqueueCommand(type, requestId, payload) { + if (!type || !requestId) return + + const guildId = payload?.guildId || 'global' + if (!guildQueues.has(guildId)) { + guildQueues.set(guildId, { queue: [], processing: false }) + } + + const queueEntry = guildQueues.get(guildId) + queueEntry.queue.push({ type, requestId, payload }) + + if (!queueEntry.processing) { + setImmediate(() => processQueue(guildId)) + } +} + process.on('message', (msg) => { if (msg.type === 'ping') { if (process.connected) { @@ -423,16 +986,9 @@ process.on('message', (msg) => { if (!msg.type || !msg.requestId) return - commandQueue.push(msg) - - if (commandQueue.length === 1) { - setImmediate(processQueue) - } + enqueueCommand(msg.type, msg.requestId, msg.payload) }) -const updateInterval = config?.playerUpdateInterval ?? 5000 -const zombieThreshold = config?.zombieThresholdMs ?? 
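// Illustrative sketch of how the loadStream / cancelStream frames emitted above
// (type 5 = chunk, 6 = end, 7 = error, with the stream id carried in the frame's id
// field) can be demultiplexed into per-stream Readables. The real receiving side
// lives outside this hunk; only the frame-type numbers and id usage are taken from
// sendStreamChunk/sendStreamEnd/sendStreamError, and the helper names are hypothetical.
import { PassThrough } from 'node:stream'

const liveStreams = new Map() // streamId -> PassThrough

function openRelayStream(streamId) {
  const stream = new PassThrough()
  liveStreams.set(streamId, stream)
  stream.once('close', () => liveStreams.delete(streamId))
  return stream
}

function handleStreamFrame({ type, id, payload }) {
  const stream = liveStreams.get(id)
  if (!stream) return // unknown or already-cancelled stream id

  if (type === 5) stream.write(payload) // audio chunk
  else if (type === 6) stream.end() // worker finished cleanly
  else if (type === 7) stream.destroy(new Error(payload.toString('utf8'))) // worker error
}

// Usage: open a stream before issuing loadStream, then feed incoming frames into it.
const pcm = openRelayStream('stream-1')
pcm.on('data', (c) => console.log(`got ${c.length} bytes`))
handleStreamFrame({ type: 5, id: 'stream-1', payload: Buffer.alloc(3840) })
handleStreamFrame({ type: 6, id: 'stream-1', payload: Buffer.alloc(0) })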
60000 - setTimeout(() => { if (process.connected) { try { @@ -442,90 +998,3 @@ setTimeout(() => { } } }, 1000) - -setInterval(() => { - if (!process.connected) return - - let localPlayers = 0 - let localPlayingPlayers = 0 - const localFrameStats = { sent: 0, nulled: 0, deficit: 0, expected: 0 } - - for (const player of players.values()) { - localPlayers++ - if (!player.isPaused && player.track) { - localPlayingPlayers++ - } - - if (player?.track && !player.isPaused && player.connection) { - if (player.connection.statistics) { - localFrameStats.sent += player.connection.statistics.packetsSent || 0 - localFrameStats.nulled += player.connection.statistics.packetsLost || 0 - localFrameStats.expected += - player.connection.statistics.packetsExpected || 0 - } - - if ( - player._lastStreamDataTime > 0 && - Date.now() - player._lastStreamDataTime >= zombieThreshold - ) { - logger( - 'warn', - 'Player', - `Player for guild ${player.guildId} detected as zombie (no stream data).` - ) - player.emitEvent(GatewayEvents.TRACK_STUCK, { - guildId: player.guildId, - track: player.track, - reason: 'no_stream_data', - thresholdMs: zombieThreshold - }) - } - try { - player._sendUpdate() - } catch (updateError) { - logger( - 'error', - 'Worker', - `Error during player update for guild ${player.guildId}: ${updateError.message}`, - updateError - ) - } - } - } - - localFrameStats.deficit += Math.max( - 0, - localFrameStats.expected - localFrameStats.sent - ) - - try { - const now = Date.now() - const elapsedMs = now - lastCpuTime - const cpuUsage = process.cpuUsage(lastCpuUsage) - lastCpuTime = now - lastCpuUsage = process.cpuUsage() - - const nodelinkLoad = - elapsedMs > 0 ? (cpuUsage.user + cpuUsage.system) / 1000 / elapsedMs : 0 - - const mem = process.memoryUsage() - - process.send({ - type: 'workerStats', - pid: process.pid, - stats: { - players: localPlayers, - playingPlayers: localPlayingPlayers, - commandQueueLength: commandQueue.length, - cpu: { nodelinkLoad }, - memory: { - used: mem.heapUsed, - allocated: mem.heapTotal - }, - frameStats: localFrameStats - } - }) - } catch (e) { - logger('error', 'Worker-IPC', `Failed to send workerStats: ${e.message}`) - } -}, updateInterval)
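// Illustrative sketch of the zombie check that moved from the removed bottom-of-file
// interval into startTimers() above: a player that claims to be playing but has
// produced no stream data for zombieThresholdMs is reported as TRACK_STUCK rather
// than left hanging. isZombiePlayer is a hypothetical helper name.
function isZombiePlayer(player, thresholdMs = 60000, now = Date.now()) {
  return Boolean(
    player?.track &&
      !player.isPaused &&
      player.connection &&
      player._lastStreamDataTime > 0 &&
      now - player._lastStreamDataTime >= thresholdMs
  )
}

// A player that last produced data 90s ago with a 60s threshold is flagged:
console.log(
  isZombiePlayer({
    track: {},
    isPaused: false,
    connection: {},
    _lastStreamDataTime: Date.now() - 90_000
  })
) // true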