diff --git a/.github/workflows/rust-core-ci.yml b/.github/workflows/rust-core-ci.yml index 87d8d2d..979c901 100644 --- a/.github/workflows/rust-core-ci.yml +++ b/.github/workflows/rust-core-ci.yml @@ -19,11 +19,32 @@ jobs: - name: Run Rust tests run: cargo test -p amplifier-core --verbose - name: Check workspace - run: cargo check --workspace + run: cargo check -p amplifier-core -p amplifier-core-py - name: Rustfmt - run: cargo fmt --check + run: cargo fmt -p amplifier-core -p amplifier-core-py --check - name: Clippy - run: cargo clippy --workspace -- -D warnings + run: cargo clippy -p amplifier-core -p amplifier-core-py -- -D warnings + + node-tests: + name: Node.js Binding Tests + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + - uses: dtolnay/rust-toolchain@stable + - uses: Swatinem/rust-cache@v2 + - uses: actions/setup-node@v4 + with: + node-version: '20' + - name: Build native module + working-directory: bindings/node + run: | + npm install + npm run build + - name: Run tests + working-directory: bindings/node + run: npx vitest run + - name: Clippy (Node binding) + run: cargo clippy -p amplifier-core-node -- -D warnings python-tests: name: Python Tests (${{ matrix.python-version }}) diff --git a/Cargo.lock b/Cargo.lock index 6a1520b..9c89bd0 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -4,24 +4,18 @@ version = 4 [[package]] name = "addr2line" -version = "0.24.2" +version = "0.26.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dfbe277e56a376000877090da837660b4427aad530e3028d44e0bffe4f89a1c1" +checksum = "9698bf0769c641b18618039fe2ebd41eb3541f98433000f64e663fab7cea2c87" dependencies = [ "gimli", ] [[package]] -name = "ahash" -version = "0.8.12" +name = "adler2" +version = "2.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5a15f179cd60c4584b8a8c596927aadc462e27f2ca70c04e0071964a73ba7a75" -dependencies = [ - "cfg-if", - "once_cell", - "version_check", - "zerocopy", -] +checksum = 
"320119579fcad9c21884f5c4861d16174d0e06250625266f50fe6898340abefa" [[package]] name = "aho-corasick" @@ -38,9 +32,15 @@ version = "0.2.21" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "683d7910e743518b0e34f1186f92494becacb047c7b6bf616c96772180fef923" +[[package]] +name = "ambient-authority" +version = "0.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e9d4ee0d472d1cd2e28c97dfa124b3d8d992e10eb0a035f33f5d12e3a177ba3b" + [[package]] name = "amplifier-core" -version = "1.0.10" +version = "1.0.11" dependencies = [ "chrono", "log", @@ -48,18 +48,34 @@ dependencies = [ "rand", "serde", "serde_json", + "tempfile", "thiserror 2.0.18", "tokio", "tokio-stream", + "toml 0.8.23", "tonic", "tonic-build", "uuid", "wasmtime", + "wasmtime-wasi", ] [[package]] -name = "amplifier-core-py" +name = "amplifier-core-node" version = "1.0.10" +dependencies = [ + "amplifier-core", + "log", + "napi", + "napi-build", + "napi-derive", + "serde_json", + "tokio", +] + +[[package]] +name = "amplifier-core-py" +version = "1.0.11" dependencies = [ "amplifier-core", "log", @@ -71,6 +87,16 @@ dependencies = [ "uuid", ] +[[package]] +name = "amplifier-guest" +version = "0.1.0" +dependencies = [ + "prost", + "serde", + "serde_json", + "wit-bindgen 0.41.0", +] + [[package]] name = "android_system_properties" version = "0.1.5" @@ -86,15 +112,6 @@ version = "1.0.101" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5f0e0fee31ef5ed1ba1316088939cea399010ed7731dba877ed44aeb407a75ea" -[[package]] -name = "ar_archive_writer" -version = "0.5.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7eb93bbb63b9c227414f6eb3a0adfddca591a8ce1e9b60661bb08969b87e340b" -dependencies = [ - "object 0.37.3", -] - [[package]] name = "arbitrary" version = "1.4.2" @@ -149,6 +166,18 @@ version = "1.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"1505bd5d3d116872e7271a6d4e16d81d0c8570876c8de68093a09ac269d8aac0" +[[package]] +name = "auditable-serde" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5c7bf8143dfc3c0258df908843e169b5cc5fcf76c7718bd66135ef4a9cd558c5" +dependencies = [ + "semver", + "serde", + "serde_json", + "topological-sort", +] + [[package]] name = "autocfg" version = "1.5.0" @@ -202,12 +231,6 @@ dependencies = [ "tower-service", ] -[[package]] -name = "base64" -version = "0.21.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9d297deb1925b89f2ccc13d7635fa0714f12c87adce1c75356b39ca9b7178567" - [[package]] name = "base64" version = "0.22.1" @@ -220,6 +243,15 @@ version = "2.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "843867be96c8daad0d758b57df9392b6d8d271134fce549de6ce169ff98a92af" +[[package]] +name = "bitmaps" +version = "2.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "031043d04099746d8db04daf1fa424b2bc8bd69d92b25962dcde24da39ab64a2" +dependencies = [ + "typenum", +] + [[package]] name = "block-buffer" version = "0.10.4" @@ -238,18 +270,90 @@ dependencies = [ "allocator-api2", ] -[[package]] -name = "byteorder" -version = "1.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1fd0f2584146f6f2ef48085050886acf353beff7305ebd1ae69500e27c67f64b" - [[package]] name = "bytes" version = "1.11.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1e748733b7cbc798e1434b6ac524f0c1ff2ab456fe201501e6497c8417a4fc33" +[[package]] +name = "cap-fs-ext" +version = "3.4.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d5528f85b1e134ae811704e41ef80930f56e795923f866813255bc342cc20654" +dependencies = [ + "cap-primitives", + "cap-std", + "io-lifetimes", + "windows-sys 0.52.0", +] + +[[package]] +name = "cap-net-ext" +version = "3.4.5" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "20a158160765c6a7d0d8c072a53d772e4cb243f38b04bfcf6b4939cfbe7482e7" +dependencies = [ + "cap-primitives", + "cap-std", + "rustix 1.1.4", + "smallvec", +] + +[[package]] +name = "cap-primitives" +version = "3.4.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b6cf3aea8a5081171859ef57bc1606b1df6999df4f1110f8eef68b30098d1d3a" +dependencies = [ + "ambient-authority", + "fs-set-times", + "io-extras", + "io-lifetimes", + "ipnet", + "maybe-owned", + "rustix 1.1.4", + "rustix-linux-procfs", + "windows-sys 0.52.0", + "winx", +] + +[[package]] +name = "cap-rand" +version = "3.4.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d8144c22e24bbcf26ade86cb6501a0916c46b7e4787abdb0045a467eb1645a1d" +dependencies = [ + "ambient-authority", + "rand", +] + +[[package]] +name = "cap-std" +version = "3.4.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b6dc3090992a735d23219de5c204927163d922f42f575a0189b005c62d37549a" +dependencies = [ + "cap-primitives", + "io-extras", + "io-lifetimes", + "rustix 1.1.4", +] + +[[package]] +name = "cap-time-ext" +version = "3.4.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "def102506ce40c11710a9b16e614af0cde8e76ae51b1f48c04b8d79f4b671a80" +dependencies = [ + "ambient-authority", + "cap-primitives", + "iana-time-zone", + "once_cell", + "rustix 1.1.4", + "winx", +] + [[package]] name = "cc" version = "1.2.56" @@ -291,6 +395,15 @@ dependencies = [ "thiserror 2.0.18", ] +[[package]] +name = "convert_case" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ec182b0ca2f35d8fc196cf3404988fd8b8c739a4d270ff118a398feb0cbec1ca" +dependencies = [ + "unicode-segmentation", +] + [[package]] name = "core-foundation-sys" version = "0.8.7" @@ -315,32 +428,52 @@ dependencies = [ "libc", ] +[[package]] +name = 
"cranelift-assembler-x64" +version = "0.129.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "40630d663279bc855bff805d6f5e8a0b6a1867f9df95b010511ac6dc894e9395" +dependencies = [ + "cranelift-assembler-x64-meta", +] + +[[package]] +name = "cranelift-assembler-x64-meta" +version = "0.129.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3ee6aec5ceb55e5fdbcf7ef677d7c7195531360ff181ce39b2b31df11d57305f" +dependencies = [ + "cranelift-srcgen", +] + [[package]] name = "cranelift-bforest" -version = "0.116.1" +version = "0.129.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e15d04a0ce86cb36ead88ad68cf693ffd6cda47052b9e0ac114bc47fd9cd23c4" +checksum = "9a92d78cc3f087d7e7073828f08d98c7074a3a062b6b29a1b7783ce74305685e" dependencies = [ "cranelift-entity", ] [[package]] name = "cranelift-bitset" -version = "0.116.1" +version = "0.129.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7c6e3969a7ce267259ce244b7867c5d3bc9e65b0a87e81039588dfdeaede9f34" +checksum = "edcc73d756f2e0d7eda6144fe64a2bc69c624de893cb1be51f1442aed77881d2" dependencies = [ "serde", "serde_derive", + "wasmtime-internal-core", ] [[package]] name = "cranelift-codegen" -version = "0.116.1" +version = "0.129.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2c22032c4cb42558371cf516bb47f26cdad1819d3475c133e93c49f50ebf304e" +checksum = "683d94c2cd0d73b41369b88da1129589bc3a2d99cf49979af1d14751f35b7a1b" dependencies = [ "bumpalo", + "cranelift-assembler-x64", "cranelift-bforest", "cranelift-bitset", "cranelift-codegen-meta", @@ -349,55 +482,63 @@ dependencies = [ "cranelift-entity", "cranelift-isle", "gimli", - "hashbrown 0.14.5", + "hashbrown 0.15.5", + "libm", "log", + "pulley-interpreter", "regalloc2", "rustc-hash", "serde", "smallvec", "target-lexicon", + "wasmtime-internal-core", ] [[package]] name = "cranelift-codegen-meta" -version = "0.116.1" +version 
= "0.129.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c904bc71c61b27fc57827f4a1379f29de64fe95653b620a3db77d59655eee0b8" +checksum = "235da0e52ee3a0052d0e944c3470ff025b1f4234f6ec4089d3109f2d2ffa6cbd" dependencies = [ + "cranelift-assembler-x64-meta", "cranelift-codegen-shared", + "cranelift-srcgen", + "heck", + "pulley-interpreter", ] [[package]] name = "cranelift-codegen-shared" -version = "0.116.1" +version = "0.129.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "40180f5497572f644ce88c255480981ae2ec1d7bb4d8e0c0136a13b87a2f2ceb" +checksum = "20c07c6c440bd1bf920ff7597a1e743ede1f68dcd400730bd6d389effa7662af" [[package]] name = "cranelift-control" -version = "0.116.1" +version = "0.129.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "26d132c6d0bd8a489563472afc171759da0707804a65ece7ceb15a8c6d7dd5ef" +checksum = "8797c022e02521901e1aee483dea3ed3c67f2bf0a26405c9dd48e8ee7a70944b" dependencies = [ "arbitrary", ] [[package]] name = "cranelift-entity" -version = "0.116.1" +version = "0.129.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4b2d0d9618275474fbf679dd018ac6e009acbd6ae6850f6a67be33fb3b00b323" +checksum = "59d8e72637246edd2cba337939850caa8b201f6315925ec4c156fdd089999699" dependencies = [ "cranelift-bitset", "serde", "serde_derive", + "wasmtime-internal-core", ] [[package]] name = "cranelift-frontend" -version = "0.116.1" +version = "0.129.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4fac41e16729107393174b0c9e3730fb072866100e1e64e80a1a963b2e484d57" +checksum = "4c31db0085c3dfa131e739c3b26f9f9c84d69a9459627aac1ac4ef8355e3411b" dependencies = [ "cranelift-codegen", "log", @@ -407,21 +548,27 @@ dependencies = [ [[package]] name = "cranelift-isle" -version = "0.116.1" +version = "0.129.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"1ca20d576e5070044d0a72a9effc2deacf4d6aa650403189d8ea50126483944d" +checksum = "524d804c1ebd8c542e6f64e71aa36934cec17c5da4a9ae3799796220317f5d23" [[package]] name = "cranelift-native" -version = "0.116.1" +version = "0.129.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b8dee82f3f1f2c4cba9177f1cc5e350fe98764379bcd29340caa7b01f85076c7" +checksum = "dc9598f02540e382e1772416eba18e93c5275b746adbbf06ac1f3cf149415270" dependencies = [ "cranelift-codegen", "libc", "target-lexicon", ] +[[package]] +name = "cranelift-srcgen" +version = "0.129.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d953932541249c91e3fa70a75ff1e52adc62979a2a8132145d4b9b3e6d1a9b6a" + [[package]] name = "crc32fast" version = "1.5.0" @@ -466,6 +613,16 @@ dependencies = [ "typenum", ] +[[package]] +name = "ctor" +version = "0.2.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "32a2785755761f3ddc1492979ce1e48d2c00d09311c39e4466429188f3dd6501" +dependencies = [ + "quote", + "syn", +] + [[package]] name = "debugid" version = "0.8.0" @@ -506,6 +663,17 @@ dependencies = [ "winapi", ] +[[package]] +name = "displaydoc" +version = "0.2.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "97369cbbc041bc366949bc74d34658d6cda5621039731c6310521892a3a20ae0" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + [[package]] name = "either" version = "1.15.0" @@ -549,30 +717,51 @@ dependencies = [ "windows-sys 0.61.2", ] -[[package]] -name = "fallible-iterator" -version = "0.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2acce4a10f12dc2fb14a218589d4f1f62ef011b2d0cc4b3cb1bba8e94da14649" - [[package]] name = "fastrand" version = "2.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "37909eebbb50d72f9059c3b6d82c0463f2ff062c9e95845c43a6c9c0355411be" +[[package]] +name = "fd-lock" +version = "4.0.4" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "0ce92ff622d6dadf7349484f42c93271a0d49b7cc4d466a936405bacbe10aa78" +dependencies = [ + "cfg-if", + "rustix 1.1.4", + "windows-sys 0.52.0", +] + [[package]] name = "find-msvc-tools" version = "0.1.9" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5baebc0774151f905a1a2cc41989300b1e6fbb29aff0ceffa1064fdd3088d582" +[[package]] +name = "fixedbitset" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0ce7134b9999ecaf8bcd65542e436736ef32ddca1b3e06094cb6ec5755203b80" + [[package]] name = "fixedbitset" version = "0.5.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1d674e81391d1e1ab681a28d99df07927c6d4aa5b027d7da16ba32d1d21ecd99" +[[package]] +name = "flate2" +version = "1.1.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "843fba2746e448b37e26a819579957415c8cef339bf08564fe8b7ddbd959573c" +dependencies = [ + "crc32fast", + "miniz_oxide", +] + [[package]] name = "fnv" version = "1.0.7" @@ -585,6 +774,41 @@ version = "0.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d9c4f5dac5e15c24eb999c26181a6ca40b39fe946cbe4c263c7209467bc83af2" +[[package]] +name = "form_urlencoded" +version = "1.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cb4cb245038516f5f85277875cdaa4f7d2c9a0fa0468de06ed190163b1581fcf" +dependencies = [ + "percent-encoding", +] + +[[package]] +name = "fs-set-times" +version = "0.20.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "94e7099f6313ecacbe1256e8ff9d617b75d1bcb16a6fddef94866d225a01a14a" +dependencies = [ + "io-lifetimes", + "rustix 1.1.4", + "windows-sys 0.52.0", +] + +[[package]] +name = "futures" +version = "0.3.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"65bc07b1a8bc7c85c5f2e110c476c7389b4554ba72af57d8445ea63a576b0876" +dependencies = [ + "futures-channel", + "futures-core", + "futures-executor", + "futures-io", + "futures-sink", + "futures-task", + "futures-util", +] + [[package]] name = "futures-channel" version = "0.3.31" @@ -592,6 +816,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2dff15bf788c671c1934e366d07e30c1814a8ef514e1af724a602e8a2fbe1b10" dependencies = [ "futures-core", + "futures-sink", ] [[package]] @@ -600,6 +825,23 @@ version = "0.3.31" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "05f29059c0c2090612e8d742178b0580d2dc940c837851ad723096f87af6663e" +[[package]] +name = "futures-executor" +version = "0.3.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e28d1d997f585e54aebc3f97d39e72338912123a67330d723fdbb564d646c9f" +dependencies = [ + "futures-core", + "futures-task", + "futures-util", +] + +[[package]] +name = "futures-io" +version = "0.3.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cecba35d7ad927e23624b22ad55235f2239cfa44fd10428eecbeba6d6a717718" + [[package]] name = "futures-macro" version = "0.3.31" @@ -629,33 +871,29 @@ version = "0.3.31" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9fa08315bb612088cc391249efdc3bc77536f16c91f6cf495e6fbe85b20a4a81" dependencies = [ + "futures-channel", "futures-core", + "futures-io", "futures-macro", + "futures-sink", "futures-task", + "memchr", "pin-project-lite", "pin-utils", "slab", ] -[[package]] -name = "fxhash" -version = "0.2.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c31b6d751ae2c7f11320402d34e41349dd1016f8d5d45e48c4312bc8625af50c" -dependencies = [ - "byteorder", -] - [[package]] name = "fxprof-processed-profile" -version = "0.6.0" +version = "0.8.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"27d12c0aed7f1e24276a241aadc4cb8ea9f83000f34bc062b7cc2d51e3b0fabd" +checksum = "25234f20a3ec0a962a61770cfe39ecf03cb529a6e474ad8cff025ed497eda557" dependencies = [ "bitflags", "debugid", - "fxhash", + "rustc-hash", "serde", + "serde_derive", "serde_json", ] @@ -707,11 +945,12 @@ dependencies = [ [[package]] name = "gimli" -version = "0.31.1" +version = "0.33.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "07e28edb80900c19c28f1072f2e8aeca7fa06b23cd4169cefe1af5aa3260783f" +checksum = "0bf7f043f89559805f8c7cacc432749b2fa0d0a0a9ee46ce47164ed5ba7f126c" dependencies = [ - "fallible-iterator", + "fnv", + "hashbrown 0.16.1", "indexmap 2.13.0", "stable_deref_trait", ] @@ -741,15 +980,6 @@ version = "0.12.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8a9ee70c43aaf417c914396645a0fa852624801b24ebb7ae78fe8272889ac888" -[[package]] -name = "hashbrown" -version = "0.14.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e5274423e17b7c9fc20b6e7e208532f9b19825d82dfd615708b70edd83df41f1" -dependencies = [ - "ahash", -] - [[package]] name = "hashbrown" version = "0.15.5" @@ -898,62 +1128,191 @@ dependencies = [ ] [[package]] -name = "id-arena" -version = "2.3.0" +name = "icu_collections" +version = "2.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3d3067d79b975e8844ca9eb072e16b31c3c1c36928edf9c6789548c524d0d954" +checksum = "4c6b649701667bbe825c3b7e6388cb521c23d88644678e83c0c4d0a621a34b43" +dependencies = [ + "displaydoc", + "potential_utf", + "yoke", + "zerofrom", + "zerovec", +] [[package]] -name = "indexmap" -version = "1.9.3" +name = "icu_locale_core" +version = "2.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bd070e393353796e801d209ad339e89596eb4c8d430d18ede6a1cced8fafbd99" +checksum = "edba7861004dd3714265b4db54a3c390e880ab658fec5f7db895fae2046b5bb6" dependencies = [ - "autocfg", - "hashbrown 0.12.3", + 
"displaydoc", + "litemap", + "tinystr", + "writeable", + "zerovec", ] [[package]] -name = "indexmap" -version = "2.13.0" +name = "icu_normalizer" +version = "2.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7714e70437a7dc3ac8eb7e6f8df75fd8eb422675fc7678aff7364301092b1017" +checksum = "5f6c8828b67bf8908d82127b2054ea1b4427ff0230ee9141c54251934ab1b599" dependencies = [ - "equivalent", - "hashbrown 0.16.1", - "serde", - "serde_core", + "icu_collections", + "icu_normalizer_data", + "icu_properties", + "icu_provider", + "smallvec", + "zerovec", ] [[package]] -name = "itertools" -version = "0.12.1" +name = "icu_normalizer_data" +version = "2.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ba291022dbbd398a455acf126c1e341954079855bc60dfdda641363bd6922569" -dependencies = [ - "either", -] +checksum = "7aedcccd01fc5fe81e6b489c15b247b8b0690feb23304303a9e560f37efc560a" [[package]] -name = "itertools" -version = "0.14.0" +name = "icu_properties" +version = "2.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2b192c782037fadd9cfa75548310488aabdbf3d2da73885b31bd0abd03351285" +checksum = "020bfc02fe870ec3a66d93e677ccca0562506e5872c650f893269e08615d74ec" dependencies = [ - "either", + "icu_collections", + "icu_locale_core", + "icu_properties_data", + "icu_provider", + "zerotrie", + "zerovec", ] [[package]] -name = "itoa" -version = "1.0.17" +name = "icu_properties_data" +version = "2.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "92ecc6618181def0457392ccd0ee51198e065e016d1d527a7ac1b6dc7c1f09d2" +checksum = "616c294cf8d725c6afcd8f55abc17c56464ef6211f9ed59cccffe534129c77af" [[package]] -name = "ittapi" -version = "0.4.0" +name = "icu_provider" +version = "2.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6b996fe614c41395cdaedf3cf408a9534851090959d90d54a535f675550b64b1" +checksum = 
"85962cf0ce02e1e0a629cc34e7ca3e373ce20dda4c4d7294bbd0bf1fdb59e614" +dependencies = [ + "displaydoc", + "icu_locale_core", + "writeable", + "yoke", + "zerofrom", + "zerotrie", + "zerovec", +] + +[[package]] +name = "id-arena" +version = "2.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3d3067d79b975e8844ca9eb072e16b31c3c1c36928edf9c6789548c524d0d954" + +[[package]] +name = "idna" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3b0875f23caa03898994f6ddc501886a45c7d3d62d04d2d90788d47be1b1e4de" +dependencies = [ + "idna_adapter", + "smallvec", + "utf8_iter", +] + +[[package]] +name = "idna_adapter" +version = "1.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3acae9609540aa318d1bc588455225fb2085b9ed0c4f6bd0d9d5bcd86f1a0344" +dependencies = [ + "icu_normalizer", + "icu_properties", +] + +[[package]] +name = "im-rc" +version = "15.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "af1955a75fa080c677d3972822ec4bad316169ab1cfc6c257a942c2265dbe5fe" +dependencies = [ + "bitmaps", + "rand_core", + "rand_xoshiro", + "sized-chunks", + "typenum", + "version_check", +] + +[[package]] +name = "indexmap" +version = "1.9.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bd070e393353796e801d209ad339e89596eb4c8d430d18ede6a1cced8fafbd99" +dependencies = [ + "autocfg", + "hashbrown 0.12.3", +] + +[[package]] +name = "indexmap" +version = "2.13.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7714e70437a7dc3ac8eb7e6f8df75fd8eb422675fc7678aff7364301092b1017" +dependencies = [ + "equivalent", + "hashbrown 0.16.1", + "serde", + "serde_core", +] + +[[package]] +name = "io-extras" +version = "0.18.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2285ddfe3054097ef4b2fe909ef8c3bcd1ea52a8f0d274416caebeef39f04a65" +dependencies = [ + 
"io-lifetimes", + "windows-sys 0.52.0", +] + +[[package]] +name = "io-lifetimes" +version = "2.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "06432fb54d3be7964ecd3649233cddf80db2832f47fec34c01f65b3d9d774983" + +[[package]] +name = "ipnet" +version = "2.12.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d98f6fed1fde3f8c21bc40a1abb88dd75e67924f9cffc3ef95607bad8017f8e2" + +[[package]] +name = "itertools" +version = "0.14.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2b192c782037fadd9cfa75548310488aabdbf3d2da73885b31bd0abd03351285" +dependencies = [ + "either", +] + +[[package]] +name = "itoa" +version = "1.0.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "92ecc6618181def0457392ccd0ee51198e065e016d1d527a7ac1b6dc7c1f09d2" + +[[package]] +name = "ittapi" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6b996fe614c41395cdaedf3cf408a9534851090959d90d54a535f675550b64b1" dependencies = [ "anyhow", "ittapi-sys", @@ -1007,6 +1366,16 @@ version = "0.2.182" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6800badb6cb2082ffd7b6a67e6125bb39f18782f793520caee8cb8846be06112" +[[package]] +name = "libloading" +version = "0.8.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d7c4b02199fee7c5d21a5ae7d8cfa79a6ef5bb2fc834d6e9058e89c825efdc55" +dependencies = [ + "cfg-if", + "windows-link", +] + [[package]] name = "libm" version = "0.2.16" @@ -1034,6 +1403,12 @@ version = "0.12.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "32a66949e030da00e8c7d4434b251670a91556f4144941d37452769c25d58a53" +[[package]] +name = "litemap" +version = "0.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6373607a59f0be73a39b6fe456b8192fcc3585f602af20751600e974dd455e77" + [[package]] name = "log" 
version = "0.4.29" @@ -1055,6 +1430,12 @@ version = "0.7.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0e7465ac9959cc2b1404e8e2367b43684a6d13790fe23056cc8c6c5a6b7bcb94" +[[package]] +name = "maybe-owned" +version = "0.3.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4facc753ae494aeb6e3c22f839b158aebd4f9270f55cd3c79906c45476c47ab4" + [[package]] name = "memchr" version = "2.8.0" @@ -1076,6 +1457,16 @@ version = "0.3.17" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6877bb514081ee2a7ff5ef9de3281f14a4dd4bceac4c09388074a6b5df8a139a" +[[package]] +name = "miniz_oxide" +version = "0.8.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1fa76a2c86f704bdb222d66965fb3d63269ce38518b83cb0575fca855ebb6316" +dependencies = [ + "adler2", + "simd-adler32", +] + [[package]] name = "mio" version = "1.1.1" @@ -1094,24 +1485,72 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1d87ecb2933e8aeadb3e3a02b828fed80a7528047e68b4f424523a0981a3a084" [[package]] -name = "num-traits" -version = "0.2.19" +name = "napi" +version = "2.16.17" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "071dfc062690e90b734c0b2273ce72ad0ffa95f0c74596bc250dcfd960262841" +checksum = "55740c4ae1d8696773c78fdafd5d0e5fe9bc9f1b071c7ba493ba5c413a9184f3" dependencies = [ - "autocfg", + "bitflags", + "ctor", + "napi-derive", + "napi-sys", + "once_cell", + "serde", + "serde_json", + "tokio", ] [[package]] -name = "object" -version = "0.36.7" +name = "napi-build" +version = "2.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "62948e14d923ea95ea2c7c86c71013138b66525b86bdc08d2dcc262bdb497b87" +checksum = "d376940fd5b723c6893cd1ee3f33abbfd86acb1cd1ec079f3ab04a2a3bc4d3b1" + +[[package]] +name = "napi-derive" +version = "2.16.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"7cbe2585d8ac223f7d34f13701434b9d5f4eb9c332cccce8dee57ea18ab8ab0c" dependencies = [ - "crc32fast", - "hashbrown 0.15.5", - "indexmap 2.13.0", - "memchr", + "cfg-if", + "convert_case", + "napi-derive-backend", + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "napi-derive-backend" +version = "1.0.75" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1639aaa9eeb76e91c6ae66da8ce3e89e921cd3885e99ec85f4abacae72fc91bf" +dependencies = [ + "convert_case", + "once_cell", + "proc-macro2", + "quote", + "regex", + "semver", + "syn", +] + +[[package]] +name = "napi-sys" +version = "2.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "427802e8ec3a734331fec1035594a210ce1ff4dc5bc1950530920ab717964ea3" +dependencies = [ + "libloading", +] + +[[package]] +name = "num-traits" +version = "0.2.19" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "071dfc062690e90b734c0b2273ce72ad0ffa95f0c74596bc250dcfd960262841" +dependencies = [ + "autocfg", ] [[package]] @@ -1120,6 +1559,9 @@ version = "0.37.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ff76201f031d8863c38aa7f905eca4f53abbfa15f609db4277d44cd8938f33fe" dependencies = [ + "crc32fast", + "hashbrown 0.15.5", + "indexmap 2.13.0", "memchr", ] @@ -1129,25 +1571,29 @@ version = "1.21.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "42f5e15c9953c5e4ccceeb2e7382a716482c34515315f7b03532b8b4e8393d2d" -[[package]] -name = "paste" -version = "1.0.15" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "57c0d7b74b563b49d38dae00a0c37d4d6de9b432382b2892f0574ddcae73fd0a" - [[package]] name = "percent-encoding" version = "2.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9b4f627cb1b25917193a259e49bdad08f671f8d9708acfd5fe0a8c1455d87220" +[[package]] +name = "petgraph" +version = "0.6.5" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "b4c5cc86750666a3ed20bdaf5ca2a0344f9c67674cae0515bec2da16fbaa47db" +dependencies = [ + "fixedbitset 0.4.2", + "indexmap 2.13.0", +] + [[package]] name = "petgraph" version = "0.7.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3672b37090dbd86368a4145bc067582552b29c27377cad4e0a306c97f9bd7772" dependencies = [ - "fixedbitset", + "fixedbitset 0.5.7", "indexmap 2.13.0", ] @@ -1207,6 +1653,15 @@ dependencies = [ "serde", ] +[[package]] +name = "potential_utf" +version = "0.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b73949432f5e2a09657003c25bca5e19a0e9c84f8058ca374f49e0ebe605af77" +dependencies = [ + "zerovec", +] + [[package]] name = "ppv-lite86" version = "0.2.21" @@ -1252,11 +1707,11 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "be769465445e8c1474e9c5dac2018218498557af32d9ed057325ec9a41ae81bf" dependencies = [ "heck", - "itertools 0.14.0", + "itertools", "log", "multimap", "once_cell", - "petgraph", + "petgraph 0.7.1", "prettyplease", "prost", "prost-types", @@ -1272,7 +1727,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8a56d757972c98b346a9b766e3f02746cde6dd1cd1d1d563472929fdd74bec4d" dependencies = [ "anyhow", - "itertools 0.14.0", + "itertools", "proc-macro2", "quote", "syn", @@ -1288,32 +1743,33 @@ dependencies = [ ] [[package]] -name = "psm" -version = "0.1.30" +name = "pulley-interpreter" +version = "42.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3852766467df634d74f0b2d7819bf8dc483a0eb2e3b0f50f756f9cfe8b0d18d8" +checksum = "bc2d61e068654529dc196437f8df0981db93687fdc67dec6a5de92363120b9da" dependencies = [ - "ar_archive_writer", - "cc", + "cranelift-bitset", + "log", + "pulley-macros", + "wasmtime-internal-core", ] [[package]] -name = "pulley-interpreter" -version = "29.0.1" +name = "pulley-macros" +version = "42.0.1" 
source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "62d95f8575df49a2708398182f49a888cf9dc30210fb1fd2df87c889edcee75d" +checksum = "c3f210c61b6ecfaebbba806b6d9113a222519d4e5cc4ab2d5ecca047bb7927ae" dependencies = [ - "cranelift-bitset", - "log", - "sptr", - "wasmtime-math", + "proc-macro2", + "quote", + "syn", ] [[package]] name = "pyo3" -version = "0.28.1" +version = "0.28.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "14c738662e2181be11cb82487628404254902bb3225d8e9e99c31f3ef82a405c" +checksum = "cf85e27e86080aafd5a22eae58a162e133a589551542b3e5cee4beb27e54f8e1" dependencies = [ "libc", "once_cell", @@ -1339,9 +1795,9 @@ dependencies = [ [[package]] name = "pyo3-build-config" -version = "0.28.1" +version = "0.28.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f9ca0864a7dd3c133a7f3f020cbff2e12e88420da854c35540fd20ce2d60e435" +checksum = "8bf94ee265674bf76c09fa430b0e99c26e319c945d96ca0d5a8215f31bf81cf7" dependencies = [ "python3-dll-a", "target-lexicon", @@ -1349,9 +1805,9 @@ dependencies = [ [[package]] name = "pyo3-ffi" -version = "0.28.1" +version = "0.28.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9dfc1956b709823164763a34cc42bbfd26b8730afa77809a3df8b94a3ae3b059" +checksum = "491aa5fc66d8059dd44a75f4580a2962c1862a1c2945359db36f6c2818b748dc" dependencies = [ "libc", "pyo3-build-config", @@ -1370,9 +1826,9 @@ dependencies = [ [[package]] name = "pyo3-macros" -version = "0.28.1" +version = "0.28.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "29dc660ad948bae134d579661d08033fbb1918f4529c3bbe3257a68f2009ddf2" +checksum = "f5d671734e9d7a43449f8480f8b38115df67bef8d21f76837fa75ee7aaa5e52e" dependencies = [ "proc-macro2", "pyo3-macros-backend", @@ -1382,9 +1838,9 @@ dependencies = [ [[package]] name = "pyo3-macros-backend" -version = "0.28.1" +version = "0.28.2" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "e78cd6c6d718acfcedf26c3d21fe0f053624368b0d44298c55d7138fde9331f7" +checksum = "22faaa1ce6c430a1f71658760497291065e6450d7b5dc2bcf254d49f66ee700a" dependencies = [ "heck", "proc-macro2", @@ -1447,6 +1903,15 @@ dependencies = [ "getrandom 0.2.17", ] +[[package]] +name = "rand_xoshiro" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6f97cdb2a36ed4183de61b2f824cc45c9f1037f28afe0a322e9fff4c108b5aaa" +dependencies = [ + "rand_core", +] + [[package]] name = "rayon" version = "1.11.0" @@ -1480,9 +1945,9 @@ dependencies = [ [[package]] name = "regalloc2" -version = "0.11.2" +version = "0.13.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dc06e6b318142614e4a48bc725abbf08ff166694835c43c9dae5a9009704639a" +checksum = "08effbc1fa53aaebff69521a5c05640523fab037b34a4a2c109506bc938246fa" dependencies = [ "allocator-api2", "bumpalo", @@ -1543,7 +2008,7 @@ dependencies = [ "errno", "libc", "linux-raw-sys 0.4.15", - "windows-sys 0.59.0", + "windows-sys 0.52.0", ] [[package]] @@ -1559,12 +2024,28 @@ dependencies = [ "windows-sys 0.61.2", ] +[[package]] +name = "rustix-linux-procfs" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2fc84bf7e9aa16c4f2c758f27412dc9841341e16aa682d9c7ac308fe3ee12056" +dependencies = [ + "once_cell", + "rustix 1.1.4", +] + [[package]] name = "rustversion" version = "1.0.22" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b39cdef0fa800fc44525c84ccb54a029961a8215f9619753635a9c0d2538d46d" +[[package]] +name = "ryu" +version = "1.0.23" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9774ba4a74de5f7b1c1451ed6cd5285a32eddb5cccb8cc655a4e50009e06477f" + [[package]] name = "semver" version = "1.0.27" @@ -1627,6 +2108,28 @@ dependencies = [ "serde", ] +[[package]] +name = "serde_spanned" +version = "1.0.4" +source 
= "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f8bbf91e5a4d6315eee45e704372590b30e260ee83af6639d64557f51b067776" +dependencies = [ + "serde_core", +] + +[[package]] +name = "serde_yaml" +version = "0.9.34+deprecated" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6a8b1a1a2ebf674015cc02edccce75287f1a0130d394307b36743c2f5d504b47" +dependencies = [ + "indexmap 2.13.0", + "itoa", + "ryu", + "serde", + "unsafe-libyaml", +] + [[package]] name = "sha2" version = "0.10.9" @@ -1644,6 +2147,22 @@ version = "1.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0fda2ff0d084019ba4d7c6f371c95d8fd75ce3524c3cb8fb653a3023f6323e64" +[[package]] +name = "simd-adler32" +version = "0.3.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e320a6c5ad31d271ad523dcf3ad13e2767ad8b1cb8f047f75a8aeaf8da139da2" + +[[package]] +name = "sized-chunks" +version = "0.6.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "16d69225bde7a69b235da73377861095455d298f2b970996eec25ddbb42b3d1e" +dependencies = [ + "bitmaps", + "typenum", +] + [[package]] name = "slab" version = "0.4.12" @@ -1680,10 +2199,13 @@ dependencies = [ ] [[package]] -name = "sptr" -version = "0.3.2" +name = "spdx" +version = "0.10.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3b9b39299b249ad65f3b7e96443bad61c02ca5cd3589f46cb6d610a0fd6c0d6a" +checksum = "c3e17e880bafaeb362a7b751ec46bdc5b61445a188f80e0606e68167cd540fa3" +dependencies = [ + "smallvec", +] [[package]] name = "stable_deref_trait" @@ -1708,6 +2230,33 @@ version = "1.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0bf256ce5efdfa370213c1dabab5935a12e49f2c58d15e9eac2870d3b4f27263" +[[package]] +name = "synstructure" +version = "0.13.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"728a70f3dbaf5bab7f0c4b1ac8d7ae5ea60a4b5549c8a5914361c99147a709d2" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "system-interface" +version = "0.27.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cc4592f674ce18521c2a81483873a49596655b179f71c5e05d10c1fe66c78745" +dependencies = [ + "bitflags", + "cap-fs-ext", + "cap-std", + "fd-lock", + "io-lifetimes", + "rustix 0.38.44", + "windows-sys 0.52.0", + "winx", +] + [[package]] name = "target-lexicon" version = "0.13.4" @@ -1776,6 +2325,16 @@ dependencies = [ "syn", ] +[[package]] +name = "tinystr" +version = "0.8.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "42d3e9c45c09de15d06dd8acf5f4e0e399e85927b7f00711024eb7ae10fa4869" +dependencies = [ + "displaydoc", + "zerovec", +] + [[package]] name = "tokio" version = "1.49.0" @@ -1833,20 +2392,44 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "dc1beb996b9d83529a9e75c17a1686767d148d70663143c7854d8b4a09ced362" dependencies = [ "serde", - "serde_spanned", - "toml_datetime", + "serde_spanned 0.6.9", + "toml_datetime 0.6.11", "toml_edit", ] [[package]] -name = "toml_datetime" -version = "0.6.11" +name = "toml" +version = "0.9.12+spec-1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "22cddaf88f4fbc13c51aebbf5f8eceb5c7c5a9da2ac40a13519eb5b0a0e8f11c" +checksum = "cf92845e79fc2e2def6a5d828f0801e29a2f8acc037becc5ab08595c7d5e9863" dependencies = [ - "serde", -] - + "indexmap 2.13.0", + "serde_core", + "serde_spanned 1.0.4", + "toml_datetime 0.7.5+spec-1.1.0", + "toml_parser", + "toml_writer", + "winnow", +] + +[[package]] +name = "toml_datetime" +version = "0.6.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "22cddaf88f4fbc13c51aebbf5f8eceb5c7c5a9da2ac40a13519eb5b0a0e8f11c" +dependencies = [ + "serde", +] + +[[package]] +name = "toml_datetime" +version = "0.7.5+spec-1.1.0" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "92e1cfed4a3038bc5a127e35a2d360f145e1f4b971b551a2ba5fd7aedf7e1347" +dependencies = [ + "serde_core", +] + [[package]] name = "toml_edit" version = "0.22.27" @@ -1855,18 +2438,33 @@ checksum = "41fe8c660ae4257887cf66394862d21dbca4a6ddd26f04a3560410406a2f819a" dependencies = [ "indexmap 2.13.0", "serde", - "serde_spanned", - "toml_datetime", + "serde_spanned 0.6.9", + "toml_datetime 0.6.11", "toml_write", "winnow", ] +[[package]] +name = "toml_parser" +version = "1.0.9+spec-1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "702d4415e08923e7e1ef96cd5727c0dfed80b4d2fa25db9647fe5eb6f7c5a4c4" +dependencies = [ + "winnow", +] + [[package]] name = "toml_write" version = "0.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5d99f8c9a7727884afe522e9bd5edbfc91a3312b36a77b5fb8926e4c31a41801" +[[package]] +name = "toml_writer" +version = "1.0.6+spec-1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ab16f14aed21ee8bfd8ec22513f7287cd4a91aa92e44edfe2c17ddd004e92607" + [[package]] name = "tonic" version = "0.12.3" @@ -1876,7 +2474,7 @@ dependencies = [ "async-stream", "async-trait", "axum", - "base64 0.22.1", + "base64", "bytes", "h2", "http", @@ -1911,6 +2509,12 @@ dependencies = [ "syn", ] +[[package]] +name = "topological-sort" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ea68304e134ecd095ac6c3574494fc62b909f416c4fca77e440530221e549d3d" + [[package]] name = "tower" version = "0.4.13" @@ -1988,17 +2592,6 @@ dependencies = [ "once_cell", ] -[[package]] -name = "trait-variant" -version = "0.1.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "70977707304198400eb4835a78f6a9f928bf41bba420deb8fdb175cd965d77a7" -dependencies = [ - "proc-macro2", - "quote", - "syn", -] - [[package]] name = "try-lock" version = "0.2.5" @@ -2017,6 +2610,12 
@@ version = "1.0.23" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "537dd038a89878be9b64dd4bd1b260315c1bb94f4d784956b81e27a088d9a09e" +[[package]] +name = "unicode-segmentation" +version = "1.12.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f6ccf251212114b54433ec949fd6a7841275f9ada20dddd2f29e9ceea4501493" + [[package]] name = "unicode-width" version = "0.2.2" @@ -2029,6 +2628,30 @@ version = "0.2.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ebc1c04c71510c7f702b52b7c350734c9ff1295c464a03335b00bb84fc54f853" +[[package]] +name = "unsafe-libyaml" +version = "0.2.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "673aac59facbab8a9007c7f6108d11f63b603f7cabff99fabf650fea5c32b861" + +[[package]] +name = "url" +version = "2.5.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ff67a8a4397373c3ef660812acab3268222035010ab8680ec4215f38ba3d0eed" +dependencies = [ + "form_urlencoded", + "idna", + "percent-encoding", + "serde", +] + +[[package]] +name = "utf8_iter" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b6c140620e7ffbb22c2dee59cafe6084a59b5ffc27a8859a5f0d494b5d52b6be" + [[package]] name = "uuid" version = "1.21.0" @@ -2067,7 +2690,7 @@ version = "1.0.2+wasi-0.2.9" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9517f9239f02c069db75e65f174b3da828fe5f5b945c4dd26bd25d89c03ebcf5" dependencies = [ - "wit-bindgen", + "wit-bindgen 0.51.0", ] [[package]] @@ -2076,7 +2699,7 @@ version = "0.4.0+wasi-0.3.0-rc-2026-01-06" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5428f8bf88ea5ddc08faddef2ac4a67e390b88186c703ce6dbd955e1c145aca5" dependencies = [ - "wit-bindgen", + "wit-bindgen 0.51.0", ] [[package]] @@ -2124,14 +2747,35 @@ dependencies = [ "unicode-ident", ] +[[package]] +name = "wasm-compose" +version = 
"0.244.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "92cda9c76ca8dcac01a8b497860c2cb15cd6f216dc07060517df5abbe82512ac" +dependencies = [ + "anyhow", + "heck", + "im-rc", + "indexmap 2.13.0", + "log", + "petgraph 0.6.5", + "serde", + "serde_derive", + "serde_yaml", + "smallvec", + "wasm-encoder 0.244.0", + "wasmparser 0.244.0", + "wat", +] + [[package]] name = "wasm-encoder" -version = "0.221.3" +version = "0.227.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dc8444fe4920de80a4fe5ab564fff2ae58b6b73166b89751f8c6c93509da32e5" +checksum = "80bb72f02e7fbf07183443b27b0f3d4144abf8c114189f2e088ed95b696a7822" dependencies = [ - "leb128", - "wasmparser 0.221.3", + "leb128fmt", + "wasmparser 0.227.1", ] [[package]] @@ -2154,6 +2798,25 @@ dependencies = [ "wasmparser 0.245.1", ] +[[package]] +name = "wasm-metadata" +version = "0.227.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ce1ef0faabbbba6674e97a56bee857ccddf942785a336c8b47b42373c922a91d" +dependencies = [ + "anyhow", + "auditable-serde", + "flate2", + "indexmap 2.13.0", + "serde", + "serde_derive", + "serde_json", + "spdx", + "url", + "wasm-encoder 0.227.1", + "wasmparser 0.227.1", +] + [[package]] name = "wasm-metadata" version = "0.244.0" @@ -2168,15 +2831,14 @@ dependencies = [ [[package]] name = "wasmparser" -version = "0.221.3" +version = "0.227.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d06bfa36ab3ac2be0dee563380147a5b81ba10dd8885d7fbbc9eb574be67d185" +checksum = "0f51cad774fb3c9461ab9bccc9c62dfb7388397b5deda31bf40e8108ccd678b2" dependencies = [ "bitflags", "hashbrown 0.15.5", "indexmap 2.13.0", "semver", - "serde", ] [[package]] @@ -2189,6 +2851,7 @@ dependencies = [ "hashbrown 0.15.5", "indexmap 2.13.0", "semver", + "serde", ] [[package]] @@ -2204,130 +2867,154 @@ dependencies = [ [[package]] name = "wasmprinter" -version = "0.221.3" +version = "0.244.0" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "7343c42a97f2926c7819ff81b64012092ae954c5d83ddd30c9fcdefd97d0b283" +checksum = "09390d7b2bd7b938e563e4bff10aa345ef2e27a3bc99135697514ef54495e68f" dependencies = [ "anyhow", "termcolor", - "wasmparser 0.221.3", + "wasmparser 0.244.0", ] [[package]] name = "wasmtime" -version = "29.0.1" +version = "42.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "11976a250672556d1c4c04c6d5d7656ac9192ac9edc42a4587d6c21460010e69" +checksum = "39bef52be4fb4c5b47d36f847172e896bc94b35c9c6a6f07117686bd16ed89a7" dependencies = [ "addr2line", - "anyhow", "async-trait", "bitflags", "bumpalo", "cc", "cfg-if", "encoding_rs", + "futures", "fxprof-processed-profile", "gimli", - "hashbrown 0.14.5", - "indexmap 2.13.0", "ittapi", "libc", "log", "mach2", "memfd", - "object 0.36.7", + "object", "once_cell", - "paste", "postcard", - "psm", "pulley-interpreter", "rayon", - "rustix 0.38.44", + "rustix 1.1.4", "semver", "serde", "serde_derive", "serde_json", "smallvec", - "sptr", "target-lexicon", - "trait-variant", - "wasm-encoder 0.221.3", - "wasmparser 0.221.3", - "wasmtime-asm-macros", - "wasmtime-cache", - "wasmtime-component-macro", - "wasmtime-component-util", - "wasmtime-cranelift", + "tempfile", + "wasm-compose", + "wasm-encoder 0.244.0", + "wasmparser 0.244.0", "wasmtime-environ", - "wasmtime-fiber", - "wasmtime-jit-debug", - "wasmtime-jit-icache-coherence", - "wasmtime-math", - "wasmtime-slab", - "wasmtime-versioned-export-macros", - "wasmtime-winch", + "wasmtime-internal-cache", + "wasmtime-internal-component-macro", + "wasmtime-internal-component-util", + "wasmtime-internal-core", + "wasmtime-internal-cranelift", + "wasmtime-internal-fiber", + "wasmtime-internal-jit-debug", + "wasmtime-internal-jit-icache-coherence", + "wasmtime-internal-unwinder", + "wasmtime-internal-versioned-export-macros", + "wasmtime-internal-winch", "wat", - "windows-sys 0.59.0", + "windows-sys 0.61.2", ] 
[[package]] -name = "wasmtime-asm-macros" -version = "29.0.1" +name = "wasmtime-environ" +version = "42.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1f178b0d125201fbe9f75beaf849bd3e511891f9e45ba216a5b620802ccf64f2" +checksum = "bb637d5aa960ac391ca5a4cbf3e45807632e56beceeeb530e14dfa67fdfccc62" dependencies = [ - "cfg-if", + "anyhow", + "cpp_demangle", + "cranelift-bitset", + "cranelift-entity", + "gimli", + "hashbrown 0.15.5", + "indexmap 2.13.0", + "log", + "object", + "postcard", + "rustc-demangle", + "semver", + "serde", + "serde_derive", + "smallvec", + "target-lexicon", + "wasm-encoder 0.244.0", + "wasmparser 0.244.0", + "wasmprinter", + "wasmtime-internal-component-util", + "wasmtime-internal-core", ] [[package]] -name = "wasmtime-cache" -version = "29.0.1" +name = "wasmtime-internal-cache" +version = "42.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8b1161c8f62880deea07358bc40cceddc019f1c81d46007bc390710b2fe24ffc" +checksum = "4ab6c428c610ae3e7acd25ca2681b4d23672c50d8769240d9dda99b751d4deec" dependencies = [ - "anyhow", - "base64 0.21.7", + "base64", "directories-next", "log", "postcard", - "rustix 0.38.44", + "rustix 1.1.4", "serde", "serde_derive", "sha2", - "toml", - "windows-sys 0.59.0", + "toml 0.9.12+spec-1.1.0", + "wasmtime-environ", + "windows-sys 0.61.2", "zstd", ] [[package]] -name = "wasmtime-component-macro" -version = "29.0.1" +name = "wasmtime-internal-component-macro" +version = "42.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d74de6592ed945d0a602f71243982a304d5d02f1e501b638addf57f42d57dfaf" +checksum = "ca768b11d5e7de017e8c3d4d444da6b4ce3906f565bcbc253d76b4ecbb5d2869" dependencies = [ "anyhow", "proc-macro2", "quote", "syn", - "wasmtime-component-util", - "wasmtime-wit-bindgen", - "wit-parser 0.221.3", + "wasmtime-internal-component-util", + "wasmtime-internal-wit-bindgen", + "wit-parser 0.244.0", ] [[package]] -name = 
"wasmtime-component-util" -version = "29.0.1" +name = "wasmtime-internal-component-util" +version = "42.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "707dc7b3c112ab5a366b30cfe2fb5b2f8e6a0f682f16df96a5ec582bfe6f056e" +checksum = "763f504faf96c9b409051e96a1434655eea7f56a90bed9cb1e22e22c941253fd" [[package]] -name = "wasmtime-cranelift" -version = "29.0.1" +name = "wasmtime-internal-core" +version = "42.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "366be722674d4bf153290fbcbc4d7d16895cc82fb3e869f8d550ff768f9e9e87" +checksum = "03a4a3f055a804a2f3d86e816a9df78a8fa57762212a8506164959224a40cd48" dependencies = [ "anyhow", + "libm", +] + +[[package]] +name = "wasmtime-internal-cranelift" +version = "42.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "55154a91d22ad51f9551124ce7fb49ddddc6a82c4910813db4c790c97c9ccf32" +dependencies = [ "cfg-if", "cranelift-codegen", "cranelift-control", @@ -2335,102 +3022,77 @@ dependencies = [ "cranelift-frontend", "cranelift-native", "gimli", - "itertools 0.12.1", + "itertools", "log", - "object 0.36.7", + "object", + "pulley-interpreter", "smallvec", "target-lexicon", - "thiserror 1.0.69", - "wasmparser 0.221.3", + "thiserror 2.0.18", + "wasmparser 0.244.0", "wasmtime-environ", - "wasmtime-versioned-export-macros", + "wasmtime-internal-core", + "wasmtime-internal-unwinder", + "wasmtime-internal-versioned-export-macros", ] [[package]] -name = "wasmtime-environ" -version = "29.0.1" +name = "wasmtime-internal-fiber" +version = "42.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cdadc1af7097347aa276a4f008929810f726b5b46946971c660b6d421e9994ad" +checksum = "05decfad1021ad2efcca5c1be9855acb54b6ee7158ac4467119b30b7481508e3" dependencies = [ - "anyhow", - "cpp_demangle", - "cranelift-bitset", - "cranelift-entity", - "gimli", - "indexmap 2.13.0", - "log", - "object 0.36.7", - "postcard", - "rustc-demangle", 
- "semver", - "serde", - "serde_derive", - "smallvec", - "target-lexicon", - "wasm-encoder 0.221.3", - "wasmparser 0.221.3", - "wasmprinter", - "wasmtime-component-util", -] - -[[package]] -name = "wasmtime-fiber" -version = "29.0.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ccba90d4119f081bca91190485650730a617be1fff5228f8c4757ce133d21117" -dependencies = [ - "anyhow", "cc", "cfg-if", - "rustix 0.38.44", - "wasmtime-asm-macros", - "wasmtime-versioned-export-macros", - "windows-sys 0.59.0", + "libc", + "rustix 1.1.4", + "wasmtime-environ", + "wasmtime-internal-versioned-export-macros", + "windows-sys 0.61.2", ] [[package]] -name = "wasmtime-jit-debug" -version = "29.0.1" +name = "wasmtime-internal-jit-debug" +version = "42.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3e7b61488a5ee00c35c8c22de707c36c0aecacf419a3be803a6a2ba5e860f56a" +checksum = "924980c50427885fd4feed2049b88380178e567768aaabf29045b02eb262eaa7" dependencies = [ - "object 0.36.7", - "rustix 0.38.44", - "wasmtime-versioned-export-macros", + "cc", + "object", + "rustix 1.1.4", + "wasmtime-internal-versioned-export-macros", ] [[package]] -name = "wasmtime-jit-icache-coherence" -version = "29.0.1" +name = "wasmtime-internal-jit-icache-coherence" +version = "42.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ec5e8552e01692e6c2e5293171704fed8abdec79d1a6995a0870ab190e5747d1" +checksum = "c57d24e8d1334a0e5a8b600286ffefa1fc4c3e8176b110dff6fbc1f43c4a599b" dependencies = [ - "anyhow", "cfg-if", "libc", - "windows-sys 0.59.0", + "wasmtime-internal-core", + "windows-sys 0.61.2", ] [[package]] -name = "wasmtime-math" -version = "29.0.1" +name = "wasmtime-internal-unwinder" +version = "42.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "29210ec2aa25e00f4d54605cedaf080f39ec01a872c5bd520ad04c67af1dde17" +checksum = 
"3a1a144bd4393593a868ba9df09f34a6a360cb5db6e71815f20d3f649c6e6735" dependencies = [ - "libm", + "cfg-if", + "cranelift-codegen", + "log", + "object", + "wasmtime-environ", ] [[package]] -name = "wasmtime-slab" -version = "29.0.1" +name = "wasmtime-internal-versioned-export-macros" +version = "42.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fcb5821a96fa04ac14bc7b158bb3d5cd7729a053db5a74dad396cd513a5e5ccf" - -[[package]] -name = "wasmtime-versioned-export-macros" -version = "29.0.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "86ff86db216dc0240462de40c8290887a613dddf9685508eb39479037ba97b5b" +checksum = "9a6948b56bb00c62dbd205ea18a4f1ceccbe1e4b8479651fdb0bab2553790f20" dependencies = [ "proc-macro2", "quote", @@ -2438,32 +3100,85 @@ dependencies = [ ] [[package]] -name = "wasmtime-winch" -version = "29.0.1" +name = "wasmtime-internal-winch" +version = "42.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fdbabfb8f20502d5e1d81092b9ead3682ae59988487aafcd7567387b7a43cf8f" +checksum = "9130b3ab6fb01be80b27b9a2c84817af29ae8224094f2503d2afa9fea5bf9d00" dependencies = [ - "anyhow", "cranelift-codegen", "gimli", - "object 0.36.7", + "log", + "object", "target-lexicon", - "wasmparser 0.221.3", - "wasmtime-cranelift", + "wasmparser 0.244.0", "wasmtime-environ", + "wasmtime-internal-cranelift", "winch-codegen", ] [[package]] -name = "wasmtime-wit-bindgen" -version = "29.0.1" +name = "wasmtime-internal-wit-bindgen" +version = "42.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8358319c2dd1e4db79e3c1c5d3a5af84956615343f9f89f4e4996a36816e06e6" +checksum = "102d0d70dbfede00e4cc9c24e86df6d32c03bf6f5ad06b5d6c76b0a4a5004c4a" dependencies = [ "anyhow", + "bitflags", "heck", "indexmap 2.13.0", - "wit-parser 0.221.3", + "wit-parser 0.244.0", +] + +[[package]] +name = "wasmtime-wasi" +version = "42.0.1" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "ea938f6f4f11e5ffe6d8b6f34c9a994821db9511c3e9c98e535896f27d06bb92" +dependencies = [ + "async-trait", + "bitflags", + "bytes", + "cap-fs-ext", + "cap-net-ext", + "cap-rand", + "cap-std", + "cap-time-ext", + "fs-set-times", + "futures", + "io-extras", + "io-lifetimes", + "rustix 1.1.4", + "system-interface", + "thiserror 2.0.18", + "tokio", + "tracing", + "url", + "wasmtime", + "wasmtime-wasi-io", + "wiggle", + "windows-sys 0.61.2", +] + +[[package]] +name = "wasmtime-wasi-io" +version = "42.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "71cb16a88d0443b509d6eca4298617233265179090abf03e0a8042b9b251e9da" +dependencies = [ + "async-trait", + "bytes", + "futures", + "tracing", + "wasmtime", +] + +[[package]] +name = "wast" +version = "35.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2ef140f1b49946586078353a453a1d28ba90adfc54dde75710bc1931de204d68" +dependencies = [ + "leb128", ] [[package]] @@ -2485,7 +3200,47 @@ version = "1.245.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "cd48d1679b6858988cb96b154dda0ec5bbb09275b71db46057be37332d5477be" dependencies = [ - "wast", + "wast 245.0.1", +] + +[[package]] +name = "wiggle" +version = "42.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2dca2bf96d20f0c70e6741cc6c8c1a9ee4c3c0310c7ad1971242628c083cc9a5" +dependencies = [ + "bitflags", + "thiserror 2.0.18", + "tracing", + "wasmtime", + "wasmtime-environ", + "wiggle-macro", +] + +[[package]] +name = "wiggle-generate" +version = "42.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d0d8c016d6d3ec6dc6b8c80c23cede4ee2386ccf347d01984f7991d7659f73ef" +dependencies = [ + "heck", + "proc-macro2", + "quote", + "syn", + "wasmtime-environ", + "witx", +] + +[[package]] +name = "wiggle-macro" +version = "42.0.1" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "91a267096e48857096f035fffca29e22f0bbe840af4d74a6725eb695e1782110" +dependencies = [ + "proc-macro2", + "quote", + "syn", + "wiggle-generate", ] [[package]] @@ -2521,20 +3276,21 @@ checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" [[package]] name = "winch-codegen" -version = "29.0.1" +version = "42.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2f849ef2c5f46cb0a20af4b4487aaa239846e52e2c03f13fa3c784684552859c" +checksum = "1977857998e4dd70d26e2bfc0618a9684a2fb65b1eca174dc13f3b3e9c2159ca" dependencies = [ - "anyhow", + "cranelift-assembler-x64", "cranelift-codegen", "gimli", "regalloc2", "smallvec", "target-lexicon", - "thiserror 1.0.69", - "wasmparser 0.221.3", - "wasmtime-cranelift", + "thiserror 2.0.18", + "wasmparser 0.244.0", "wasmtime-environ", + "wasmtime-internal-core", + "wasmtime-internal-cranelift", ] [[package]] @@ -2605,15 +3361,6 @@ dependencies = [ "windows-targets 0.52.6", ] -[[package]] -name = "windows-sys" -version = "0.59.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1e38bc4d79ed67fd075bcc251a1c39b32a1776bbe92e5bef1f0bf1f8c531853b" -dependencies = [ - "windows-targets 0.52.6", -] - [[package]] name = "windows-sys" version = "0.60.2" @@ -2770,13 +3517,44 @@ dependencies = [ "memchr", ] +[[package]] +name = "winx" +version = "0.36.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3f3fd376f71958b862e7afb20cfe5a22830e1963462f3a17f49d82a6c1d1f42d" +dependencies = [ + "bitflags", + "windows-sys 0.52.0", +] + +[[package]] +name = "wit-bindgen" +version = "0.41.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "10fb6648689b3929d56bbc7eb1acf70c9a42a29eb5358c67c10f54dbd5d695de" +dependencies = [ + "wit-bindgen-rt", + "wit-bindgen-rust-macro 0.41.0", +] + [[package]] name = "wit-bindgen" version = "0.51.0" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "d7249219f66ced02969388cf2bb044a09756a083d0fab1e566056b04d9fbcaa5" dependencies = [ - "wit-bindgen-rust-macro", + "wit-bindgen-rust-macro 0.51.0", +] + +[[package]] +name = "wit-bindgen-core" +version = "0.41.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "92fa781d4f2ff6d3f27f3cc9b74a73327b31ca0dc4a3ef25a0ce2983e0e5af9b" +dependencies = [ + "anyhow", + "heck", + "wit-parser 0.227.1", ] [[package]] @@ -2790,6 +3568,33 @@ dependencies = [ "wit-parser 0.244.0", ] +[[package]] +name = "wit-bindgen-rt" +version = "0.41.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c4db52a11d4dfb0a59f194c064055794ee6564eb1ced88c25da2cf76e50c5621" +dependencies = [ + "bitflags", + "futures", + "once_cell", +] + +[[package]] +name = "wit-bindgen-rust" +version = "0.41.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9d0809dc5ba19e2e98661bf32fc0addc5a3ca5bf3a6a7083aa6ba484085ff3ce" +dependencies = [ + "anyhow", + "heck", + "indexmap 2.13.0", + "prettyplease", + "syn", + "wasm-metadata 0.227.1", + "wit-bindgen-core 0.41.0", + "wit-component 0.227.1", +] + [[package]] name = "wit-bindgen-rust" version = "0.51.0" @@ -2801,9 +3606,24 @@ dependencies = [ "indexmap 2.13.0", "prettyplease", "syn", - "wasm-metadata", - "wit-bindgen-core", - "wit-component", + "wasm-metadata 0.244.0", + "wit-bindgen-core 0.51.0", + "wit-component 0.244.0", +] + +[[package]] +name = "wit-bindgen-rust-macro" +version = "0.41.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ad19eec017904e04c60719592a803ee5da76cb51c81e3f6fbf9457f59db49799" +dependencies = [ + "anyhow", + "prettyplease", + "proc-macro2", + "quote", + "syn", + "wit-bindgen-core 0.41.0", + "wit-bindgen-rust 0.41.0", ] [[package]] @@ -2817,8 +3637,27 @@ dependencies = [ "proc-macro2", "quote", "syn", - "wit-bindgen-core", - "wit-bindgen-rust", + 
"wit-bindgen-core 0.51.0", + "wit-bindgen-rust 0.51.0", +] + +[[package]] +name = "wit-component" +version = "0.227.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "635c3adc595422cbf2341a17fb73a319669cc8d33deed3a48368a841df86b676" +dependencies = [ + "anyhow", + "bitflags", + "indexmap 2.13.0", + "log", + "serde", + "serde_derive", + "serde_json", + "wasm-encoder 0.227.1", + "wasm-metadata 0.227.1", + "wasmparser 0.227.1", + "wit-parser 0.227.1", ] [[package]] @@ -2835,16 +3674,16 @@ dependencies = [ "serde_derive", "serde_json", "wasm-encoder 0.244.0", - "wasm-metadata", + "wasm-metadata 0.244.0", "wasmparser 0.244.0", "wit-parser 0.244.0", ] [[package]] name = "wit-parser" -version = "0.221.3" +version = "0.227.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "896112579ed56b4a538b07a3d16e562d101ff6265c46b515ce0c701eef16b2ac" +checksum = "ddf445ed5157046e4baf56f9138c124a0824d4d1657e7204d71886ad8ce2fc11" dependencies = [ "anyhow", "id-arena", @@ -2855,7 +3694,7 @@ dependencies = [ "serde_derive", "serde_json", "unicode-xid", - "wasmparser 0.221.3", + "wasmparser 0.227.1", ] [[package]] @@ -2876,6 +3715,47 @@ dependencies = [ "wasmparser 0.244.0", ] +[[package]] +name = "witx" +version = "0.9.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e366f27a5cabcddb2706a78296a40b8fcc451e1a6aba2fc1d94b4a01bdaaef4b" +dependencies = [ + "anyhow", + "log", + "thiserror 1.0.69", + "wast 35.0.2", +] + +[[package]] +name = "writeable" +version = "0.6.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9edde0db4769d2dc68579893f2306b26c6ecfbe0ef499b013d731b7b9247e0b9" + +[[package]] +name = "yoke" +version = "0.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "72d6e5c6afb84d73944e5cedb052c4680d5657337201555f9f2a16b7406d4954" +dependencies = [ + "stable_deref_trait", + "yoke-derive", + "zerofrom", +] + +[[package]] +name = 
"yoke-derive" +version = "0.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b659052874eb698efe5b9e8cf382204678a0086ebf46982b79d6ca3182927e5d" +dependencies = [ + "proc-macro2", + "quote", + "syn", + "synstructure", +] + [[package]] name = "zerocopy" version = "0.8.40" @@ -2896,6 +3776,60 @@ dependencies = [ "syn", ] +[[package]] +name = "zerofrom" +version = "0.1.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "50cc42e0333e05660c3587f3bf9d0478688e15d870fab3346451ce7f8c9fbea5" +dependencies = [ + "zerofrom-derive", +] + +[[package]] +name = "zerofrom-derive" +version = "0.1.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d71e5d6e06ab090c67b5e44993ec16b72dcbaabc526db883a360057678b48502" +dependencies = [ + "proc-macro2", + "quote", + "syn", + "synstructure", +] + +[[package]] +name = "zerotrie" +version = "0.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2a59c17a5562d507e4b54960e8569ebee33bee890c70aa3fe7b97e85a9fd7851" +dependencies = [ + "displaydoc", + "yoke", + "zerofrom", +] + +[[package]] +name = "zerovec" +version = "0.11.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6c28719294829477f525be0186d13efa9a3c602f7ec202ca9e353d310fb9a002" +dependencies = [ + "yoke", + "zerofrom", + "zerovec-derive", +] + +[[package]] +name = "zerovec-derive" +version = "0.11.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "eadce39539ca5cb3985590102671f2567e659fca9666581ad3411d59207951f3" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + [[package]] name = "zmij" version = "1.0.21" diff --git a/Cargo.toml b/Cargo.toml index 52c19fd..865e8ee 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -1,7 +1,9 @@ [workspace] members = [ "crates/amplifier-core", + "crates/amplifier-guest", "bindings/python", + "bindings/node", ] resolver = "2" diff --git a/bindings/node/.gitignore 
b/bindings/node/.gitignore new file mode 100644 index 0000000..16bc516 --- /dev/null +++ b/bindings/node/.gitignore @@ -0,0 +1,10 @@ +# Build artifacts (generated by napi-rs) +*.node +index.js +index.d.ts + +# Dependencies +node_modules/ + +# Rust build output +target/ diff --git a/bindings/node/Cargo.toml b/bindings/node/Cargo.toml new file mode 100644 index 0000000..e4c1da4 --- /dev/null +++ b/bindings/node/Cargo.toml @@ -0,0 +1,21 @@ +[package] +name = "amplifier-core-node" +version = "1.0.10" +edition = "2021" +description = "Napi-RS bridge for amplifier-core Rust kernel" +license = "MIT" +publish = false + +[lib] +crate-type = ["cdylib"] + +[dependencies] +amplifier-core = { path = "../../crates/amplifier-core", features = ["wasm"] } +napi = { version = "2", features = ["async", "serde-json", "napi9"] } +napi-derive = "2" +tokio = { version = "1", features = ["rt-multi-thread"] } +serde_json = "1" +log = "0.4" + +[build-dependencies] +napi-build = "2" diff --git a/bindings/node/__tests__/cancellation.test.ts b/bindings/node/__tests__/cancellation.test.ts new file mode 100644 index 0000000..8f12d70 --- /dev/null +++ b/bindings/node/__tests__/cancellation.test.ts @@ -0,0 +1,60 @@ +import { describe, it, expect } from 'vitest' +import { JsCancellationToken } from '../index.js' + +describe('JsCancellationToken', () => { + it('creates with default state (not cancelled, not graceful, not immediate)', () => { + const token = new JsCancellationToken() + expect(token.isCancelled).toBe(false) + expect(token.isGraceful).toBe(false) + expect(token.isImmediate).toBe(false) + }) + + it('requestGraceful transitions to graceful', () => { + const token = new JsCancellationToken() + token.requestGraceful() + expect(token.isCancelled).toBe(true) + expect(token.isGraceful).toBe(true) + expect(token.isImmediate).toBe(false) + }) + + it('requestImmediate transitions to immediate', () => { + const token = new JsCancellationToken() + token.requestImmediate() + 
expect(token.isCancelled).toBe(true) + expect(token.isImmediate).toBe(true) + }) + + it('graceful then immediate escalates', () => { + const token = new JsCancellationToken() + token.requestGraceful() + expect(token.isCancelled).toBe(true) + expect(token.isGraceful).toBe(true) + token.requestImmediate() + expect(token.isCancelled).toBe(true) + expect(token.isImmediate).toBe(true) + }) + + it('reset returns to uncancelled state', () => { + const token = new JsCancellationToken() + token.requestGraceful() + expect(token.isCancelled).toBe(true) + token.reset() + expect(token.isCancelled).toBe(false) + expect(token.isGraceful).toBe(false) + expect(token.isImmediate).toBe(false) + }) + + it('requestGraceful accepts optional reason string', () => { + const token = new JsCancellationToken() + token.requestGraceful('user requested stop') + expect(token.isCancelled).toBe(true) + expect(token.isGraceful).toBe(true) + }) + + it('requestImmediate accepts optional reason string', () => { + const token = new JsCancellationToken() + token.requestImmediate('timeout exceeded') + expect(token.isCancelled).toBe(true) + expect(token.isImmediate).toBe(true) + }) +}) diff --git a/bindings/node/__tests__/coordinator.test.ts b/bindings/node/__tests__/coordinator.test.ts new file mode 100644 index 0000000..747b07d --- /dev/null +++ b/bindings/node/__tests__/coordinator.test.ts @@ -0,0 +1,84 @@ +import { describe, it, expect } from 'vitest' +import { JsCoordinator } from '../index.js' +import { emptyConfig } from './fixtures' + +describe('JsCoordinator', () => { + it('creates with empty config (toolNames=[], providerNames=[], hasOrchestrator=false, hasContext=false)', () => { + const coord = new JsCoordinator(emptyConfig) + expect(coord.toolNames).toEqual([]) + expect(coord.providerNames).toEqual([]) + expect(coord.hasOrchestrator).toBe(false) + expect(coord.hasContext).toBe(false) + }) + + it('throws on invalid JSON config', () => { + expect(() => new JsCoordinator('invalid 
json')).toThrow() + }) + + it('registers and retrieves capabilities (registerCapability + getCapability roundtrip)', () => { + const coord = new JsCoordinator(emptyConfig) + coord.registerCapability('streaming', JSON.stringify({ enabled: true })) + const result = coord.getCapability('streaming') + expect(result).not.toBeNull() + const parsed = JSON.parse(result as string) + expect(parsed).toEqual({ enabled: true }) + }) + + it('getCapability returns null for missing', () => { + const coord = new JsCoordinator(emptyConfig) + const result = coord.getCapability('nonexistent') + expect(result).toBeNull() + }) + + // createHookRegistry() creates a NEW detached instance each call — this is the + // known limitation documented by the rename from `.hooks` getter. Use a + // shared JsHookRegistry if you need persistent hook registration. + it('createHookRegistry() returns a JsHookRegistry with listHandlers', () => { + const coord = new JsCoordinator(emptyConfig) + const hooks = coord.createHookRegistry() + expect(hooks).toBeDefined() + expect(typeof hooks.listHandlers).toBe('function') + }) + + it('createHookRegistry creates a new instance each call (pins detached behavior)', () => { + const coord = new JsCoordinator(emptyConfig) + const h1 = coord.createHookRegistry() + const h2 = coord.createHookRegistry() + expect(h1).not.toBe(h2) + }) + + it('provides access to cancellation subsystem (coord.cancellation.isCancelled === false)', () => { + const coord = new JsCoordinator(emptyConfig) + const cancellation = coord.cancellation + expect(cancellation).toBeDefined() + expect(cancellation.isCancelled).toBe(false) + }) + + it('resetTurn resets turn tracking (should not throw)', () => { + const coord = new JsCoordinator(emptyConfig) + expect(() => coord.resetTurn()).not.toThrow() + }) + + it('toDict returns coordinator state (has tools, providers, has_orchestrator, has_context, capabilities)', () => { + const coord = new JsCoordinator(emptyConfig) + const dict = coord.toDict() + 
expect(dict.tools).toEqual([]) + expect(dict.providers).toEqual([]) + expect(dict.has_orchestrator).toBe(false) + expect(dict.has_context).toBe(false) + expect(dict).toHaveProperty('capabilities') + }) + + it('config returns original config (coord.config is defined)', () => { + const coord = new JsCoordinator('{"key":"value"}') + const config = coord.config + expect(config).toBeDefined() + const parsed = JSON.parse(config) + expect(parsed).toEqual({ key: 'value' }) + }) + + it('cleanup completes without error', async () => { + const coord = new JsCoordinator(emptyConfig) + await coord.cleanup() + }) +}) diff --git a/bindings/node/__tests__/errors.test.ts b/bindings/node/__tests__/errors.test.ts new file mode 100644 index 0000000..10fa739 --- /dev/null +++ b/bindings/node/__tests__/errors.test.ts @@ -0,0 +1,56 @@ +import { describe, it, expect } from 'vitest' +import { JsAmplifierSession, amplifierErrorToJs } from '../index.js' + +describe('Error bridging — session constructor', () => { + it('invalid JSON config throws with /Invalid config JSON/ message', () => { + expect(() => new JsAmplifierSession('not json')).toThrow(/Invalid config JSON/) + }) + + it('missing orchestrator throws with /orchestrator/ in message', () => { + const config = JSON.stringify({ session: { context: 'context-simple' } }) + expect(() => new JsAmplifierSession(config)).toThrow(/orchestrator/) + }) + + it('missing context throws with /context/ in message', () => { + const config = JSON.stringify({ session: { orchestrator: 'loop-basic' } }) + expect(() => new JsAmplifierSession(config)).toThrow(/context/) + }) +}) + +describe('amplifierErrorToJs — variant to typed error object', () => { + it('converts session variant to SessionError code', () => { + const err = amplifierErrorToJs('session', 'not initialized') + expect(err.code).toBe('SessionError') + expect(err.message).toBe('not initialized') + }) + + it('converts tool variant to ToolError code', () => { + const err = 
amplifierErrorToJs('tool', 'tool not found: bash') + expect(err.code).toBe('ToolError') + expect(err.message).toBe('tool not found: bash') + }) + + it('converts provider variant to ProviderError code', () => { + const err = amplifierErrorToJs('provider', 'rate limited') + expect(err.code).toBe('ProviderError') + expect(err.message).toBe('rate limited') + }) + + it('converts hook variant to HookError code', () => { + const err = amplifierErrorToJs('hook', 'handler failed') + expect(err.code).toBe('HookError') + expect(err.message).toBe('handler failed') + }) + + it('converts context variant to ContextError code', () => { + const err = amplifierErrorToJs('context', 'compaction failed') + expect(err.code).toBe('ContextError') + expect(err.message).toBe('compaction failed') + }) + + it('converts unknown variant to AmplifierError fallback code', () => { + const err = amplifierErrorToJs('unknown', 'something went wrong') + expect(err.code).toBe('AmplifierError') + expect(err.message).toBe('something went wrong') + }) +}) diff --git a/bindings/node/__tests__/fixtures.ts b/bindings/node/__tests__/fixtures.ts new file mode 100644 index 0000000..5a1cf54 --- /dev/null +++ b/bindings/node/__tests__/fixtures.ts @@ -0,0 +1,5 @@ +export const validConfig = JSON.stringify({ + session: { orchestrator: 'loop-basic', context: 'context-simple' }, +}) + +export const emptyConfig = '{}' diff --git a/bindings/node/__tests__/hooks.test.ts b/bindings/node/__tests__/hooks.test.ts new file mode 100644 index 0000000..8ebd9eb --- /dev/null +++ b/bindings/node/__tests__/hooks.test.ts @@ -0,0 +1,88 @@ +import { describe, it, expect } from 'vitest' +import { JsHookRegistry, HookAction } from '../index.js' + +describe('JsHookRegistry', () => { + it('creates empty registry (listHandlers returns empty object)', () => { + const registry = new JsHookRegistry() + const handlers = registry.listHandlers() + expect(handlers).toEqual({}) + }) + + it('emits with no handlers returns Continue', async () => { 
+ const registry = new JsHookRegistry() + const result = await registry.emit('tool:pre', '{"tool":"grep"}') + expect(result.action).toBe(HookAction.Continue) + }) + + it('registers and emits to a JS handler', async () => { + const registry = new JsHookRegistry() + let handlerCalled = false + let receivedEvent = '' + let receivedData = '' + + registry.register('tool:pre', (event: string, data: string) => { + handlerCalled = true + receivedEvent = event + receivedData = data + return JSON.stringify({ action: 'continue' }) + }, 10, 'my-hook') + + await registry.emit('tool:pre', '{"tool":"grep"}') + + expect(handlerCalled).toBe(true) + expect(receivedEvent).toBe('tool:pre') + expect(JSON.parse(receivedData)).toHaveProperty('tool', 'grep') + }) + + it('listHandlers returns registered handler names', () => { + const registry = new JsHookRegistry() + registry.register('tool:pre', (_event: string, _data: string) => { + return JSON.stringify({ action: 'continue' }) + }, 10, 'my-hook') + + const handlers = registry.listHandlers() + expect(handlers['tool:pre']).toContain('my-hook') + }) + + it('handler returning deny stops pipeline', async () => { + const registry = new JsHookRegistry() + registry.register('tool:pre', (_event: string, _data: string) => { + return JSON.stringify({ action: 'deny', reason: 'blocked' }) + }, 10, 'deny-hook') + + const result = await registry.emit('tool:pre', '{"tool":"rm"}') + expect(result.action).toBe(HookAction.Deny) + expect(result.reason).toBe('blocked') + }) + + it('returns Deny when hook handler returns invalid JSON (fail-closed)', async () => { + const registry = new JsHookRegistry() + registry.register( + 'tool:pre', + (_event: string, _data: string) => 'NOT VALID JSON {{{', + 10, + 'bad-json-hook' + ) + const result = await registry.emit('tool:pre', '{}') + expect(result.action).toBe(HookAction.Deny) + expect(result.reason).toContain('invalid') + }) + + it('setDefaultFields merges into emit data', async () => { + const registry = new 
JsHookRegistry() + let receivedData = '' + + registry.register('tool:pre', (_event: string, data: string) => { + receivedData = data + return JSON.stringify({ action: 'continue' }) + }, 10, 'capture-hook') + + registry.setDefaultFields('{"session_id":"s-123","custom":"value"}') + await registry.emit('tool:pre', '{"tool":"grep"}') + + const parsed = JSON.parse(receivedData) + expect(parsed).toHaveProperty('session_id', 's-123') + expect(parsed).toHaveProperty('custom', 'value') + expect(parsed).toHaveProperty('tool', 'grep') + }) +}) diff --git a/bindings/node/__tests__/integration.test.ts b/bindings/node/__tests__/integration.test.ts new file mode 100644 index 0000000..fb75a83 --- /dev/null +++ b/bindings/node/__tests__/integration.test.ts @@ -0,0 +1,211 @@ +import { describe, it, expect } from 'vitest' +import { + JsAmplifierSession, + JsCoordinator, + JsHookRegistry, + JsCancellationToken, + JsToolBridge, + HookAction, + ContextInjectionRole, + UserMessageLevel, +} from '../index.js' +import { validConfig, emptyConfig } from './fixtures' + +describe('Full session lifecycle', () => { + it('session -> coordinator -> hooks -> cancel lifecycle', async () => { + // Create session + const session = new JsAmplifierSession(validConfig) + expect(session.sessionId).toBeTruthy() + expect(session.isInitialized).toBe(false) + + // Access coordinator (createCoordinator returns a new instance from cached config) + const coord = session.createCoordinator() + expect(coord).toBeDefined() + + // Register capability and verify roundtrip + coord.registerCapability('streaming', JSON.stringify({ enabled: true, format: 'sse' })) + const cap = coord.getCapability('streaming') + expect(cap).not.toBeNull() + const parsed = JSON.parse(cap as string) + expect(parsed).toEqual({ enabled: true, format: 'sse' }) + + // Use cancellation: graceful + const cancellation = coord.cancellation + cancellation.requestGraceful('user stop') + expect(cancellation.isCancelled).toBe(true) + 
expect(cancellation.isGraceful).toBe(true) + + // Reset cancellation + cancellation.reset() + expect(cancellation.isCancelled).toBe(false) + + // Cleanup session + session.setInitialized() + expect(session.isInitialized).toBe(true) + await session.cleanup() + expect(session.isInitialized).toBe(false) + }) +}) + +describe('Hook handler roundtrip', () => { + it('JS handler receives event data and returns HookResult', async () => { + const registry = new JsHookRegistry() + let receivedEvent = '' + let receivedData: Record | null = null + + registry.register('tool:pre', (event: string, data: string) => { + receivedEvent = event + receivedData = JSON.parse(data) + return JSON.stringify({ action: 'continue' }) + }, 5, 'capture-handler') + + const result = await registry.emit('tool:pre', JSON.stringify({ tool_name: 'bash', command: 'ls' })) + + expect(receivedEvent).toBe('tool:pre') + expect(receivedData).toHaveProperty('tool_name', 'bash') + expect(receivedData).toHaveProperty('command', 'ls') + expect(result.action).toBe(HookAction.Continue) + }) + + it('deny handler short-circuits pipeline', async () => { + const registry = new JsHookRegistry() + let secondHandlerCalled = false + + // Denier at priority 0 (runs first — lower priority = first) + registry.register('tool:pre', (_event: string, _data: string) => { + return JSON.stringify({ action: 'deny', reason: 'not allowed' }) + }, 0, 'denier') + + // After-deny at priority 10 (should NOT run) + registry.register('tool:pre', (_event: string, _data: string) => { + secondHandlerCalled = true + return JSON.stringify({ action: 'continue' }) + }, 10, 'after-deny') + + const result = await registry.emit('tool:pre', JSON.stringify({ tool_name: 'rm' })) + + expect(result.action).toBe(HookAction.Deny) + expect(result.reason).toBe('not allowed') + expect(secondHandlerCalled).toBe(false) + }) +}) + +describe('Tool bridge execution', () => { + it('creates calculator tool and verifies name, spec, and execution', async () => { + 
const calculator = new JsToolBridge( + 'calculator', + 'Adds two numbers', + JSON.stringify({ + type: 'object', + properties: { + a: { type: 'number' }, + b: { type: 'number' }, + }, + }), + async (inputJson: string) => { + const input = JSON.parse(inputJson) + const sum = input.a + input.b + return JSON.stringify({ success: true, output: String(sum) }) + } + ) + + // Verify name + expect(calculator.name).toBe('calculator') + + // Verify getSpec() roundtrip + const spec = JSON.parse(calculator.getSpec()) + expect(spec.name).toBe('calculator') + expect(spec.parameters.type).toBe('object') + + // Execute and verify result + const resultJson = await calculator.execute(JSON.stringify({ a: 3, b: 4 })) + const result = JSON.parse(resultJson) + expect(result.success).toBe(true) + expect(result.output).toBe('7') + }) +}) + +describe('CancellationToken state machine', () => { + it('full cycle: None -> Graceful -> Immediate -> reset -> None', () => { + const token = new JsCancellationToken() + + // Initial state: None + expect(token.isCancelled).toBe(false) + expect(token.isGraceful).toBe(false) + expect(token.isImmediate).toBe(false) + + // None -> Graceful + token.requestGraceful() + expect(token.isCancelled).toBe(true) + expect(token.isGraceful).toBe(true) + expect(token.isImmediate).toBe(false) + + // Graceful -> Immediate + token.requestImmediate() + expect(token.isCancelled).toBe(true) + expect(token.isGraceful).toBe(false) + expect(token.isImmediate).toBe(true) + + // Immediate -> reset -> None + token.reset() + expect(token.isCancelled).toBe(false) + expect(token.isGraceful).toBe(false) + expect(token.isImmediate).toBe(false) + }) +}) + +describe('Type fidelity', () => { + it('SessionConfig validates required fields with extra providers/metadata', () => { + const config = JSON.stringify({ + session: { orchestrator: 'loop-basic', context: 'context-simple' }, + providers: [{ name: 'openai', model: 'gpt-4' }], + metadata: { user: 'test-user', env: 'ci' }, + }) + const 
session = new JsAmplifierSession(config) + expect(session.sessionId).toBeTruthy() + expect(session.status).toBe('running') + }) + + it('HookResult fields roundtrip with inject_context action', async () => { + const registry = new JsHookRegistry() + + registry.register('tool:pre', (_event: string, _data: string) => { + return JSON.stringify({ + action: 'inject_context', + context_injection: 'You are a helpful assistant', + context_injection_role: 'system', + ephemeral: true, + suppress_output: false, + user_message: 'Context injected', + user_message_level: 'info', + user_message_source: 'integration-test', + }) + }, 5, 'inject-handler') + + const result = await registry.emit('tool:pre', '{}') + + expect(result.action).toBe(HookAction.InjectContext) + expect(result.contextInjection).toBe('You are a helpful assistant') + expect(result.contextInjectionRole).toBe(ContextInjectionRole.System) + expect(result.ephemeral).toBe(true) + expect(result.suppressOutput).toBe(false) + expect(result.userMessage).toBe('Context injected') + expect(result.userMessageLevel).toBe(UserMessageLevel.Info) + expect(result.userMessageSource).toBe('integration-test') + }) + + it('Coordinator toDict returns all expected fields', () => { + const coord = new JsCoordinator(emptyConfig) + const dict = coord.toDict() + + // Arrays + expect(Array.isArray(dict.tools)).toBe(true) + expect(Array.isArray(dict.providers)).toBe(true) + expect(dict.capabilities).toBeDefined() + expect(typeof dict.capabilities).toBe('object') + + // Booleans + expect(typeof dict.has_orchestrator).toBe('boolean') + expect(typeof dict.has_context).toBe('boolean') + }) +}) diff --git a/bindings/node/__tests__/modules.test.ts b/bindings/node/__tests__/modules.test.ts new file mode 100644 index 0000000..709e459 --- /dev/null +++ b/bindings/node/__tests__/modules.test.ts @@ -0,0 +1,100 @@ +import { describe, it, expect } from 'vitest' +import { JsToolBridge } from '../index.js' + +describe('JsToolBridge', () => { + it('creates a 
JsToolBridge wrapping a TS tool object', () => { + const tool = new JsToolBridge( + 'echo', + 'Echoes back the input', + '{"type": "object", "properties": {"message": {"type": "string"}}}', + async (inputJson: string) => { + const input = JSON.parse(inputJson) + return JSON.stringify({ success: true, output: input.message }) + } + ) + + expect(tool.name).toBe('echo') + expect(tool.description).toBe('Echoes back the input') + }) + + it('executes a tool through the bridge', async () => { + const tool = new JsToolBridge( + 'greet', + 'Greets someone by name', + '{"type": "object", "properties": {"name": {"type": "string"}}}', + async (inputJson: string) => { + const input = JSON.parse(inputJson) + return JSON.stringify({ success: true, output: `Hello, ${input.name}!` }) + } + ) + + const resultJson = await tool.execute(JSON.stringify({ name: 'World' })) + const result = JSON.parse(resultJson) + + expect(result.output).toBe('Hello, World!') + expect(result.success).toBe(true) + }) + + it('handles tool execution errors', async () => { + const tool = new JsToolBridge( + 'failing', + 'A tool that always fails', + '{}', + async (_inputJson: string) => { + return JSON.stringify({ success: false, error: 'Something went wrong' }) + } + ) + + const resultJson = await tool.execute('{}') + const result = JSON.parse(resultJson) + + expect(result.success).toBe(false) + expect(result.error).toBe('Something went wrong') + }) + + it('getSpec returns valid JSON with name, description, and parameters', () => { + const params = '{"type": "object", "properties": {"x": {"type": "number"}}}' + const tool = new JsToolBridge( + 'calc', + 'A calculator tool', + params, + async (_inputJson: string) => '{}' + ) + + const spec = JSON.parse(tool.getSpec()) + + expect(spec.name).toBe('calc') + expect(spec.description).toBe('A calculator tool') + expect(spec.parameters).toEqual(JSON.parse(params)) + }) + + it('rejects when the JS callback throws an exception', async () => { + const tool = new 
JsToolBridge( + 'thrower', + 'A tool whose callback throws', + '{}', + async (_inputJson: string) => { + throw new Error('callback exploded') + } + ) + + await expect(tool.execute('{}')).rejects.toThrow('callback exploded') + }) + + // Expect Rust-side log: "JsToolBridge::get_spec() failed to parse parameters_json" + // This is intentional — we're testing the fallback behavior for invalid JSON. + it('getSpec falls back to empty object for malformed parametersJson', () => { + const tool = new JsToolBridge( + 'broken', + 'Tool with bad params', + 'not valid json{{{', + async (_inputJson: string) => '{}' + ) + + const spec = JSON.parse(tool.getSpec()) + + expect(spec.name).toBe('broken') + expect(spec.description).toBe('Tool with bad params') + expect(spec.parameters).toEqual({}) + }) +}) diff --git a/bindings/node/__tests__/node-wasm-session.test.ts b/bindings/node/__tests__/node-wasm-session.test.ts new file mode 100644 index 0000000..07cb3a0 --- /dev/null +++ b/bindings/node/__tests__/node-wasm-session.test.ts @@ -0,0 +1,50 @@ +import { describe, it, expect } from 'vitest' +import { resolveModule, loadWasmFromPath } from '../index.js' +import * as path from 'path' +import * as fs from 'fs' +import * as os from 'os' + +/** + * Tests the TypeScript → Napi-RS → Rust resolver → wasmtime → WASM tool pipeline. + * + * Uses a temp directory with a single echo-tool.wasm to ensure deterministic + * resolution (the fixture directory has multiple .wasm files and readdir order + * is filesystem-dependent). 
+ */ +describe('Node WASM session pipeline', () => { + const fixtureBase = path.resolve(__dirname, '..', '..', '..', 'tests', 'fixtures', 'wasm') + + function withEchoToolDir(fn: (dir: string) => void) { + const tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), 'amplifier-node-wasm-test-')) + try { + fs.copyFileSync( + path.join(fixtureBase, 'echo-tool.wasm'), + path.join(tmpDir, 'echo-tool.wasm') + ) + fn(tmpDir) + } finally { + fs.rmSync(tmpDir, { recursive: true }) + } + } + + it('resolveModule returns transport=wasm and moduleType=tool for echo-tool', () => { + withEchoToolDir((dir) => { + const manifest = resolveModule(dir) + expect(manifest.transport).toBe('wasm') + expect(manifest.moduleType).toBe('tool') + expect(manifest.artifactType).toBe('wasm') + }) + }) + + it('loadWasmFromPath loads echo-tool and returns loaded:Tool', () => { + withEchoToolDir((dir) => { + const result = loadWasmFromPath(dir) + expect(result).toBe('loaded:Tool') + }) + }) + + it('example script exists at examples/node-wasm-session.ts', () => { + const scriptPath = path.resolve(__dirname, '..', '..', '..', 'examples', 'node-wasm-session.ts') + expect(fs.existsSync(scriptPath)).toBe(true) + }) +}) diff --git a/bindings/node/__tests__/session.test.ts b/bindings/node/__tests__/session.test.ts new file mode 100644 index 0000000..e43e052 --- /dev/null +++ b/bindings/node/__tests__/session.test.ts @@ -0,0 +1,83 @@ +import { describe, it, expect } from 'vitest' +import { JsAmplifierSession } from '../index.js' +import { validConfig } from './fixtures' + +describe('JsAmplifierSession', () => { + it('creates with valid config and generates session ID', () => { + const session = new JsAmplifierSession(validConfig) + expect(session.sessionId).toBeTruthy() + expect(session.sessionId.length).toBeGreaterThan(0) + }) + + it('creates with custom session ID', () => { + const session = new JsAmplifierSession(validConfig, 'custom-id') + expect(session.sessionId).toBe('custom-id') + }) + + it('creates with 
parent ID', () => { + const session = new JsAmplifierSession(validConfig, undefined, 'parent-123') + expect(session.parentId).toBe('parent-123') + }) + + it('parentId is null when no parent', () => { + const session = new JsAmplifierSession(validConfig) + expect(session.parentId).toBeNull() + }) + + it('starts as not initialized', () => { + const session = new JsAmplifierSession(validConfig) + expect(session.isInitialized).toBe(false) + }) + + it('status starts as running', () => { + const session = new JsAmplifierSession(validConfig) + expect(session.status).toBe('running') + }) + + // createCoordinator() creates a NEW Coordinator from cached config each call — + // this is the known limitation documented by the rename from `.coordinator` getter. + it('createCoordinator() returns a coordinator built from session config', () => { + const session = new JsAmplifierSession(validConfig) + const coord = session.createCoordinator() + expect(coord).toBeDefined() + // Verify coordinator was constructed from the session's config, not a default + const coordConfig = JSON.parse(coord.config) + expect(coordConfig).toHaveProperty('session') + }) + + it('createCoordinator creates a new instance each call (pins detached behavior)', () => { + const session = new JsAmplifierSession(validConfig) + const c1 = session.createCoordinator() + const c2 = session.createCoordinator() + expect(c1).not.toBe(c2) + }) + + it('setInitialized marks session as initialized', () => { + const session = new JsAmplifierSession(validConfig) + expect(session.isInitialized).toBe(false) + session.setInitialized() + expect(session.isInitialized).toBe(true) + }) + + it('rejects empty config', () => { + expect(() => new JsAmplifierSession('{}')).toThrow() + }) + + it('rejects config without orchestrator', () => { + const config = JSON.stringify({ session: { context: 'context-simple' } }) + expect(() => new JsAmplifierSession(config)).toThrow(/orchestrator/) + }) + + it('rejects config without context', () => 
{ + const config = JSON.stringify({ session: { orchestrator: 'loop-basic' } }) + expect(() => new JsAmplifierSession(config)).toThrow(/context/) + }) + + it('cleanup clears initialized flag', async () => { + const session = new JsAmplifierSession(validConfig) + session.setInitialized() + expect(session.isInitialized).toBe(true) + await session.cleanup() + expect(session.isInitialized).toBe(false) + }) +}) diff --git a/bindings/node/__tests__/smoke.test.ts b/bindings/node/__tests__/smoke.test.ts new file mode 100644 index 0000000..7824e62 --- /dev/null +++ b/bindings/node/__tests__/smoke.test.ts @@ -0,0 +1,8 @@ +import { hello } from '../index.js' +import { describe, it, expect } from 'vitest' + +describe('amplifier-core native addon', () => { + it('hello() returns expected greeting', () => { + expect(hello()).toBe('Hello from amplifier-core native addon!') + }) +}) diff --git a/bindings/node/__tests__/types.test.ts b/bindings/node/__tests__/types.test.ts new file mode 100644 index 0000000..ce872fe --- /dev/null +++ b/bindings/node/__tests__/types.test.ts @@ -0,0 +1,64 @@ +import { describe, it, expect } from 'vitest' +import { + HookAction, + SessionState, + ContextInjectionRole, + ApprovalDefault, + UserMessageLevel, + Role, +} from '../index.js' + +describe('enum types', () => { + describe('HookAction', () => { + it('has all expected variants with correct string values', () => { + expect(HookAction.Continue).toBe('Continue') + expect(HookAction.Deny).toBe('Deny') + expect(HookAction.Modify).toBe('Modify') + expect(HookAction.InjectContext).toBe('InjectContext') + expect(HookAction.AskUser).toBe('AskUser') + }) + }) + + describe('SessionState', () => { + it('has all expected variants with correct string values', () => { + expect(SessionState.Running).toBe('Running') + expect(SessionState.Completed).toBe('Completed') + expect(SessionState.Failed).toBe('Failed') + expect(SessionState.Cancelled).toBe('Cancelled') + }) + }) + + describe('ContextInjectionRole', () => { 
+ it('has all expected variants with correct string values', () => { + expect(ContextInjectionRole.System).toBe('System') + expect(ContextInjectionRole.User).toBe('User') + expect(ContextInjectionRole.Assistant).toBe('Assistant') + }) + }) + + describe('ApprovalDefault', () => { + it('has all expected variants with correct string values', () => { + expect(ApprovalDefault.Allow).toBe('Allow') + expect(ApprovalDefault.Deny).toBe('Deny') + }) + }) + + describe('UserMessageLevel', () => { + it('has all expected variants with correct string values', () => { + expect(UserMessageLevel.Info).toBe('Info') + expect(UserMessageLevel.Warning).toBe('Warning') + expect(UserMessageLevel.Error).toBe('Error') + }) + }) + + describe('Role', () => { + it('has all expected variants with correct string values', () => { + expect(Role.System).toBe('System') + expect(Role.Developer).toBe('Developer') + expect(Role.User).toBe('User') + expect(Role.Assistant).toBe('Assistant') + expect(Role.Function).toBe('Function') + expect(Role.Tool).toBe('Tool') + }) + }) +}) diff --git a/bindings/node/build.rs b/bindings/node/build.rs new file mode 100644 index 0000000..9fc2367 --- /dev/null +++ b/bindings/node/build.rs @@ -0,0 +1,5 @@ +extern crate napi_build; + +fn main() { + napi_build::setup(); +} diff --git a/bindings/node/index.d.ts b/bindings/node/index.d.ts new file mode 100644 index 0000000..65b0acf --- /dev/null +++ b/bindings/node/index.d.ts @@ -0,0 +1,269 @@ +/* tslint:disable */ +/* eslint-disable */ + +/* auto-generated by NAPI-RS */ + +export declare function hello(): string +export const enum HookAction { + Continue = 'Continue', + Deny = 'Deny', + Modify = 'Modify', + InjectContext = 'InjectContext', + AskUser = 'AskUser' +} +export const enum SessionState { + Running = 'Running', + Completed = 'Completed', + Failed = 'Failed', + Cancelled = 'Cancelled' +} +export const enum ContextInjectionRole { + System = 'System', + User = 'User', + Assistant = 'Assistant' +} +export const enum 
ApprovalDefault { + Allow = 'Allow', + Deny = 'Deny' +} +export const enum UserMessageLevel { + Info = 'Info', + Warning = 'Warning', + Error = 'Error' +} +export const enum Role { + System = 'System', + Developer = 'Developer', + User = 'User', + Assistant = 'Assistant', + Function = 'Function', + Tool = 'Tool' +} +export interface JsToolResult { + success: boolean + output?: string + error?: string +} +export interface JsToolSpec { + name: string + description?: string + parametersJson: string +} +export interface JsHookResult { + action: HookAction + reason?: string + contextInjection?: string + contextInjectionRole?: ContextInjectionRole + ephemeral?: boolean + suppressOutput?: boolean + userMessage?: string + userMessageLevel?: UserMessageLevel + userMessageSource?: string + approvalPrompt?: string + approvalTimeout?: number + approvalDefault?: ApprovalDefault +} +export interface JsSessionConfig { + configJson: string +} +/** Structured error object returned to JS with a typed `code` property. */ +export interface JsAmplifierError { + code: string + message: string +} +/** + * Converts an error variant name and message into a typed `JsAmplifierError`. + * + * See [`error_code_for_variant`] for the variant → code mapping. + */ +export declare function amplifierErrorToJs(variant: string, message: string): JsAmplifierError +/** Result from resolving a module path. */ +export interface JsModuleManifest { + /** + * How the module is loaded and invoked. + * + * Valid values (string literal union): + * `"python"` | `"wasm"` | `"grpc"` | `"native"` + */ + transport: string + /** + * Logical role the module plays inside the kernel. + * + * Valid values (string literal union): + * `"tool"` | `"hook"` | `"context"` | `"approval"` | `"provider"` | `"orchestrator"` + */ + moduleType: string + /** + * Artifact format used to locate or load the module. 
+ * + * Valid values (string literal union): + * `"wasm"` | `"grpc"` | `"python"` + * + * - `"wasm"` — `artifactPath` contains the `.wasm` component file path + * - `"grpc"` — `endpoint` contains the gRPC service URL + * - `"python"` — `packageName` contains the importable Python package name + */ + artifactType: string + /** Path to WASM artifact (present when `artifactType` is `"wasm"`). */ + artifactPath?: string + /** gRPC service endpoint URL (present when `artifactType` is `"grpc"`). */ + endpoint?: string + /** Python package name for import (present when `artifactType` is `"python"`). */ + packageName?: string +} +/** + * Resolve a module from a filesystem path. + * + * Returns a JsModuleManifest describing the transport, module type, and artifact. + */ +export declare function resolveModule(path: string): JsModuleManifest +/** + * Load a WASM module from a path and return status info. + * + * For WASM modules: loads the component and returns module type info. + * For Python modules: returns an error (TS host can't load Python). + */ +export declare function loadWasmFromPath(path: string): string +/** + * Wraps `amplifier_core::CancellationToken` for Node.js. + * + * State machine: None → Graceful → Immediate, with reset back to None. + */ +export declare class JsCancellationToken { + constructor() + get isCancelled(): boolean + get isGraceful(): boolean + get isImmediate(): boolean + requestGraceful(reason?: string | undefined | null): void + requestImmediate(reason?: string | undefined | null): void + reset(): void +} +/** + * Wraps `amplifier_core::HookRegistry` for Node.js. + * + * Provides register/emit/listHandlers/setDefaultFields — the event backbone + * of the kernel. + */ +export declare class JsHookRegistry { + constructor() + /** + * Register a hook handler for the given event name. 
+ * + * ## Handler signature + * + * The `handler` callback receives two string arguments and must return a + * JSON-serialized `HookResult` (or a `Promise` that resolves to one): + * + * ```ts + * (event: string, dataJson: string) => string | Promise + * ``` + * + * Where the return value is a JSON string matching the `JsHookResult` + * shape, e.g. `'{"action":"Continue"}'`. If the handler returns an + * invalid JSON string, the kernel fails closed and treats it as `Deny`. + */ + register(event: string, handler: (...args: any[]) => any, priority: number, name: string): void + emit(event: string, dataJson: string): Promise + listHandlers(): Record> + setDefaultFields(defaultsJson: string): void +} +/** + * Wraps `amplifier_core::Coordinator` for Node.js — the central hub holding + * module mount points, capabilities, hook registry, cancellation token, and config. + * + * Implements the hybrid coordinator pattern: JS-side storage for TS module + * objects, Rust kernel for everything else. + */ +export declare class JsCoordinator { + constructor(configJson: string) + get toolNames(): Array + get providerNames(): Array + get hasOrchestrator(): boolean + get hasContext(): boolean + registerCapability(name: string, valueJson: string): void + getCapability(name: string): string | null + /** + * Creates a new **detached** (empty) JsHookRegistry. + * + * ⚠吅 **Each call returns a brand-new, empty registry** — hooks registered + * on one instance are invisible to the next. This is a known limitation: + * `Coordinator` owns its `HookRegistry` by value, not behind `Arc`, so + * the binding cannot share state across calls. + * + * The method name (`createHookRegistry`) intentionally signals "creates new + * instance" — a getter property would imply referential stability in JS. + * + * **Workaround:** create a `JsHookRegistry` directly and hold a reference. 
+ * + * Future TODO #1: restructure the kernel to hold `Arc` inside + * `Coordinator` so this method can share the same registry instance. + */ + createHookRegistry(): JsHookRegistry + get cancellation(): JsCancellationToken + get config(): string + resetTurn(): void + toDict(): Record + cleanup(): Promise +} +/** + * Wraps `amplifier_core::Session` for Node.js — the top-level entry point. + * + * Lifecycle: `new AmplifierSession(config) → initialize() → execute(prompt) → cleanup()`. + * Wires together Coordinator, HookRegistry, and CancellationToken. + * + * Known limitation: `coordinator` getter creates a separate Coordinator instance + * because the kernel Session owns its Coordinator by value, not behind Arc. + * Sharing requires restructuring the Rust kernel — tracked as Future TODO #1. + */ +export declare class JsAmplifierSession { + constructor(configJson: string, sessionId?: string | undefined | null, parentId?: string | undefined | null) + get sessionId(): string + get parentId(): string | null + get isInitialized(): boolean + /** + * Current session lifecycle state as a lowercase string. + * + * Returns one of the `SessionState` variant strings: + * - `"Running"` — session is active + * - `"Completed"` — session finished successfully + * - `"Failed"` — session encountered a fatal error + * - `"Cancelled"` — session was cancelled via the cancellation token + * + * Falls back to `"running"` if the session lock is held during `cleanup()`. + */ + get status(): string + /** + * Creates a new **fresh** JsCoordinator from this session's cached config. + * + * ⚠吅 **Each call allocates a new Coordinator** — capabilities registered on + * one instance are invisible to the next. This is a known limitation: + * `Session` owns its `Coordinator` by value, not behind `Arc`, so the + * binding cannot expose the session's live coordinator. 
+ * + * The method name (`createCoordinator`) intentionally signals "creates new + * instance" — a getter property would imply referential stability in JS. + * + * **Workaround:** call `createCoordinator()` once, hold the returned instance, + * and register capabilities on it before passing it to other APIs. + * + * Future TODO #1: restructure the kernel to hold `Arc` inside + * `Session` so this method can return a handle to the session's actual coordinator. + */ + createCoordinator(): JsCoordinator + setInitialized(): void + cleanup(): Promise +} +/** + * Bridges a TypeScript tool object to Rust via `ThreadsafeFunction`. + * + * In the hybrid coordinator pattern, these bridge objects are stored in a + * JS-side Map (not in the Rust Coordinator). The JS orchestrator retrieves + * them by name and calls `execute()` directly. + */ +export declare class JsToolBridge { + constructor(name: string, description: string, parametersJson: string, executeFn: (inputJson: string) => Promise) + get name(): string + get description(): string + execute(inputJson: string): Promise + getSpec(): string +} diff --git a/bindings/node/index.js b/bindings/node/index.js new file mode 100644 index 0000000..791c1ad --- /dev/null +++ b/bindings/node/index.js @@ -0,0 +1,329 @@ +/* tslint:disable */ +/* eslint-disable */ +/* prettier-ignore */ + +/* auto-generated by NAPI-RS */ + +const { existsSync, readFileSync } = require('fs') +const { join } = require('path') + +const { platform, arch } = process + +let nativeBinding = null +let localFileExisted = false +let loadError = null + +function isMusl() { + // For Node 10 + if (!process.report || typeof process.report.getReport !== 'function') { + try { + const lddPath = require('child_process').execSync('which ldd').toString().trim() + return readFileSync(lddPath, 'utf8').includes('musl') + } catch (e) { + return true + } + } else { + const { glibcVersionRuntime } = process.report.getReport().header + return !glibcVersionRuntime + } +} + 
+switch (platform) { + case 'android': + switch (arch) { + case 'arm64': + localFileExisted = existsSync(join(__dirname, 'amplifier-core.android-arm64.node')) + try { + if (localFileExisted) { + nativeBinding = require('./amplifier-core.android-arm64.node') + } else { + nativeBinding = require('amplifier-core-android-arm64') + } + } catch (e) { + loadError = e + } + break + case 'arm': + localFileExisted = existsSync(join(__dirname, 'amplifier-core.android-arm-eabi.node')) + try { + if (localFileExisted) { + nativeBinding = require('./amplifier-core.android-arm-eabi.node') + } else { + nativeBinding = require('amplifier-core-android-arm-eabi') + } + } catch (e) { + loadError = e + } + break + default: + throw new Error(`Unsupported architecture on Android ${arch}`) + } + break + case 'win32': + switch (arch) { + case 'x64': + localFileExisted = existsSync( + join(__dirname, 'amplifier-core.win32-x64-msvc.node') + ) + try { + if (localFileExisted) { + nativeBinding = require('./amplifier-core.win32-x64-msvc.node') + } else { + nativeBinding = require('amplifier-core-win32-x64-msvc') + } + } catch (e) { + loadError = e + } + break + case 'ia32': + localFileExisted = existsSync( + join(__dirname, 'amplifier-core.win32-ia32-msvc.node') + ) + try { + if (localFileExisted) { + nativeBinding = require('./amplifier-core.win32-ia32-msvc.node') + } else { + nativeBinding = require('amplifier-core-win32-ia32-msvc') + } + } catch (e) { + loadError = e + } + break + case 'arm64': + localFileExisted = existsSync( + join(__dirname, 'amplifier-core.win32-arm64-msvc.node') + ) + try { + if (localFileExisted) { + nativeBinding = require('./amplifier-core.win32-arm64-msvc.node') + } else { + nativeBinding = require('amplifier-core-win32-arm64-msvc') + } + } catch (e) { + loadError = e + } + break + default: + throw new Error(`Unsupported architecture on Windows: ${arch}`) + } + break + case 'darwin': + localFileExisted = existsSync(join(__dirname, 
'amplifier-core.darwin-universal.node')) + try { + if (localFileExisted) { + nativeBinding = require('./amplifier-core.darwin-universal.node') + } else { + nativeBinding = require('amplifier-core-darwin-universal') + } + break + } catch {} + switch (arch) { + case 'x64': + localFileExisted = existsSync(join(__dirname, 'amplifier-core.darwin-x64.node')) + try { + if (localFileExisted) { + nativeBinding = require('./amplifier-core.darwin-x64.node') + } else { + nativeBinding = require('amplifier-core-darwin-x64') + } + } catch (e) { + loadError = e + } + break + case 'arm64': + localFileExisted = existsSync( + join(__dirname, 'amplifier-core.darwin-arm64.node') + ) + try { + if (localFileExisted) { + nativeBinding = require('./amplifier-core.darwin-arm64.node') + } else { + nativeBinding = require('amplifier-core-darwin-arm64') + } + } catch (e) { + loadError = e + } + break + default: + throw new Error(`Unsupported architecture on macOS: ${arch}`) + } + break + case 'freebsd': + if (arch !== 'x64') { + throw new Error(`Unsupported architecture on FreeBSD: ${arch}`) + } + localFileExisted = existsSync(join(__dirname, 'amplifier-core.freebsd-x64.node')) + try { + if (localFileExisted) { + nativeBinding = require('./amplifier-core.freebsd-x64.node') + } else { + nativeBinding = require('amplifier-core-freebsd-x64') + } + } catch (e) { + loadError = e + } + break + case 'linux': + switch (arch) { + case 'x64': + if (isMusl()) { + localFileExisted = existsSync( + join(__dirname, 'amplifier-core.linux-x64-musl.node') + ) + try { + if (localFileExisted) { + nativeBinding = require('./amplifier-core.linux-x64-musl.node') + } else { + nativeBinding = require('amplifier-core-linux-x64-musl') + } + } catch (e) { + loadError = e + } + } else { + localFileExisted = existsSync( + join(__dirname, 'amplifier-core.linux-x64-gnu.node') + ) + try { + if (localFileExisted) { + nativeBinding = require('./amplifier-core.linux-x64-gnu.node') + } else { + nativeBinding = 
require('amplifier-core-linux-x64-gnu') + } + } catch (e) { + loadError = e + } + } + break + case 'arm64': + if (isMusl()) { + localFileExisted = existsSync( + join(__dirname, 'amplifier-core.linux-arm64-musl.node') + ) + try { + if (localFileExisted) { + nativeBinding = require('./amplifier-core.linux-arm64-musl.node') + } else { + nativeBinding = require('amplifier-core-linux-arm64-musl') + } + } catch (e) { + loadError = e + } + } else { + localFileExisted = existsSync( + join(__dirname, 'amplifier-core.linux-arm64-gnu.node') + ) + try { + if (localFileExisted) { + nativeBinding = require('./amplifier-core.linux-arm64-gnu.node') + } else { + nativeBinding = require('amplifier-core-linux-arm64-gnu') + } + } catch (e) { + loadError = e + } + } + break + case 'arm': + if (isMusl()) { + localFileExisted = existsSync( + join(__dirname, 'amplifier-core.linux-arm-musleabihf.node') + ) + try { + if (localFileExisted) { + nativeBinding = require('./amplifier-core.linux-arm-musleabihf.node') + } else { + nativeBinding = require('amplifier-core-linux-arm-musleabihf') + } + } catch (e) { + loadError = e + } + } else { + localFileExisted = existsSync( + join(__dirname, 'amplifier-core.linux-arm-gnueabihf.node') + ) + try { + if (localFileExisted) { + nativeBinding = require('./amplifier-core.linux-arm-gnueabihf.node') + } else { + nativeBinding = require('amplifier-core-linux-arm-gnueabihf') + } + } catch (e) { + loadError = e + } + } + break + case 'riscv64': + if (isMusl()) { + localFileExisted = existsSync( + join(__dirname, 'amplifier-core.linux-riscv64-musl.node') + ) + try { + if (localFileExisted) { + nativeBinding = require('./amplifier-core.linux-riscv64-musl.node') + } else { + nativeBinding = require('amplifier-core-linux-riscv64-musl') + } + } catch (e) { + loadError = e + } + } else { + localFileExisted = existsSync( + join(__dirname, 'amplifier-core.linux-riscv64-gnu.node') + ) + try { + if (localFileExisted) { + nativeBinding = 
require('./amplifier-core.linux-riscv64-gnu.node') + } else { + nativeBinding = require('amplifier-core-linux-riscv64-gnu') + } + } catch (e) { + loadError = e + } + } + break + case 's390x': + localFileExisted = existsSync( + join(__dirname, 'amplifier-core.linux-s390x-gnu.node') + ) + try { + if (localFileExisted) { + nativeBinding = require('./amplifier-core.linux-s390x-gnu.node') + } else { + nativeBinding = require('amplifier-core-linux-s390x-gnu') + } + } catch (e) { + loadError = e + } + break + default: + throw new Error(`Unsupported architecture on Linux: ${arch}`) + } + break + default: + throw new Error(`Unsupported OS: ${platform}, architecture: ${arch}`) +} + +if (!nativeBinding) { + if (loadError) { + throw loadError + } + throw new Error(`Failed to load native binding`) +} + +const { hello, HookAction, SessionState, ContextInjectionRole, ApprovalDefault, UserMessageLevel, Role, JsCancellationToken, JsHookRegistry, JsCoordinator, JsAmplifierSession, JsToolBridge, amplifierErrorToJs, resolveModule, loadWasmFromPath } = nativeBinding + +module.exports.hello = hello +module.exports.HookAction = HookAction +module.exports.SessionState = SessionState +module.exports.ContextInjectionRole = ContextInjectionRole +module.exports.ApprovalDefault = ApprovalDefault +module.exports.UserMessageLevel = UserMessageLevel +module.exports.Role = Role +module.exports.JsCancellationToken = JsCancellationToken +module.exports.JsHookRegistry = JsHookRegistry +module.exports.JsCoordinator = JsCoordinator +module.exports.JsAmplifierSession = JsAmplifierSession +module.exports.JsToolBridge = JsToolBridge +module.exports.amplifierErrorToJs = amplifierErrorToJs +module.exports.resolveModule = resolveModule +module.exports.loadWasmFromPath = loadWasmFromPath diff --git a/bindings/node/package-lock.json b/bindings/node/package-lock.json new file mode 100644 index 0000000..2ca76df --- /dev/null +++ b/bindings/node/package-lock.json @@ -0,0 +1,1619 @@ +{ + "name": "amplifier-core", 
+ "version": "1.0.10", + "lockfileVersion": 3, + "requires": true, + "packages": { + "": { + "name": "amplifier-core", + "version": "1.0.10", + "devDependencies": { + "@napi-rs/cli": "^2", + "typescript": "^5", + "vitest": "^3" + } + }, + "node_modules/@esbuild/aix-ppc64": { + "version": "0.27.3", + "resolved": "https://registry.npmjs.org/@esbuild/aix-ppc64/-/aix-ppc64-0.27.3.tgz", + "integrity": "sha512-9fJMTNFTWZMh5qwrBItuziu834eOCUcEqymSH7pY+zoMVEZg3gcPuBNxH1EvfVYe9h0x/Ptw8KBzv7qxb7l8dg==", + "cpu": [ + "ppc64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "aix" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/android-arm": { + "version": "0.27.3", + "resolved": "https://registry.npmjs.org/@esbuild/android-arm/-/android-arm-0.27.3.tgz", + "integrity": "sha512-i5D1hPY7GIQmXlXhs2w8AWHhenb00+GxjxRncS2ZM7YNVGNfaMxgzSGuO8o8SJzRc/oZwU2bcScvVERk03QhzA==", + "cpu": [ + "arm" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/android-arm64": { + "version": "0.27.3", + "resolved": "https://registry.npmjs.org/@esbuild/android-arm64/-/android-arm64-0.27.3.tgz", + "integrity": "sha512-YdghPYUmj/FX2SYKJ0OZxf+iaKgMsKHVPF1MAq/P8WirnSpCStzKJFjOjzsW0QQ7oIAiccHdcqjbHmJxRb/dmg==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/android-x64": { + "version": "0.27.3", + "resolved": "https://registry.npmjs.org/@esbuild/android-x64/-/android-x64-0.27.3.tgz", + "integrity": "sha512-IN/0BNTkHtk8lkOM8JWAYFg4ORxBkZQf9zXiEOfERX/CzxW3Vg1ewAhU7QSWQpVIzTW+b8Xy+lGzdYXV6UZObQ==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/darwin-arm64": { + "version": "0.27.3", + "resolved": 
"https://registry.npmjs.org/@esbuild/darwin-arm64/-/darwin-arm64-0.27.3.tgz", + "integrity": "sha512-Re491k7ByTVRy0t3EKWajdLIr0gz2kKKfzafkth4Q8A5n1xTHrkqZgLLjFEHVD+AXdUGgQMq+Godfq45mGpCKg==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/darwin-x64": { + "version": "0.27.3", + "resolved": "https://registry.npmjs.org/@esbuild/darwin-x64/-/darwin-x64-0.27.3.tgz", + "integrity": "sha512-vHk/hA7/1AckjGzRqi6wbo+jaShzRowYip6rt6q7VYEDX4LEy1pZfDpdxCBnGtl+A5zq8iXDcyuxwtv3hNtHFg==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/freebsd-arm64": { + "version": "0.27.3", + "resolved": "https://registry.npmjs.org/@esbuild/freebsd-arm64/-/freebsd-arm64-0.27.3.tgz", + "integrity": "sha512-ipTYM2fjt3kQAYOvo6vcxJx3nBYAzPjgTCk7QEgZG8AUO3ydUhvelmhrbOheMnGOlaSFUoHXB6un+A7q4ygY9w==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "freebsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/freebsd-x64": { + "version": "0.27.3", + "resolved": "https://registry.npmjs.org/@esbuild/freebsd-x64/-/freebsd-x64-0.27.3.tgz", + "integrity": "sha512-dDk0X87T7mI6U3K9VjWtHOXqwAMJBNN2r7bejDsc+j03SEjtD9HrOl8gVFByeM0aJksoUuUVU9TBaZa2rgj0oA==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "freebsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-arm": { + "version": "0.27.3", + "resolved": "https://registry.npmjs.org/@esbuild/linux-arm/-/linux-arm-0.27.3.tgz", + "integrity": "sha512-s6nPv2QkSupJwLYyfS+gwdirm0ukyTFNl3KTgZEAiJDd+iHZcbTPPcWCcRYH+WlNbwChgH2QkE9NSlNrMT8Gfw==", + "cpu": [ + "arm" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + 
"node_modules/@esbuild/linux-arm64": { + "version": "0.27.3", + "resolved": "https://registry.npmjs.org/@esbuild/linux-arm64/-/linux-arm64-0.27.3.tgz", + "integrity": "sha512-sZOuFz/xWnZ4KH3YfFrKCf1WyPZHakVzTiqji3WDc0BCl2kBwiJLCXpzLzUBLgmp4veFZdvN5ChW4Eq/8Fc2Fg==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-ia32": { + "version": "0.27.3", + "resolved": "https://registry.npmjs.org/@esbuild/linux-ia32/-/linux-ia32-0.27.3.tgz", + "integrity": "sha512-yGlQYjdxtLdh0a3jHjuwOrxQjOZYD/C9PfdbgJJF3TIZWnm/tMd/RcNiLngiu4iwcBAOezdnSLAwQDPqTmtTYg==", + "cpu": [ + "ia32" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-loong64": { + "version": "0.27.3", + "resolved": "https://registry.npmjs.org/@esbuild/linux-loong64/-/linux-loong64-0.27.3.tgz", + "integrity": "sha512-WO60Sn8ly3gtzhyjATDgieJNet/KqsDlX5nRC5Y3oTFcS1l0KWba+SEa9Ja1GfDqSF1z6hif/SkpQJbL63cgOA==", + "cpu": [ + "loong64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-mips64el": { + "version": "0.27.3", + "resolved": "https://registry.npmjs.org/@esbuild/linux-mips64el/-/linux-mips64el-0.27.3.tgz", + "integrity": "sha512-APsymYA6sGcZ4pD6k+UxbDjOFSvPWyZhjaiPyl/f79xKxwTnrn5QUnXR5prvetuaSMsb4jgeHewIDCIWljrSxw==", + "cpu": [ + "mips64el" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-ppc64": { + "version": "0.27.3", + "resolved": "https://registry.npmjs.org/@esbuild/linux-ppc64/-/linux-ppc64-0.27.3.tgz", + "integrity": "sha512-eizBnTeBefojtDb9nSh4vvVQ3V9Qf9Df01PfawPcRzJH4gFSgrObw+LveUyDoKU3kxi5+9RJTCWlj4FjYXVPEA==", + "cpu": [ + "ppc64" + ], + "dev": true, + "license": 
"MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-riscv64": { + "version": "0.27.3", + "resolved": "https://registry.npmjs.org/@esbuild/linux-riscv64/-/linux-riscv64-0.27.3.tgz", + "integrity": "sha512-3Emwh0r5wmfm3ssTWRQSyVhbOHvqegUDRd0WhmXKX2mkHJe1SFCMJhagUleMq+Uci34wLSipf8Lagt4LlpRFWQ==", + "cpu": [ + "riscv64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-s390x": { + "version": "0.27.3", + "resolved": "https://registry.npmjs.org/@esbuild/linux-s390x/-/linux-s390x-0.27.3.tgz", + "integrity": "sha512-pBHUx9LzXWBc7MFIEEL0yD/ZVtNgLytvx60gES28GcWMqil8ElCYR4kvbV2BDqsHOvVDRrOxGySBM9Fcv744hw==", + "cpu": [ + "s390x" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-x64": { + "version": "0.27.3", + "resolved": "https://registry.npmjs.org/@esbuild/linux-x64/-/linux-x64-0.27.3.tgz", + "integrity": "sha512-Czi8yzXUWIQYAtL/2y6vogER8pvcsOsk5cpwL4Gk5nJqH5UZiVByIY8Eorm5R13gq+DQKYg0+JyQoytLQas4dA==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/netbsd-arm64": { + "version": "0.27.3", + "resolved": "https://registry.npmjs.org/@esbuild/netbsd-arm64/-/netbsd-arm64-0.27.3.tgz", + "integrity": "sha512-sDpk0RgmTCR/5HguIZa9n9u+HVKf40fbEUt+iTzSnCaGvY9kFP0YKBWZtJaraonFnqef5SlJ8/TiPAxzyS+UoA==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "netbsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/netbsd-x64": { + "version": "0.27.3", + "resolved": "https://registry.npmjs.org/@esbuild/netbsd-x64/-/netbsd-x64-0.27.3.tgz", + "integrity": 
"sha512-P14lFKJl/DdaE00LItAukUdZO5iqNH7+PjoBm+fLQjtxfcfFE20Xf5CrLsmZdq5LFFZzb5JMZ9grUwvtVYzjiA==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "netbsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/openbsd-arm64": { + "version": "0.27.3", + "resolved": "https://registry.npmjs.org/@esbuild/openbsd-arm64/-/openbsd-arm64-0.27.3.tgz", + "integrity": "sha512-AIcMP77AvirGbRl/UZFTq5hjXK+2wC7qFRGoHSDrZ5v5b8DK/GYpXW3CPRL53NkvDqb9D+alBiC/dV0Fb7eJcw==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "openbsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/openbsd-x64": { + "version": "0.27.3", + "resolved": "https://registry.npmjs.org/@esbuild/openbsd-x64/-/openbsd-x64-0.27.3.tgz", + "integrity": "sha512-DnW2sRrBzA+YnE70LKqnM3P+z8vehfJWHXECbwBmH/CU51z6FiqTQTHFenPlHmo3a8UgpLyH3PT+87OViOh1AQ==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "openbsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/openharmony-arm64": { + "version": "0.27.3", + "resolved": "https://registry.npmjs.org/@esbuild/openharmony-arm64/-/openharmony-arm64-0.27.3.tgz", + "integrity": "sha512-NinAEgr/etERPTsZJ7aEZQvvg/A6IsZG/LgZy+81wON2huV7SrK3e63dU0XhyZP4RKGyTm7aOgmQk0bGp0fy2g==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "openharmony" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/sunos-x64": { + "version": "0.27.3", + "resolved": "https://registry.npmjs.org/@esbuild/sunos-x64/-/sunos-x64-0.27.3.tgz", + "integrity": "sha512-PanZ+nEz+eWoBJ8/f8HKxTTD172SKwdXebZ0ndd953gt1HRBbhMsaNqjTyYLGLPdoWHy4zLU7bDVJztF5f3BHA==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "sunos" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/win32-arm64": { + "version": "0.27.3", + 
"resolved": "https://registry.npmjs.org/@esbuild/win32-arm64/-/win32-arm64-0.27.3.tgz", + "integrity": "sha512-B2t59lWWYrbRDw/tjiWOuzSsFh1Y/E95ofKz7rIVYSQkUYBjfSgf6oeYPNWHToFRr2zx52JKApIcAS/D5TUBnA==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/win32-ia32": { + "version": "0.27.3", + "resolved": "https://registry.npmjs.org/@esbuild/win32-ia32/-/win32-ia32-0.27.3.tgz", + "integrity": "sha512-QLKSFeXNS8+tHW7tZpMtjlNb7HKau0QDpwm49u0vUp9y1WOF+PEzkU84y9GqYaAVW8aH8f3GcBck26jh54cX4Q==", + "cpu": [ + "ia32" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/win32-x64": { + "version": "0.27.3", + "resolved": "https://registry.npmjs.org/@esbuild/win32-x64/-/win32-x64-0.27.3.tgz", + "integrity": "sha512-4uJGhsxuptu3OcpVAzli+/gWusVGwZZHTlS63hh++ehExkVT8SgiEf7/uC/PclrPPkLhZqGgCTjd0VWLo6xMqA==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@jridgewell/sourcemap-codec": { + "version": "1.5.5", + "resolved": "https://registry.npmjs.org/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.5.5.tgz", + "integrity": "sha512-cYQ9310grqxueWbl+WuIUIaiUaDcj7WOq5fVhEljNVgRfOUhY9fy2zTvfoqWsnebh8Sl70VScFbICvJnLKB0Og==", + "dev": true, + "license": "MIT" + }, + "node_modules/@napi-rs/cli": { + "version": "2.18.4", + "resolved": "https://registry.npmjs.org/@napi-rs/cli/-/cli-2.18.4.tgz", + "integrity": "sha512-SgJeA4df9DE2iAEpr3M2H0OKl/yjtg1BnRI5/JyowS71tUWhrfSu2LT0V3vlHET+g1hBVlrO60PmEXwUEKp8Mg==", + "dev": true, + "license": "MIT", + "bin": { + "napi": "scripts/index.js" + }, + "engines": { + "node": ">= 10" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/Brooooooklyn" + } + }, + "node_modules/@rollup/rollup-android-arm-eabi": { 
+ "version": "4.59.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm-eabi/-/rollup-android-arm-eabi-4.59.0.tgz", + "integrity": "sha512-upnNBkA6ZH2VKGcBj9Fyl9IGNPULcjXRlg0LLeaioQWueH30p6IXtJEbKAgvyv+mJaMxSm1l6xwDXYjpEMiLMg==", + "cpu": [ + "arm" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "android" + ] + }, + "node_modules/@rollup/rollup-android-arm64": { + "version": "4.59.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm64/-/rollup-android-arm64-4.59.0.tgz", + "integrity": "sha512-hZ+Zxj3SySm4A/DylsDKZAeVg0mvi++0PYVceVyX7hemkw7OreKdCvW2oQ3T1FMZvCaQXqOTHb8qmBShoqk69Q==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "android" + ] + }, + "node_modules/@rollup/rollup-darwin-arm64": { + "version": "4.59.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-arm64/-/rollup-darwin-arm64-4.59.0.tgz", + "integrity": "sha512-W2Psnbh1J8ZJw0xKAd8zdNgF9HRLkdWwwdWqubSVk0pUuQkoHnv7rx4GiF9rT4t5DIZGAsConRE3AxCdJ4m8rg==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ] + }, + "node_modules/@rollup/rollup-darwin-x64": { + "version": "4.59.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-x64/-/rollup-darwin-x64-4.59.0.tgz", + "integrity": "sha512-ZW2KkwlS4lwTv7ZVsYDiARfFCnSGhzYPdiOU4IM2fDbL+QGlyAbjgSFuqNRbSthybLbIJ915UtZBtmuLrQAT/w==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ] + }, + "node_modules/@rollup/rollup-freebsd-arm64": { + "version": "4.59.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-freebsd-arm64/-/rollup-freebsd-arm64-4.59.0.tgz", + "integrity": "sha512-EsKaJ5ytAu9jI3lonzn3BgG8iRBjV4LxZexygcQbpiU0wU0ATxhNVEpXKfUa0pS05gTcSDMKpn3Sx+QB9RlTTA==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "freebsd" + ] + }, + 
"node_modules/@rollup/rollup-freebsd-x64": { + "version": "4.59.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-freebsd-x64/-/rollup-freebsd-x64-4.59.0.tgz", + "integrity": "sha512-d3DuZi2KzTMjImrxoHIAODUZYoUUMsuUiY4SRRcJy6NJoZ6iIqWnJu9IScV9jXysyGMVuW+KNzZvBLOcpdl3Vg==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "freebsd" + ] + }, + "node_modules/@rollup/rollup-linux-arm-gnueabihf": { + "version": "4.59.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-gnueabihf/-/rollup-linux-arm-gnueabihf-4.59.0.tgz", + "integrity": "sha512-t4ONHboXi/3E0rT6OZl1pKbl2Vgxf9vJfWgmUoCEVQVxhW6Cw/c8I6hbbu7DAvgp82RKiH7TpLwxnJeKv2pbsw==", + "cpu": [ + "arm" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-arm-musleabihf": { + "version": "4.59.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-musleabihf/-/rollup-linux-arm-musleabihf-4.59.0.tgz", + "integrity": "sha512-CikFT7aYPA2ufMD086cVORBYGHffBo4K8MQ4uPS/ZnY54GKj36i196u8U+aDVT2LX4eSMbyHtyOh7D7Zvk2VvA==", + "cpu": [ + "arm" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-arm64-gnu": { + "version": "4.59.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-gnu/-/rollup-linux-arm64-gnu-4.59.0.tgz", + "integrity": "sha512-jYgUGk5aLd1nUb1CtQ8E+t5JhLc9x5WdBKew9ZgAXg7DBk0ZHErLHdXM24rfX+bKrFe+Xp5YuJo54I5HFjGDAA==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-arm64-musl": { + "version": "4.59.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-musl/-/rollup-linux-arm64-musl-4.59.0.tgz", + "integrity": "sha512-peZRVEdnFWZ5Bh2KeumKG9ty7aCXzzEsHShOZEFiCQlDEepP1dpUl/SrUNXNg13UmZl+gzVDPsiCwnV1uI0RUA==", + "cpu": [ + "arm64" + ], + "dev": true, + 
"license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-loong64-gnu": { + "version": "4.59.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-loong64-gnu/-/rollup-linux-loong64-gnu-4.59.0.tgz", + "integrity": "sha512-gbUSW/97f7+r4gHy3Jlup8zDG190AuodsWnNiXErp9mT90iCy9NKKU0Xwx5k8VlRAIV2uU9CsMnEFg/xXaOfXg==", + "cpu": [ + "loong64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-loong64-musl": { + "version": "4.59.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-loong64-musl/-/rollup-linux-loong64-musl-4.59.0.tgz", + "integrity": "sha512-yTRONe79E+o0FWFijasoTjtzG9EBedFXJMl888NBEDCDV9I2wGbFFfJQQe63OijbFCUZqxpHz1GzpbtSFikJ4Q==", + "cpu": [ + "loong64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-ppc64-gnu": { + "version": "4.59.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-ppc64-gnu/-/rollup-linux-ppc64-gnu-4.59.0.tgz", + "integrity": "sha512-sw1o3tfyk12k3OEpRddF68a1unZ5VCN7zoTNtSn2KndUE+ea3m3ROOKRCZxEpmT9nsGnogpFP9x6mnLTCaoLkA==", + "cpu": [ + "ppc64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-ppc64-musl": { + "version": "4.59.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-ppc64-musl/-/rollup-linux-ppc64-musl-4.59.0.tgz", + "integrity": "sha512-+2kLtQ4xT3AiIxkzFVFXfsmlZiG5FXYW7ZyIIvGA7Bdeuh9Z0aN4hVyXS/G1E9bTP/vqszNIN/pUKCk/BTHsKA==", + "cpu": [ + "ppc64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-riscv64-gnu": { + "version": "4.59.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-riscv64-gnu/-/rollup-linux-riscv64-gnu-4.59.0.tgz", + "integrity": 
"sha512-NDYMpsXYJJaj+I7UdwIuHHNxXZ/b/N2hR15NyH3m2qAtb/hHPA4g4SuuvrdxetTdndfj9b1WOmy73kcPRoERUg==", + "cpu": [ + "riscv64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-riscv64-musl": { + "version": "4.59.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-riscv64-musl/-/rollup-linux-riscv64-musl-4.59.0.tgz", + "integrity": "sha512-nLckB8WOqHIf1bhymk+oHxvM9D3tyPndZH8i8+35p/1YiVoVswPid2yLzgX7ZJP0KQvnkhM4H6QZ5m0LzbyIAg==", + "cpu": [ + "riscv64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-s390x-gnu": { + "version": "4.59.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-s390x-gnu/-/rollup-linux-s390x-gnu-4.59.0.tgz", + "integrity": "sha512-oF87Ie3uAIvORFBpwnCvUzdeYUqi2wY6jRFWJAy1qus/udHFYIkplYRW+wo+GRUP4sKzYdmE1Y3+rY5Gc4ZO+w==", + "cpu": [ + "s390x" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-x64-gnu": { + "version": "4.59.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-gnu/-/rollup-linux-x64-gnu-4.59.0.tgz", + "integrity": "sha512-3AHmtQq/ppNuUspKAlvA8HtLybkDflkMuLK4DPo77DfthRb71V84/c4MlWJXixZz4uruIH4uaa07IqoAkG64fg==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-x64-musl": { + "version": "4.59.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-musl/-/rollup-linux-x64-musl-4.59.0.tgz", + "integrity": "sha512-2UdiwS/9cTAx7qIUZB/fWtToJwvt0Vbo0zmnYt7ED35KPg13Q0ym1g442THLC7VyI6JfYTP4PiSOWyoMdV2/xg==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-openbsd-x64": { + "version": "4.59.0", + "resolved": 
"https://registry.npmjs.org/@rollup/rollup-openbsd-x64/-/rollup-openbsd-x64-4.59.0.tgz", + "integrity": "sha512-M3bLRAVk6GOwFlPTIxVBSYKUaqfLrn8l0psKinkCFxl4lQvOSz8ZrKDz2gxcBwHFpci0B6rttydI4IpS4IS/jQ==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "openbsd" + ] + }, + "node_modules/@rollup/rollup-openharmony-arm64": { + "version": "4.59.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-openharmony-arm64/-/rollup-openharmony-arm64-4.59.0.tgz", + "integrity": "sha512-tt9KBJqaqp5i5HUZzoafHZX8b5Q2Fe7UjYERADll83O4fGqJ49O1FsL6LpdzVFQcpwvnyd0i+K/VSwu/o/nWlA==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "openharmony" + ] + }, + "node_modules/@rollup/rollup-win32-arm64-msvc": { + "version": "4.59.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-arm64-msvc/-/rollup-win32-arm64-msvc-4.59.0.tgz", + "integrity": "sha512-V5B6mG7OrGTwnxaNUzZTDTjDS7F75PO1ae6MJYdiMu60sq0CqN5CVeVsbhPxalupvTX8gXVSU9gq+Rx1/hvu6A==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ] + }, + "node_modules/@rollup/rollup-win32-ia32-msvc": { + "version": "4.59.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-ia32-msvc/-/rollup-win32-ia32-msvc-4.59.0.tgz", + "integrity": "sha512-UKFMHPuM9R0iBegwzKF4y0C4J9u8C6MEJgFuXTBerMk7EJ92GFVFYBfOZaSGLu6COf7FxpQNqhNS4c4icUPqxA==", + "cpu": [ + "ia32" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ] + }, + "node_modules/@rollup/rollup-win32-x64-gnu": { + "version": "4.59.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-x64-gnu/-/rollup-win32-x64-gnu-4.59.0.tgz", + "integrity": "sha512-laBkYlSS1n2L8fSo1thDNGrCTQMmxjYY5G0WFWjFFYZkKPjsMBsgJfGf4TLxXrF6RyhI60L8TMOjBMvXiTcxeA==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ] + }, + 
"node_modules/@rollup/rollup-win32-x64-msvc": { + "version": "4.59.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-x64-msvc/-/rollup-win32-x64-msvc-4.59.0.tgz", + "integrity": "sha512-2HRCml6OztYXyJXAvdDXPKcawukWY2GpR5/nxKp4iBgiO3wcoEGkAaqctIbZcNB6KlUQBIqt8VYkNSj2397EfA==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ] + }, + "node_modules/@types/chai": { + "version": "5.2.3", + "resolved": "https://registry.npmjs.org/@types/chai/-/chai-5.2.3.tgz", + "integrity": "sha512-Mw558oeA9fFbv65/y4mHtXDs9bPnFMZAL/jxdPFUpOHHIXX91mcgEHbS5Lahr+pwZFR8A7GQleRWeI6cGFC2UA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/deep-eql": "*", + "assertion-error": "^2.0.1" + } + }, + "node_modules/@types/deep-eql": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/@types/deep-eql/-/deep-eql-4.0.2.tgz", + "integrity": "sha512-c9h9dVVMigMPc4bwTvC5dxqtqJZwQPePsWjPlpSOnojbor6pGqdk541lfA7AqFQr5pB1BRdq0juY9db81BwyFw==", + "dev": true, + "license": "MIT" + }, + "node_modules/@types/estree": { + "version": "1.0.8", + "resolved": "https://registry.npmjs.org/@types/estree/-/estree-1.0.8.tgz", + "integrity": "sha512-dWHzHa2WqEXI/O1E9OjrocMTKJl2mSrEolh1Iomrv6U+JuNwaHXsXx9bLu5gG7BUWFIN0skIQJQ/L1rIex4X6w==", + "dev": true, + "license": "MIT" + }, + "node_modules/@vitest/expect": { + "version": "3.2.4", + "resolved": "https://registry.npmjs.org/@vitest/expect/-/expect-3.2.4.tgz", + "integrity": "sha512-Io0yyORnB6sikFlt8QW5K7slY4OjqNX9jmJQ02QDda8lyM6B5oNgVWoSoKPac8/kgnCUzuHQKrSLtu/uOqqrig==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/chai": "^5.2.2", + "@vitest/spy": "3.2.4", + "@vitest/utils": "3.2.4", + "chai": "^5.2.0", + "tinyrainbow": "^2.0.0" + }, + "funding": { + "url": "https://opencollective.com/vitest" + } + }, + "node_modules/@vitest/mocker": { + "version": "3.2.4", + "resolved": "https://registry.npmjs.org/@vitest/mocker/-/mocker-3.2.4.tgz", + "integrity": 
"sha512-46ryTE9RZO/rfDd7pEqFl7etuyzekzEhUbTW3BvmeO/BcCMEgq59BKhek3dXDWgAj4oMK6OZi+vRr1wPW6qjEQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@vitest/spy": "3.2.4", + "estree-walker": "^3.0.3", + "magic-string": "^0.30.17" + }, + "funding": { + "url": "https://opencollective.com/vitest" + }, + "peerDependencies": { + "msw": "^2.4.9", + "vite": "^5.0.0 || ^6.0.0 || ^7.0.0-0" + }, + "peerDependenciesMeta": { + "msw": { + "optional": true + }, + "vite": { + "optional": true + } + } + }, + "node_modules/@vitest/pretty-format": { + "version": "3.2.4", + "resolved": "https://registry.npmjs.org/@vitest/pretty-format/-/pretty-format-3.2.4.tgz", + "integrity": "sha512-IVNZik8IVRJRTr9fxlitMKeJeXFFFN0JaB9PHPGQ8NKQbGpfjlTx9zO4RefN8gp7eqjNy8nyK3NZmBzOPeIxtA==", + "dev": true, + "license": "MIT", + "dependencies": { + "tinyrainbow": "^2.0.0" + }, + "funding": { + "url": "https://opencollective.com/vitest" + } + }, + "node_modules/@vitest/runner": { + "version": "3.2.4", + "resolved": "https://registry.npmjs.org/@vitest/runner/-/runner-3.2.4.tgz", + "integrity": "sha512-oukfKT9Mk41LreEW09vt45f8wx7DordoWUZMYdY/cyAk7w5TWkTRCNZYF7sX7n2wB7jyGAl74OxgwhPgKaqDMQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@vitest/utils": "3.2.4", + "pathe": "^2.0.3", + "strip-literal": "^3.0.0" + }, + "funding": { + "url": "https://opencollective.com/vitest" + } + }, + "node_modules/@vitest/snapshot": { + "version": "3.2.4", + "resolved": "https://registry.npmjs.org/@vitest/snapshot/-/snapshot-3.2.4.tgz", + "integrity": "sha512-dEYtS7qQP2CjU27QBC5oUOxLE/v5eLkGqPE0ZKEIDGMs4vKWe7IjgLOeauHsR0D5YuuycGRO5oSRXnwnmA78fQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@vitest/pretty-format": "3.2.4", + "magic-string": "^0.30.17", + "pathe": "^2.0.3" + }, + "funding": { + "url": "https://opencollective.com/vitest" + } + }, + "node_modules/@vitest/spy": { + "version": "3.2.4", + "resolved": "https://registry.npmjs.org/@vitest/spy/-/spy-3.2.4.tgz", + "integrity": 
"sha512-vAfasCOe6AIK70iP5UD11Ac4siNUNJ9i/9PZ3NKx07sG6sUxeag1LWdNrMWeKKYBLlzuK+Gn65Yd5nyL6ds+nw==", + "dev": true, + "license": "MIT", + "dependencies": { + "tinyspy": "^4.0.3" + }, + "funding": { + "url": "https://opencollective.com/vitest" + } + }, + "node_modules/@vitest/utils": { + "version": "3.2.4", + "resolved": "https://registry.npmjs.org/@vitest/utils/-/utils-3.2.4.tgz", + "integrity": "sha512-fB2V0JFrQSMsCo9HiSq3Ezpdv4iYaXRG1Sx8edX3MwxfyNn83mKiGzOcH+Fkxt4MHxr3y42fQi1oeAInqgX2QA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@vitest/pretty-format": "3.2.4", + "loupe": "^3.1.4", + "tinyrainbow": "^2.0.0" + }, + "funding": { + "url": "https://opencollective.com/vitest" + } + }, + "node_modules/assertion-error": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/assertion-error/-/assertion-error-2.0.1.tgz", + "integrity": "sha512-Izi8RQcffqCeNVgFigKli1ssklIbpHnCYc6AknXGYoB6grJqyeby7jv12JUQgmTAnIDnbck1uxksT4dzN3PWBA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=12" + } + }, + "node_modules/cac": { + "version": "6.7.14", + "resolved": "https://registry.npmjs.org/cac/-/cac-6.7.14.tgz", + "integrity": "sha512-b6Ilus+c3RrdDk+JhLKUAQfzzgLEPy6wcXqS7f/xe1EETvsDP6GORG7SFuOs6cID5YkqchW/LXZbX5bc8j7ZcQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/chai": { + "version": "5.3.3", + "resolved": "https://registry.npmjs.org/chai/-/chai-5.3.3.tgz", + "integrity": "sha512-4zNhdJD/iOjSH0A05ea+Ke6MU5mmpQcbQsSOkgdaUMJ9zTlDTD/GYlwohmIE2u0gaxHYiVHEn1Fw9mZ/ktJWgw==", + "dev": true, + "license": "MIT", + "dependencies": { + "assertion-error": "^2.0.1", + "check-error": "^2.1.1", + "deep-eql": "^5.0.1", + "loupe": "^3.1.0", + "pathval": "^2.0.0" + }, + "engines": { + "node": ">=18" + } + }, + "node_modules/check-error": { + "version": "2.1.3", + "resolved": "https://registry.npmjs.org/check-error/-/check-error-2.1.3.tgz", + "integrity": 
"sha512-PAJdDJusoxnwm1VwW07VWwUN1sl7smmC3OKggvndJFadxxDRyFJBX/ggnu/KE4kQAB7a3Dp8f/YXC1FlUprWmA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 16" + } + }, + "node_modules/debug": { + "version": "4.4.3", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.4.3.tgz", + "integrity": "sha512-RGwwWnwQvkVfavKVt22FGLw+xYSdzARwm0ru6DhTVA3umU5hZc28V3kO4stgYryrTlLpuvgI9GiijltAjNbcqA==", + "dev": true, + "license": "MIT", + "dependencies": { + "ms": "^2.1.3" + }, + "engines": { + "node": ">=6.0" + }, + "peerDependenciesMeta": { + "supports-color": { + "optional": true + } + } + }, + "node_modules/deep-eql": { + "version": "5.0.2", + "resolved": "https://registry.npmjs.org/deep-eql/-/deep-eql-5.0.2.tgz", + "integrity": "sha512-h5k/5U50IJJFpzfL6nO9jaaumfjO/f2NjK/oYB2Djzm4p9L+3T9qWpZqZ2hAbLPuuYq9wrU08WQyBTL5GbPk5Q==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/es-module-lexer": { + "version": "1.7.0", + "resolved": "https://registry.npmjs.org/es-module-lexer/-/es-module-lexer-1.7.0.tgz", + "integrity": "sha512-jEQoCwk8hyb2AZziIOLhDqpm5+2ww5uIE6lkO/6jcOCusfk6LhMHpXXfBLXTZ7Ydyt0j4VoUQv6uGNYbdW+kBA==", + "dev": true, + "license": "MIT" + }, + "node_modules/esbuild": { + "version": "0.27.3", + "resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.27.3.tgz", + "integrity": "sha512-8VwMnyGCONIs6cWue2IdpHxHnAjzxnw2Zr7MkVxB2vjmQ2ivqGFb4LEG3SMnv0Gb2F/G/2yA8zUaiL1gywDCCg==", + "dev": true, + "hasInstallScript": true, + "license": "MIT", + "bin": { + "esbuild": "bin/esbuild" + }, + "engines": { + "node": ">=18" + }, + "optionalDependencies": { + "@esbuild/aix-ppc64": "0.27.3", + "@esbuild/android-arm": "0.27.3", + "@esbuild/android-arm64": "0.27.3", + "@esbuild/android-x64": "0.27.3", + "@esbuild/darwin-arm64": "0.27.3", + "@esbuild/darwin-x64": "0.27.3", + "@esbuild/freebsd-arm64": "0.27.3", + "@esbuild/freebsd-x64": "0.27.3", + "@esbuild/linux-arm": "0.27.3", + "@esbuild/linux-arm64": "0.27.3", + 
"@esbuild/linux-ia32": "0.27.3", + "@esbuild/linux-loong64": "0.27.3", + "@esbuild/linux-mips64el": "0.27.3", + "@esbuild/linux-ppc64": "0.27.3", + "@esbuild/linux-riscv64": "0.27.3", + "@esbuild/linux-s390x": "0.27.3", + "@esbuild/linux-x64": "0.27.3", + "@esbuild/netbsd-arm64": "0.27.3", + "@esbuild/netbsd-x64": "0.27.3", + "@esbuild/openbsd-arm64": "0.27.3", + "@esbuild/openbsd-x64": "0.27.3", + "@esbuild/openharmony-arm64": "0.27.3", + "@esbuild/sunos-x64": "0.27.3", + "@esbuild/win32-arm64": "0.27.3", + "@esbuild/win32-ia32": "0.27.3", + "@esbuild/win32-x64": "0.27.3" + } + }, + "node_modules/estree-walker": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/estree-walker/-/estree-walker-3.0.3.tgz", + "integrity": "sha512-7RUKfXgSMMkzt6ZuXmqapOurLGPPfgj6l9uRZ7lRGolvk0y2yocc35LdcxKC5PQZdn2DMqioAQ2NoWcrTKmm6g==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/estree": "^1.0.0" + } + }, + "node_modules/expect-type": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/expect-type/-/expect-type-1.3.0.tgz", + "integrity": "sha512-knvyeauYhqjOYvQ66MznSMs83wmHrCycNEN6Ao+2AeYEfxUIkuiVxdEa1qlGEPK+We3n0THiDciYSsCcgW/DoA==", + "dev": true, + "license": "Apache-2.0", + "engines": { + "node": ">=12.0.0" + } + }, + "node_modules/fdir": { + "version": "6.5.0", + "resolved": "https://registry.npmjs.org/fdir/-/fdir-6.5.0.tgz", + "integrity": "sha512-tIbYtZbucOs0BRGqPJkshJUYdL+SDH7dVM8gjy+ERp3WAUjLEFJE+02kanyHtwjWOnwrKYBiwAmM0p4kLJAnXg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=12.0.0" + }, + "peerDependencies": { + "picomatch": "^3 || ^4" + }, + "peerDependenciesMeta": { + "picomatch": { + "optional": true + } + } + }, + "node_modules/fsevents": { + "version": "2.3.3", + "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.3.tgz", + "integrity": "sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw==", + "dev": true, + "hasInstallScript": true, + "license": 
"MIT", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": "^8.16.0 || ^10.6.0 || >=11.0.0" + } + }, + "node_modules/js-tokens": { + "version": "9.0.1", + "resolved": "https://registry.npmjs.org/js-tokens/-/js-tokens-9.0.1.tgz", + "integrity": "sha512-mxa9E9ITFOt0ban3j6L5MpjwegGz6lBQmM1IJkWeBZGcMxto50+eWdjC/52xDbS2vy0k7vIMK0Fe2wfL9OQSpQ==", + "dev": true, + "license": "MIT" + }, + "node_modules/loupe": { + "version": "3.2.1", + "resolved": "https://registry.npmjs.org/loupe/-/loupe-3.2.1.tgz", + "integrity": "sha512-CdzqowRJCeLU72bHvWqwRBBlLcMEtIvGrlvef74kMnV2AolS9Y8xUv1I0U/MNAWMhBlKIoyuEgoJ0t/bbwHbLQ==", + "dev": true, + "license": "MIT" + }, + "node_modules/magic-string": { + "version": "0.30.21", + "resolved": "https://registry.npmjs.org/magic-string/-/magic-string-0.30.21.tgz", + "integrity": "sha512-vd2F4YUyEXKGcLHoq+TEyCjxueSeHnFxyyjNp80yg0XV4vUhnDer/lvvlqM/arB5bXQN5K2/3oinyCRyx8T2CQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jridgewell/sourcemap-codec": "^1.5.5" + } + }, + "node_modules/ms": { + "version": "2.1.3", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz", + "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==", + "dev": true, + "license": "MIT" + }, + "node_modules/nanoid": { + "version": "3.3.11", + "resolved": "https://registry.npmjs.org/nanoid/-/nanoid-3.3.11.tgz", + "integrity": "sha512-N8SpfPUnUp1bK+PMYW8qSWdl9U+wwNWI4QKxOYDy9JAro3WMX7p2OeVRF9v+347pnakNevPmiHhNmZ2HbFA76w==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "license": "MIT", + "bin": { + "nanoid": "bin/nanoid.cjs" + }, + "engines": { + "node": "^10 || ^12 || ^13.7 || ^14 || >=15.0.1" + } + }, + "node_modules/pathe": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/pathe/-/pathe-2.0.3.tgz", + "integrity": 
"sha512-WUjGcAqP1gQacoQe+OBJsFA7Ld4DyXuUIjZ5cc75cLHvJ7dtNsTugphxIADwspS+AraAUePCKrSVtPLFj/F88w==", + "dev": true, + "license": "MIT" + }, + "node_modules/pathval": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/pathval/-/pathval-2.0.1.tgz", + "integrity": "sha512-//nshmD55c46FuFw26xV/xFAaB5HF9Xdap7HJBBnrKdAd6/GxDBaNA1870O79+9ueg61cZLSVc+OaFlfmObYVQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 14.16" + } + }, + "node_modules/picocolors": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.1.1.tgz", + "integrity": "sha512-xceH2snhtb5M9liqDsmEw56le376mTZkEX/jEb/RxNFyegNul7eNslCXP9FDj/Lcu0X8KEyMceP2ntpaHrDEVA==", + "dev": true, + "license": "ISC" + }, + "node_modules/picomatch": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-4.0.3.tgz", + "integrity": "sha512-5gTmgEY/sqK6gFXLIsQNH19lWb4ebPDLA4SdLP7dsWkIXHWlG66oPuVvXSGFPppYZz8ZDZq0dYYrbHfBCVUb1Q==", + "dev": true, + "license": "MIT", + "peer": true, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/jonschlinkert" + } + }, + "node_modules/postcss": { + "version": "8.5.8", + "resolved": "https://registry.npmjs.org/postcss/-/postcss-8.5.8.tgz", + "integrity": "sha512-OW/rX8O/jXnm82Ey1k44pObPtdblfiuWnrd8X7GJ7emImCOstunGbXUpp7HdBrFQX6rJzn3sPT397Wp5aCwCHg==", + "dev": true, + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/postcss/" + }, + { + "type": "tidelift", + "url": "https://tidelift.com/funding/github/npm/postcss" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "license": "MIT", + "dependencies": { + "nanoid": "^3.3.11", + "picocolors": "^1.1.1", + "source-map-js": "^1.2.1" + }, + "engines": { + "node": "^10 || ^12 || >=14" + } + }, + "node_modules/rollup": { + "version": "4.59.0", + "resolved": "https://registry.npmjs.org/rollup/-/rollup-4.59.0.tgz", + "integrity": 
"sha512-2oMpl67a3zCH9H79LeMcbDhXW/UmWG/y2zuqnF2jQq5uq9TbM9TVyXvA4+t+ne2IIkBdrLpAaRQAvo7YI/Yyeg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/estree": "1.0.8" + }, + "bin": { + "rollup": "dist/bin/rollup" + }, + "engines": { + "node": ">=18.0.0", + "npm": ">=8.0.0" + }, + "optionalDependencies": { + "@rollup/rollup-android-arm-eabi": "4.59.0", + "@rollup/rollup-android-arm64": "4.59.0", + "@rollup/rollup-darwin-arm64": "4.59.0", + "@rollup/rollup-darwin-x64": "4.59.0", + "@rollup/rollup-freebsd-arm64": "4.59.0", + "@rollup/rollup-freebsd-x64": "4.59.0", + "@rollup/rollup-linux-arm-gnueabihf": "4.59.0", + "@rollup/rollup-linux-arm-musleabihf": "4.59.0", + "@rollup/rollup-linux-arm64-gnu": "4.59.0", + "@rollup/rollup-linux-arm64-musl": "4.59.0", + "@rollup/rollup-linux-loong64-gnu": "4.59.0", + "@rollup/rollup-linux-loong64-musl": "4.59.0", + "@rollup/rollup-linux-ppc64-gnu": "4.59.0", + "@rollup/rollup-linux-ppc64-musl": "4.59.0", + "@rollup/rollup-linux-riscv64-gnu": "4.59.0", + "@rollup/rollup-linux-riscv64-musl": "4.59.0", + "@rollup/rollup-linux-s390x-gnu": "4.59.0", + "@rollup/rollup-linux-x64-gnu": "4.59.0", + "@rollup/rollup-linux-x64-musl": "4.59.0", + "@rollup/rollup-openbsd-x64": "4.59.0", + "@rollup/rollup-openharmony-arm64": "4.59.0", + "@rollup/rollup-win32-arm64-msvc": "4.59.0", + "@rollup/rollup-win32-ia32-msvc": "4.59.0", + "@rollup/rollup-win32-x64-gnu": "4.59.0", + "@rollup/rollup-win32-x64-msvc": "4.59.0", + "fsevents": "~2.3.2" + } + }, + "node_modules/siginfo": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/siginfo/-/siginfo-2.0.0.tgz", + "integrity": "sha512-ybx0WO1/8bSBLEWXZvEd7gMW3Sn3JFlW3TvX1nREbDLRNQNaeNN8WK0meBwPdAaOI7TtRRRJn/Es1zhrrCHu7g==", + "dev": true, + "license": "ISC" + }, + "node_modules/source-map-js": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/source-map-js/-/source-map-js-1.2.1.tgz", + "integrity": 
"sha512-UXWMKhLOwVKb728IUtQPXxfYU+usdybtUrK/8uGE8CQMvrhOpwvzDBwj0QhSL7MQc7vIsISBG8VQ8+IDQxpfQA==", + "dev": true, + "license": "BSD-3-Clause", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/stackback": { + "version": "0.0.2", + "resolved": "https://registry.npmjs.org/stackback/-/stackback-0.0.2.tgz", + "integrity": "sha512-1XMJE5fQo1jGH6Y/7ebnwPOBEkIEnT4QF32d5R1+VXdXveM0IBMJt8zfaxX1P3QhVwrYe+576+jkANtSS2mBbw==", + "dev": true, + "license": "MIT" + }, + "node_modules/std-env": { + "version": "3.10.0", + "resolved": "https://registry.npmjs.org/std-env/-/std-env-3.10.0.tgz", + "integrity": "sha512-5GS12FdOZNliM5mAOxFRg7Ir0pWz8MdpYm6AY6VPkGpbA7ZzmbzNcBJQ0GPvvyWgcY7QAhCgf9Uy89I03faLkg==", + "dev": true, + "license": "MIT" + }, + "node_modules/strip-literal": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/strip-literal/-/strip-literal-3.1.0.tgz", + "integrity": "sha512-8r3mkIM/2+PpjHoOtiAW8Rg3jJLHaV7xPwG+YRGrv6FP0wwk/toTpATxWYOW0BKdWwl82VT2tFYi5DlROa0Mxg==", + "dev": true, + "license": "MIT", + "dependencies": { + "js-tokens": "^9.0.1" + }, + "funding": { + "url": "https://github.com/sponsors/antfu" + } + }, + "node_modules/tinybench": { + "version": "2.9.0", + "resolved": "https://registry.npmjs.org/tinybench/-/tinybench-2.9.0.tgz", + "integrity": "sha512-0+DUvqWMValLmha6lr4kD8iAMK1HzV0/aKnCtWb9v9641TnP/MFb7Pc2bxoxQjTXAErryXVgUOfv2YqNllqGeg==", + "dev": true, + "license": "MIT" + }, + "node_modules/tinyexec": { + "version": "0.3.2", + "resolved": "https://registry.npmjs.org/tinyexec/-/tinyexec-0.3.2.tgz", + "integrity": "sha512-KQQR9yN7R5+OSwaK0XQoj22pwHoTlgYqmUscPYoknOoWCWfj/5/ABTMRi69FrKU5ffPVh5QcFikpWJI/P1ocHA==", + "dev": true, + "license": "MIT" + }, + "node_modules/tinyglobby": { + "version": "0.2.15", + "resolved": "https://registry.npmjs.org/tinyglobby/-/tinyglobby-0.2.15.tgz", + "integrity": "sha512-j2Zq4NyQYG5XMST4cbs02Ak8iJUdxRM0XI5QyxXuZOzKOINmWurp3smXu3y5wDcJrptwpSjgXHzIQxR0omXljQ==", + "dev": true, + "license": "MIT", + 
"dependencies": { + "fdir": "^6.5.0", + "picomatch": "^4.0.3" + }, + "engines": { + "node": ">=12.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/SuperchupuDev" + } + }, + "node_modules/tinypool": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/tinypool/-/tinypool-1.1.1.tgz", + "integrity": "sha512-Zba82s87IFq9A9XmjiX5uZA/ARWDrB03OHlq+Vw1fSdt0I+4/Kutwy8BP4Y/y/aORMo61FQ0vIb5j44vSo5Pkg==", + "dev": true, + "license": "MIT", + "engines": { + "node": "^18.0.0 || >=20.0.0" + } + }, + "node_modules/tinyrainbow": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/tinyrainbow/-/tinyrainbow-2.0.0.tgz", + "integrity": "sha512-op4nsTR47R6p0vMUUoYl/a+ljLFVtlfaXkLQmqfLR1qHma1h/ysYk4hEXZ880bf2CYgTskvTa/e196Vd5dDQXw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/tinyspy": { + "version": "4.0.4", + "resolved": "https://registry.npmjs.org/tinyspy/-/tinyspy-4.0.4.tgz", + "integrity": "sha512-azl+t0z7pw/z958Gy9svOTuzqIk6xq+NSheJzn5MMWtWTFywIacg2wUlzKFGtt3cthx0r2SxMK0yzJOR0IES7Q==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/typescript": { + "version": "5.9.3", + "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.9.3.tgz", + "integrity": "sha512-jl1vZzPDinLr9eUt3J/t7V6FgNEw9QjvBPdysz9KfQDD41fQrC2Y4vKQdiaUpFT4bXlb1RHhLpp8wtm6M5TgSw==", + "dev": true, + "license": "Apache-2.0", + "bin": { + "tsc": "bin/tsc", + "tsserver": "bin/tsserver" + }, + "engines": { + "node": ">=14.17" + } + }, + "node_modules/vite": { + "version": "7.3.1", + "resolved": "https://registry.npmjs.org/vite/-/vite-7.3.1.tgz", + "integrity": "sha512-w+N7Hifpc3gRjZ63vYBXA56dvvRlNWRczTdmCBBa+CotUzAPf5b7YMdMR/8CQoeYE5LX3W4wj6RYTgonm1b9DA==", + "dev": true, + "license": "MIT", + "dependencies": { + "esbuild": "^0.27.0", + "fdir": "^6.5.0", + "picomatch": "^4.0.3", + "postcss": "^8.5.6", + "rollup": "^4.43.0", + "tinyglobby": "^0.2.15" + }, + "bin": { + 
"vite": "bin/vite.js" + }, + "engines": { + "node": "^20.19.0 || >=22.12.0" + }, + "funding": { + "url": "https://github.com/vitejs/vite?sponsor=1" + }, + "optionalDependencies": { + "fsevents": "~2.3.3" + }, + "peerDependencies": { + "@types/node": "^20.19.0 || >=22.12.0", + "jiti": ">=1.21.0", + "less": "^4.0.0", + "lightningcss": "^1.21.0", + "sass": "^1.70.0", + "sass-embedded": "^1.70.0", + "stylus": ">=0.54.8", + "sugarss": "^5.0.0", + "terser": "^5.16.0", + "tsx": "^4.8.1", + "yaml": "^2.4.2" + }, + "peerDependenciesMeta": { + "@types/node": { + "optional": true + }, + "jiti": { + "optional": true + }, + "less": { + "optional": true + }, + "lightningcss": { + "optional": true + }, + "sass": { + "optional": true + }, + "sass-embedded": { + "optional": true + }, + "stylus": { + "optional": true + }, + "sugarss": { + "optional": true + }, + "terser": { + "optional": true + }, + "tsx": { + "optional": true + }, + "yaml": { + "optional": true + } + } + }, + "node_modules/vite-node": { + "version": "3.2.4", + "resolved": "https://registry.npmjs.org/vite-node/-/vite-node-3.2.4.tgz", + "integrity": "sha512-EbKSKh+bh1E1IFxeO0pg1n4dvoOTt0UDiXMd/qn++r98+jPO1xtJilvXldeuQ8giIB5IkpjCgMleHMNEsGH6pg==", + "dev": true, + "license": "MIT", + "dependencies": { + "cac": "^6.7.14", + "debug": "^4.4.1", + "es-module-lexer": "^1.7.0", + "pathe": "^2.0.3", + "vite": "^5.0.0 || ^6.0.0 || ^7.0.0-0" + }, + "bin": { + "vite-node": "vite-node.mjs" + }, + "engines": { + "node": "^18.0.0 || ^20.0.0 || >=22.0.0" + }, + "funding": { + "url": "https://opencollective.com/vitest" + } + }, + "node_modules/vitest": { + "version": "3.2.4", + "resolved": "https://registry.npmjs.org/vitest/-/vitest-3.2.4.tgz", + "integrity": "sha512-LUCP5ev3GURDysTWiP47wRRUpLKMOfPh+yKTx3kVIEiu5KOMeqzpnYNsKyOoVrULivR8tLcks4+lga33Whn90A==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/chai": "^5.2.2", + "@vitest/expect": "3.2.4", + "@vitest/mocker": "3.2.4", + "@vitest/pretty-format": "^3.2.4", + 
"@vitest/runner": "3.2.4", + "@vitest/snapshot": "3.2.4", + "@vitest/spy": "3.2.4", + "@vitest/utils": "3.2.4", + "chai": "^5.2.0", + "debug": "^4.4.1", + "expect-type": "^1.2.1", + "magic-string": "^0.30.17", + "pathe": "^2.0.3", + "picomatch": "^4.0.2", + "std-env": "^3.9.0", + "tinybench": "^2.9.0", + "tinyexec": "^0.3.2", + "tinyglobby": "^0.2.14", + "tinypool": "^1.1.1", + "tinyrainbow": "^2.0.0", + "vite": "^5.0.0 || ^6.0.0 || ^7.0.0-0", + "vite-node": "3.2.4", + "why-is-node-running": "^2.3.0" + }, + "bin": { + "vitest": "vitest.mjs" + }, + "engines": { + "node": "^18.0.0 || ^20.0.0 || >=22.0.0" + }, + "funding": { + "url": "https://opencollective.com/vitest" + }, + "peerDependencies": { + "@edge-runtime/vm": "*", + "@types/debug": "^4.1.12", + "@types/node": "^18.0.0 || ^20.0.0 || >=22.0.0", + "@vitest/browser": "3.2.4", + "@vitest/ui": "3.2.4", + "happy-dom": "*", + "jsdom": "*" + }, + "peerDependenciesMeta": { + "@edge-runtime/vm": { + "optional": true + }, + "@types/debug": { + "optional": true + }, + "@types/node": { + "optional": true + }, + "@vitest/browser": { + "optional": true + }, + "@vitest/ui": { + "optional": true + }, + "happy-dom": { + "optional": true + }, + "jsdom": { + "optional": true + } + } + }, + "node_modules/why-is-node-running": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/why-is-node-running/-/why-is-node-running-2.3.0.tgz", + "integrity": "sha512-hUrmaWBdVDcxvYqnyh09zunKzROWjbZTiNy8dBEjkS7ehEDQibXJ7XvlmtbwuTclUiIyN+CyXQD4Vmko8fNm8w==", + "dev": true, + "license": "MIT", + "dependencies": { + "siginfo": "^2.0.0", + "stackback": "0.0.2" + }, + "bin": { + "why-is-node-running": "cli.js" + }, + "engines": { + "node": ">=8" + } + } + } +} diff --git a/bindings/node/package.json b/bindings/node/package.json new file mode 100644 index 0000000..323fed1 --- /dev/null +++ b/bindings/node/package.json @@ -0,0 +1,20 @@ +{ + "name": "amplifier-core", + "version": "1.0.10", + "main": "index.js", + "types": "index.d.ts", + 
"scripts": { + "build": "napi build --release --platform", + "build:debug": "napi build --platform", + "test": "vitest run" + }, + "napi": { + "name": "amplifier-core", + "triples": {} + }, + "devDependencies": { + "@napi-rs/cli": "^2", + "vitest": "^3", + "typescript": "^5" + } +} diff --git a/bindings/node/src/lib.rs b/bindings/node/src/lib.rs new file mode 100644 index 0000000..61931e3 --- /dev/null +++ b/bindings/node/src/lib.rs @@ -0,0 +1,993 @@ +//! # amplifier-core Node.js bindings (Napi-RS) +//! +//! This module defines the FFI type contract between Rust and Node.js. +//! The enums and structs here are the authoritative boundary types — keep +//! the `From` impls in sync whenever upstream `amplifier_core::models` changes. +//! +//! Planned classes: +//! +//! | Rust struct | JS class | +//! |-------------------|----------------------| +//! | Session | JsSession | +//! | HookRegistry | JsHookRegistry | +//! | CancellationToken | JsCancellationToken | +//! | Coordinator | JsCoordinator | + +#[macro_use] +extern crate napi_derive; + +use std::collections::HashMap; +use std::future::Future; +use std::pin::Pin; +use std::sync::Arc; + +use napi::bindgen_prelude::Promise; +use napi::bindgen_prelude::*; +use napi::threadsafe_function::{ErrorStrategy, ThreadSafeCallContext, ThreadsafeFunction}; +use tokio::sync::Mutex; + +use amplifier_core::errors::HookError; +use amplifier_core::models as core_models; +use amplifier_core::models::HookResult; +use amplifier_core::traits::HookHandler; + +#[napi] +pub fn hello() -> String { + "Hello from amplifier-core native addon!".to_string() +} + +// --------------------------------------------------------------------------- +// Enums — exported as TypeScript string unions via #[napi(string_enum)] +// --------------------------------------------------------------------------- + +#[napi(string_enum)] +pub enum HookAction { + Continue, + Deny, + Modify, + InjectContext, + AskUser, +} + +#[napi(string_enum)] +pub enum SessionState { 
+ Running, + Completed, + Failed, + Cancelled, +} + +#[napi(string_enum)] +pub enum ContextInjectionRole { + System, + User, + Assistant, +} + +#[napi(string_enum)] +pub enum ApprovalDefault { + Allow, + Deny, +} + +#[napi(string_enum)] +pub enum UserMessageLevel { + Info, + Warning, + Error, +} + +#[napi(string_enum)] +pub enum Role { + System, + Developer, + User, + Assistant, + Function, + Tool, +} + +// --------------------------------------------------------------------------- +// Bidirectional From conversions: HookAction <-> amplifier_core::models::HookAction +// --------------------------------------------------------------------------- + +impl From for HookAction { + fn from(action: core_models::HookAction) -> Self { + match action { + core_models::HookAction::Continue => HookAction::Continue, + core_models::HookAction::Deny => HookAction::Deny, + core_models::HookAction::Modify => HookAction::Modify, + core_models::HookAction::InjectContext => HookAction::InjectContext, + core_models::HookAction::AskUser => HookAction::AskUser, + } + } +} + +impl From for core_models::HookAction { + fn from(action: HookAction) -> Self { + match action { + HookAction::Continue => core_models::HookAction::Continue, + HookAction::Deny => core_models::HookAction::Deny, + HookAction::Modify => core_models::HookAction::Modify, + HookAction::InjectContext => core_models::HookAction::InjectContext, + HookAction::AskUser => core_models::HookAction::AskUser, + } + } +} + +// --------------------------------------------------------------------------- +// Bidirectional From conversions: SessionState <-> amplifier_core::models::SessionState +// --------------------------------------------------------------------------- + +impl From for SessionState { + fn from(state: core_models::SessionState) -> Self { + match state { + core_models::SessionState::Running => SessionState::Running, + core_models::SessionState::Completed => SessionState::Completed, + core_models::SessionState::Failed => 
SessionState::Failed, + core_models::SessionState::Cancelled => SessionState::Cancelled, + } + } +} + +impl From for core_models::SessionState { + fn from(state: SessionState) -> Self { + match state { + SessionState::Running => core_models::SessionState::Running, + SessionState::Completed => core_models::SessionState::Completed, + SessionState::Failed => core_models::SessionState::Failed, + SessionState::Cancelled => core_models::SessionState::Cancelled, + } + } +} + +// --------------------------------------------------------------------------- +// Bidirectional From conversions: ContextInjectionRole +// --------------------------------------------------------------------------- + +impl From for ContextInjectionRole { + fn from(role: core_models::ContextInjectionRole) -> Self { + match role { + core_models::ContextInjectionRole::System => ContextInjectionRole::System, + core_models::ContextInjectionRole::User => ContextInjectionRole::User, + core_models::ContextInjectionRole::Assistant => ContextInjectionRole::Assistant, + } + } +} + +impl From for core_models::ContextInjectionRole { + fn from(role: ContextInjectionRole) -> Self { + match role { + ContextInjectionRole::System => core_models::ContextInjectionRole::System, + ContextInjectionRole::User => core_models::ContextInjectionRole::User, + ContextInjectionRole::Assistant => core_models::ContextInjectionRole::Assistant, + } + } +} + +// --------------------------------------------------------------------------- +// Bidirectional From conversions: UserMessageLevel +// --------------------------------------------------------------------------- + +impl From for UserMessageLevel { + fn from(level: core_models::UserMessageLevel) -> Self { + match level { + core_models::UserMessageLevel::Info => UserMessageLevel::Info, + core_models::UserMessageLevel::Warning => UserMessageLevel::Warning, + core_models::UserMessageLevel::Error => UserMessageLevel::Error, + } + } +} + +impl From for core_models::UserMessageLevel { + 
fn from(level: UserMessageLevel) -> Self { + match level { + UserMessageLevel::Info => core_models::UserMessageLevel::Info, + UserMessageLevel::Warning => core_models::UserMessageLevel::Warning, + UserMessageLevel::Error => core_models::UserMessageLevel::Error, + } + } +} + +// --------------------------------------------------------------------------- +// Bidirectional From conversions: ApprovalDefault +// --------------------------------------------------------------------------- + +impl From for ApprovalDefault { + fn from(default: core_models::ApprovalDefault) -> Self { + match default { + core_models::ApprovalDefault::Allow => ApprovalDefault::Allow, + core_models::ApprovalDefault::Deny => ApprovalDefault::Deny, + } + } +} + +impl From for core_models::ApprovalDefault { + fn from(default: ApprovalDefault) -> Self { + match default { + ApprovalDefault::Allow => core_models::ApprovalDefault::Allow, + ApprovalDefault::Deny => core_models::ApprovalDefault::Deny, + } + } +} + +// --------------------------------------------------------------------------- +// Structs — exported as TypeScript interfaces via #[napi(object)] +// --------------------------------------------------------------------------- + +#[napi(object)] +pub struct JsToolResult { + pub success: bool, + pub output: Option, + pub error: Option, +} + +#[napi(object)] +pub struct JsToolSpec { + pub name: String, + pub description: Option, + pub parameters_json: String, +} + +#[napi(object)] +pub struct JsHookResult { + pub action: HookAction, + pub reason: Option, + pub context_injection: Option, + pub context_injection_role: Option, + pub ephemeral: Option, + pub suppress_output: Option, + pub user_message: Option, + pub user_message_level: Option, + pub user_message_source: Option, + pub approval_prompt: Option, + pub approval_timeout: Option, + pub approval_default: Option, +} + +#[napi(object)] +pub struct JsSessionConfig { + pub config_json: String, +} + +// 
---------------------------------------------------------------------------
+// Classes — exported as TypeScript classes via #[napi]
+// ---------------------------------------------------------------------------
+
+/// Wraps `amplifier_core::CancellationToken` for Node.js.
+///
+/// State machine: None → Graceful → Immediate, with reset back to None.
+#[napi]
+pub struct JsCancellationToken {
+    inner: amplifier_core::CancellationToken,
+}
+
+impl Default for JsCancellationToken {
+    fn default() -> Self {
+        Self::new()
+    }
+}
+
+#[napi]
+impl JsCancellationToken {
+    #[napi(constructor)]
+    pub fn new() -> Self {
+        Self {
+            inner: amplifier_core::CancellationToken::new(),
+        }
+    }
+
+    /// Internal factory for wrapping an existing kernel token.
+    pub fn from_inner(inner: amplifier_core::CancellationToken) -> Self {
+        Self { inner }
+    }
+
+    #[napi(getter)]
+    pub fn is_cancelled(&self) -> bool {
+        self.inner.is_cancelled()
+    }
+
+    #[napi(getter)]
+    pub fn is_graceful(&self) -> bool {
+        self.inner.is_graceful()
+    }
+
+    #[napi(getter)]
+    pub fn is_immediate(&self) -> bool {
+        self.inner.is_immediate()
+    }
+
+    #[napi]
+    pub fn request_graceful(&self, _reason: Option<String>) {
+        self.inner.request_graceful();
+    }
+
+    #[napi]
+    pub fn request_immediate(&self, _reason: Option<String>) {
+        self.inner.request_immediate();
+    }
+
+    #[napi]
+    pub fn reset(&self) {
+        self.inner.reset();
+    }
+}
+
+// ---------------------------------------------------------------------------
+// JsHookHandlerBridge — lets JS functions act as Rust HookHandler trait objects
+// ---------------------------------------------------------------------------
+
+/// Bridges a JS callback function to the Rust `HookHandler` trait via
+/// `ThreadsafeFunction`. The callback receives `(event: string, data: string)`
+/// and returns a JSON string representing a `HookResult`.
+struct JsHookHandlerBridge {
+    callback: ThreadsafeFunction<(String, String), ErrorStrategy::Fatal>,
+}
+
+// Safety: ThreadsafeFunction is designed for cross-thread use in napi-rs.
+unsafe impl Send for JsHookHandlerBridge {}
+unsafe impl Sync for JsHookHandlerBridge {}
+
+impl HookHandler for JsHookHandlerBridge {
+    fn handle(
+        &self,
+        event: &str,
+        data: serde_json::Value,
+    ) -> Pin<Box<dyn Future<Output = Result<HookResult, HookError>> + Send + '_>> {
+        let event = event.to_string();
+        let data_str = serde_json::to_string(&data).unwrap_or_else(|e| {
+            eprintln!(
+                "amplifier-core-node: failed to serialize hook data to JSON: {e}. Defaulting to empty object."
+            );
+            "{}".to_string()
+        });
+        Box::pin(async move {
+            let result_str: String =
+                self.callback
+                    .call_async((event, data_str))
+                    .await
+                    .map_err(|e| HookError::HandlerFailed {
+                        message: e.to_string(),
+                        handler_name: None,
+                    })?;
+            let hook_result: HookResult = serde_json::from_str(&result_str).unwrap_or_else(|e| {
+                log::error!(
+                    "SECURITY: Hook handler returned unparseable result — failing closed (Deny): {e} — json: {result_str}"
+                );
+                HookResult {
+                    action: core_models::HookAction::Deny,
+                    reason: Some("Hook handler returned invalid response".to_string()),
+                    ..Default::default()
+                }
+            });
+            Ok(hook_result)
+        })
+    }
+}
+
+// ---------------------------------------------------------------------------
+// HookResult converter
+// ---------------------------------------------------------------------------
+
+fn hook_result_to_js(result: HookResult) -> JsHookResult {
+    JsHookResult {
+        action: result.action.into(),
+        reason: result.reason,
+        context_injection: result.context_injection,
+        context_injection_role: Some(result.context_injection_role.into()),
+        ephemeral: Some(result.ephemeral),
+        suppress_output: Some(result.suppress_output),
+        user_message: result.user_message,
+        user_message_level: Some(result.user_message_level.into()),
+        user_message_source: result.user_message_source,
+        approval_prompt: result.approval_prompt,
+        approval_timeout:
Some(result.approval_timeout), + approval_default: Some(result.approval_default.into()), + } +} + +// --------------------------------------------------------------------------- +// JsHookRegistry — wraps amplifier_core::HookRegistry for Node.js +// --------------------------------------------------------------------------- + +/// Wraps `amplifier_core::HookRegistry` for Node.js. +/// +/// Provides register/emit/listHandlers/setDefaultFields — the event backbone +/// of the kernel. +#[napi] +pub struct JsHookRegistry { + pub(crate) inner: Arc, +} + +impl Default for JsHookRegistry { + fn default() -> Self { + Self::new() + } +} + +#[napi] +impl JsHookRegistry { + #[napi(constructor)] + pub fn new() -> Self { + Self { + inner: Arc::new(amplifier_core::HookRegistry::new()), + } + } + + /// Creates a new **detached** (empty) registry. + /// + /// Unlike `JsCancellationToken::from_inner`, HookRegistry cannot be cheaply + /// cloned or wrapped from a reference, so this always creates an empty + /// registry. When Coordinator manages ownership, this should accept + /// `Arc` to share state. + pub fn new_detached() -> Self { + Self { + inner: Arc::new(amplifier_core::HookRegistry::new()), + } + } + + /// Register a hook handler for the given event name. + /// + /// ## Handler signature + /// + /// The `handler` callback receives two string arguments and must return a + /// JSON-serialized `HookResult` (or a `Promise` that resolves to one): + /// + /// ```ts + /// (event: string, dataJson: string) => string | Promise + /// ``` + /// + /// Where the return value is a JSON string matching the `JsHookResult` + /// shape, e.g. `'{"action":"Continue"}'`. If the handler returns an + /// invalid JSON string, the kernel fails closed and treats it as `Deny`. 
+ #[napi] + pub fn register( + &self, + event: String, + handler: JsFunction, + priority: i32, + name: String, + ) -> Result<()> { + let tsfn: ThreadsafeFunction<(String, String), ErrorStrategy::Fatal> = handler + .create_threadsafe_function(0, |ctx: ThreadSafeCallContext<(String, String)>| { + let event_str = ctx.env.create_string(&ctx.value.0)?; + let data_str = ctx.env.create_string(&ctx.value.1)?; + Ok(vec![event_str.into_unknown(), data_str.into_unknown()]) + })?; + + let bridge = JsHookHandlerBridge { callback: tsfn }; + // HandlerId unused — unregister not yet exposed to JS + let _ = self + .inner + .register(&event, Arc::new(bridge), priority, Some(name)); + Ok(()) + } + + #[napi] + pub async fn emit(&self, event: String, data_json: String) -> Result { + let data: serde_json::Value = + serde_json::from_str(&data_json).map_err(|e| Error::from_reason(e.to_string()))?; + let result = self.inner.emit(&event, data).await; + Ok(hook_result_to_js(result)) + } + + #[napi] + pub fn list_handlers(&self) -> HashMap> { + self.inner.list_handlers(None) + } + + #[napi] + pub fn set_default_fields(&self, defaults_json: String) -> Result<()> { + let defaults: serde_json::Value = + serde_json::from_str(&defaults_json).map_err(|e| Error::from_reason(e.to_string()))?; + self.inner.set_default_fields(defaults); + Ok(()) + } +} + +// --------------------------------------------------------------------------- +// JsCoordinator — wraps amplifier_core::Coordinator for Node.js +// --------------------------------------------------------------------------- + +/// Wraps `amplifier_core::Coordinator` for Node.js — the central hub holding +/// module mount points, capabilities, hook registry, cancellation token, and config. +/// +/// Implements the hybrid coordinator pattern: JS-side storage for TS module +/// objects, Rust kernel for everything else. 
+#[napi] +pub struct JsCoordinator { + pub(crate) inner: Arc, +} + +#[napi] +impl JsCoordinator { + #[napi(constructor)] + pub fn new(config_json: String) -> Result { + let config: HashMap = + serde_json::from_str(&config_json).map_err(|e| Error::from_reason(e.to_string()))?; + Ok(Self { + inner: Arc::new(amplifier_core::Coordinator::new(config)), + }) + } + + #[napi(getter)] + pub fn tool_names(&self) -> Vec { + self.inner.tool_names() + } + + #[napi(getter)] + pub fn provider_names(&self) -> Vec { + self.inner.provider_names() + } + + #[napi(getter)] + pub fn has_orchestrator(&self) -> bool { + self.inner.has_orchestrator() + } + + #[napi(getter)] + pub fn has_context(&self) -> bool { + self.inner.has_context() + } + + #[napi] + pub fn register_capability(&self, name: String, value_json: String) -> Result<()> { + let value: serde_json::Value = + serde_json::from_str(&value_json).map_err(|e| Error::from_reason(e.to_string()))?; + self.inner.register_capability(&name, value); + Ok(()) + } + + #[napi] + pub fn get_capability(&self, name: String) -> Result> { + match self.inner.get_capability(&name) { + Some(v) => serde_json::to_string(&v) + .map(Some) + .map_err(|e| Error::from_reason(e.to_string())), + None => Ok(None), + } + } + + /// Creates a new **detached** (empty) JsHookRegistry. + /// + /// ⚠️ **Each call returns a brand-new, empty registry** — hooks registered + /// on one instance are invisible to the next. This is a known limitation: + /// `Coordinator` owns its `HookRegistry` by value, not behind `Arc`, so + /// the binding cannot share state across calls. + /// + /// The method name (`createHookRegistry`) intentionally signals "creates new + /// instance" — a getter property would imply referential stability in JS. + /// + /// **Workaround:** create a `JsHookRegistry` directly and hold a reference. + /// + /// Future TODO #1: restructure the kernel to hold `Arc` inside + /// `Coordinator` so this method can share the same registry instance. 
+ #[napi] + pub fn create_hook_registry(&self) -> JsHookRegistry { + log::warn!( + "JsCoordinator::createHookRegistry() — returns a new detached HookRegistry; \ + hooks registered on one call are NOT visible via the Coordinator's internal \ + registry. Hold the returned instance directly. (Future TODO #1)" + ); + JsHookRegistry::new_detached() + } + + #[napi(getter)] + pub fn cancellation(&self) -> JsCancellationToken { + JsCancellationToken::from_inner(self.inner.cancellation().clone()) + } + + #[napi(getter)] + pub fn config(&self) -> Result { + serde_json::to_string(self.inner.config()).map_err(|e| Error::from_reason(e.to_string())) + } + + #[napi] + pub fn reset_turn(&self) { + self.inner.reset_turn(); + } + + #[napi] + pub fn to_dict(&self) -> HashMap { + self.inner.to_dict() + } + + #[napi] + pub async fn cleanup(&self) -> Result<()> { + self.inner.cleanup().await; + Ok(()) + } +} + +// --------------------------------------------------------------------------- +// JsAmplifierSession — wraps amplifier_core::Session for Node.js +// --------------------------------------------------------------------------- + +/// Wraps `amplifier_core::Session` for Node.js — the top-level entry point. +/// +/// Lifecycle: `new AmplifierSession(config) → initialize() → execute(prompt) → cleanup()`. +/// Wires together Coordinator, HookRegistry, and CancellationToken. +/// +/// Known limitation: `coordinator` getter creates a separate Coordinator instance +/// because the kernel Session owns its Coordinator by value, not behind Arc. +/// Sharing requires restructuring the Rust kernel — tracked as Future TODO #1. 
+#[napi]
+pub struct JsAmplifierSession {
+    inner: Arc<Mutex<amplifier_core::Session>>,
+    cached_session_id: String,
+    cached_parent_id: Option<String>,
+    cached_config: HashMap<String, serde_json::Value>,
+}
+
+#[napi]
+impl JsAmplifierSession {
+    #[napi(constructor)]
+    pub fn new(
+        config_json: String,
+        session_id: Option<String>,
+        parent_id: Option<String>,
+    ) -> Result<Self> {
+        let value: serde_json::Value = serde_json::from_str(&config_json)
+            .map_err(|e| Error::from_reason(format!("Invalid config JSON: {e}")))?;
+
+        let config = amplifier_core::SessionConfig::from_value(value.clone())
+            .map_err(|e| Error::from_reason(e.to_string()))?;
+
+        let cached_config: HashMap<String, serde_json::Value> = serde_json::from_value(value)
+            .map_err(|e| Error::from_reason(format!("invalid JSON: {e}")))?;
+
+        let session = amplifier_core::Session::new(config, session_id.clone(), parent_id.clone());
+        let cached_session_id = session.session_id().to_string();
+
+        Ok(Self {
+            inner: Arc::new(Mutex::new(session)),
+            cached_session_id,
+            cached_parent_id: parent_id,
+            cached_config,
+        })
+    }
+
+    #[napi(getter)]
+    pub fn session_id(&self) -> &str {
+        &self.cached_session_id
+    }
+
+    #[napi(getter)]
+    pub fn parent_id(&self) -> Option<String> {
+        self.cached_parent_id.clone()
+    }
+
+    #[napi(getter)]
+    pub fn is_initialized(&self) -> bool {
+        match self.inner.try_lock() {
+            Ok(session) => session.is_initialized(),
+            // Safe default: lock is only held during async cleanup(), which sets
+            // initialized to false — so false is a correct conservative fallback.
+            Err(_) => false,
+        }
+    }
+
+    /// Current session lifecycle state as a lowercase string.
+    ///
+    /// Returns one of the `SessionState` variant strings:
+    /// - `"Running"` — session is active
+    /// - `"Completed"` — session finished successfully
+    /// - `"Failed"` — session encountered a fatal error
+    /// - `"Cancelled"` — session was cancelled via the cancellation token
+    ///
+    /// Falls back to `"running"` if the session lock is held during `cleanup()`.
+ #[napi(getter)] + pub fn status(&self) -> String { + match self.inner.try_lock() { + Ok(session) => session.status().to_string(), + // Safe default: lock is only held during async cleanup(), and sessions + // start as "running" — returning "running" during cleanup is tolerable. + Err(_) => "running".to_string(), + } + } + + /// Creates a new **fresh** JsCoordinator from this session's cached config. + /// + /// ⚠️ **Each call allocates a new Coordinator** — capabilities registered on + /// one instance are invisible to the next. This is a known limitation: + /// `Session` owns its `Coordinator` by value, not behind `Arc`, so the + /// binding cannot expose the session's live coordinator. + /// + /// The method name (`createCoordinator`) intentionally signals "creates new + /// instance" — a getter property would imply referential stability in JS. + /// + /// **Workaround:** call `createCoordinator()` once, hold the returned instance, + /// and register capabilities on it before passing it to other APIs. + /// + /// Future TODO #1: restructure the kernel to hold `Arc` inside + /// `Session` so this method can return a handle to the session's actual coordinator. + #[napi] + pub fn create_coordinator(&self) -> JsCoordinator { + log::warn!( + "JsAmplifierSession::createCoordinator() — returns a new Coordinator built from \ + cached config; capabilities registered on one call are NOT visible on the next. \ + Hold the returned instance directly. (Future TODO #1)" + ); + JsCoordinator { + inner: Arc::new(amplifier_core::Coordinator::new(self.cached_config.clone())), + } + } + + #[napi] + pub fn set_initialized(&self) { + match self.inner.try_lock() { + Ok(session) => session.set_initialized(), + // State mutation failed — unlike read-only getters, this warrants a warning. + // Lock contention only occurs during async cleanup(), so this is unlikely + // in practice, but callers should know the mutation didn't happen. 
+ Err(_) => eprintln!( + "amplifier-core-node: set_initialized() skipped — session lock held (cleanup in progress?)" + ), + } + } + + #[napi] + pub async fn cleanup(&self) -> Result<()> { + let session = self.inner.lock().await; + session.cleanup().await; + Ok(()) + } +} + +// --------------------------------------------------------------------------- +// JsToolBridge — lets TS authors implement Tool as plain TS objects +// --------------------------------------------------------------------------- + +/// Bridges a TypeScript tool object to Rust via `ThreadsafeFunction`. +/// +/// In the hybrid coordinator pattern, these bridge objects are stored in a +/// JS-side Map (not in the Rust Coordinator). The JS orchestrator retrieves +/// them by name and calls `execute()` directly. +#[napi] +pub struct JsToolBridge { + tool_name: String, + tool_description: String, + parameters_json: String, + execute_fn: ThreadsafeFunction, +} + +#[napi] +impl JsToolBridge { + #[napi( + constructor, + ts_args_type = "name: string, description: string, parametersJson: string, executeFn: (inputJson: string) => Promise" + )] + pub fn new( + name: String, + description: String, + parameters_json: String, + execute_fn: JsFunction, + ) -> Result { + let tsfn: ThreadsafeFunction = execute_fn + .create_threadsafe_function(0, |ctx: ThreadSafeCallContext| { + let input_str = ctx.env.create_string(&ctx.value)?; + Ok(vec![input_str.into_unknown()]) + })?; + + Ok(Self { + tool_name: name, + tool_description: description, + parameters_json, + execute_fn: tsfn, + }) + } + + #[napi(getter)] + pub fn name(&self) -> &str { + &self.tool_name + } + + #[napi(getter)] + pub fn description(&self) -> &str { + &self.tool_description + } + + #[napi] + pub async fn execute(&self, input_json: String) -> Result { + let promise: Promise = self + .execute_fn + .call_async(input_json) + .await + .map_err(|e| Error::from_reason(e.to_string()))?; + promise.await.map_err(|e| Error::from_reason(e.to_string())) + } + + 
#[napi] + pub fn get_spec(&self) -> String { + let params: serde_json::Value = + serde_json::from_str(&self.parameters_json).unwrap_or_else(|e| { + eprintln!( + "amplifier-core-node: JsToolBridge::get_spec() failed to parse parameters_json: {e}. Defaulting to empty object." + ); + serde_json::Value::Object(serde_json::Map::new()) + }); + serde_json::json!({ + "name": self.tool_name, + "description": self.tool_description, + "parameters": params + }) + .to_string() + } +} + +// --------------------------------------------------------------------------- +// Error bridging — Rust errors → typed JS error objects +// --------------------------------------------------------------------------- + +/// Structured error object returned to JS with a typed `code` property. +#[napi(object)] +pub struct JsAmplifierError { + pub code: String, + pub message: String, +} + +/// Maps a lowercase variant name to its error code string. +/// +/// Variant mapping: +/// - `"session"` → `"SessionError"` +/// - `"tool"` → `"ToolError"` +/// - `"provider"` → `"ProviderError"` +/// - `"hook"` → `"HookError"` +/// - `"context"` → `"ContextError"` +/// - anything else → `"AmplifierError"` +fn error_code_for_variant(variant: &str) -> &'static str { + match variant { + "session" => "SessionError", + "tool" => "ToolError", + "provider" => "ProviderError", + "hook" => "HookError", + "context" => "ContextError", + _ => "AmplifierError", + } +} + +/// Converts an error variant name and message into a typed `JsAmplifierError`. +/// +/// See [`error_code_for_variant`] for the variant → code mapping. +#[napi] +pub fn amplifier_error_to_js(variant: String, message: String) -> JsAmplifierError { + let code = error_code_for_variant(&variant).to_string(); + JsAmplifierError { code, message } +} + +/// Internal helper: converts an `AmplifierError` into a `napi::Error` with a +/// `[Code] message` format suitable for crossing the FFI boundary. 
+/// +/// Uses [`error_code_for_variant`] for consistent code mapping. +#[allow(dead_code)] // Used when async methods expose Result across FFI +fn amplifier_error_to_napi(err: amplifier_core::errors::AmplifierError) -> napi::Error { + let (variant, msg) = match &err { + amplifier_core::errors::AmplifierError::Session(e) => ("session", e.to_string()), + amplifier_core::errors::AmplifierError::Tool(e) => ("tool", e.to_string()), + amplifier_core::errors::AmplifierError::Provider(e) => ("provider", e.to_string()), + amplifier_core::errors::AmplifierError::Hook(e) => ("hook", e.to_string()), + amplifier_core::errors::AmplifierError::Context(e) => ("context", e.to_string()), + }; + let code = error_code_for_variant(variant); + Error::from_reason(format!("[{code}] {msg}")) +} + +// --------------------------------------------------------------------------- +// Module resolver bindings (Phase 4) +// --------------------------------------------------------------------------- + +/// Result from resolving a module path. +#[napi(object)] +pub struct JsModuleManifest { + /// How the module is loaded and invoked. + /// + /// Valid values (string literal union): + /// `"python"` | `"wasm"` | `"grpc"` | `"native"` + pub transport: String, + + /// Logical role the module plays inside the kernel. + /// + /// Valid values (string literal union): + /// `"tool"` | `"hook"` | `"context"` | `"approval"` | `"provider"` | `"orchestrator"` + pub module_type: String, + + /// Artifact format used to locate or load the module. + /// + /// Valid values (string literal union): + /// `"wasm"` | `"grpc"` | `"python"` + /// + /// - `"wasm"` — `artifactPath` contains the `.wasm` component file path + /// - `"grpc"` — `endpoint` contains the gRPC service URL + /// - `"python"` — `packageName` contains the importable Python package name + pub artifact_type: String, + + /// Path to WASM artifact (present when `artifactType` is `"wasm"`). 
+    pub artifact_path: Option<String>,
+
+    /// gRPC service endpoint URL (present when `artifactType` is `"grpc"`).
+    pub endpoint: Option<String>,
+
+    /// Python package name for import (present when `artifactType` is `"python"`).
+    pub package_name: Option<String>,
+}
+
+/// Resolve a module from a filesystem path.
+///
+/// Returns a JsModuleManifest describing the transport, module type, and artifact.
+#[napi]
+pub fn resolve_module(path: String) -> Result<JsModuleManifest> {
+    let manifest = amplifier_core::module_resolver::resolve_module(std::path::Path::new(&path))
+        .map_err(|e| Error::from_reason(format!("{e}")))?;
+
+    let transport = match manifest.transport {
+        amplifier_core::transport::Transport::Python => "python",
+        amplifier_core::transport::Transport::Wasm => "wasm",
+        amplifier_core::transport::Transport::Grpc => "grpc",
+        amplifier_core::transport::Transport::Native => "native",
+    };
+
+    let module_type = match manifest.module_type {
+        amplifier_core::models::ModuleType::Tool => "tool",
+        amplifier_core::models::ModuleType::Hook => "hook",
+        amplifier_core::models::ModuleType::Context => "context",
+        amplifier_core::models::ModuleType::Approval => "approval",
+        amplifier_core::models::ModuleType::Provider => "provider",
+        amplifier_core::models::ModuleType::Orchestrator => "orchestrator",
+        amplifier_core::models::ModuleType::Resolver => "resolver",
+    };
+
+    let (artifact_type, artifact_path, endpoint, package_name) = match &manifest.artifact {
+        amplifier_core::module_resolver::ModuleArtifact::WasmBytes { path, ..
} => {
+            ("wasm", Some(path.to_string_lossy().to_string()), None, None)
+        }
+        amplifier_core::module_resolver::ModuleArtifact::GrpcEndpoint(ep) => {
+            ("grpc", None, Some(ep.clone()), None)
+        }
+        amplifier_core::module_resolver::ModuleArtifact::PythonModule(name) => {
+            ("python", None, None, Some(name.clone()))
+        }
+    };
+
+    Ok(JsModuleManifest {
+        transport: transport.to_string(),
+        module_type: module_type.to_string(),
+        artifact_type: artifact_type.to_string(),
+        artifact_path,
+        endpoint,
+        package_name,
+    })
+}
+
+/// Load a WASM module from a path and return status info.
+///
+/// For WASM modules: loads the component and returns module type info.
+/// For Python modules: returns an error (TS host can't load Python).
+#[napi]
+pub fn load_wasm_from_path(path: String) -> Result<String> {
+    let manifest = amplifier_core::module_resolver::resolve_module(std::path::Path::new(&path))
+        .map_err(|e| Error::from_reason(format!("{e}")))?;
+
+    if manifest.transport == amplifier_core::transport::Transport::Python {
+        return Err(Error::from_reason(
+            "Python module detected — compile to WASM or run as gRPC sidecar. 
\ + TypeScript hosts cannot load Python modules.", + )); + } + + if manifest.transport != amplifier_core::transport::Transport::Wasm { + return Err(Error::from_reason(format!( + "load_wasm_from_path only handles WASM modules, got transport '{:?}'", + manifest.transport + ))); + } + + let engine = amplifier_core::wasm_engine::WasmEngine::new() + .map_err(|e| Error::from_reason(format!("WASM engine creation failed: {e}")))?; + + let coordinator = std::sync::Arc::new(amplifier_core::Coordinator::new_for_test()); + let loaded = + amplifier_core::module_resolver::load_module(&manifest, engine.inner(), Some(coordinator)) + .map_err(|e| Error::from_reason(format!("Module loading failed: {e}")))?; + + Ok(format!("loaded:{}", loaded.variant_name())) +} diff --git a/bindings/node/tsconfig.json b/bindings/node/tsconfig.json new file mode 100644 index 0000000..6cadfc4 --- /dev/null +++ b/bindings/node/tsconfig.json @@ -0,0 +1,13 @@ +{ + "compilerOptions": { + "target": "ES2022", + "module": "node16", + "moduleResolution": "node16", + "strict": true, + "esModuleInterop": true, + "outDir": "dist", + "declaration": true, + "types": ["vitest/globals"] + }, + "include": ["__tests__/**/*.ts"] +} diff --git a/bindings/python/Cargo.toml b/bindings/python/Cargo.toml index be85ce3..fb13ae1 100644 --- a/bindings/python/Cargo.toml +++ b/bindings/python/Cargo.toml @@ -11,8 +11,8 @@ name = "_engine" crate-type = ["cdylib", "rlib"] [dependencies] -amplifier-core = { path = "../../crates/amplifier-core" } -pyo3 = { version = "0.28", features = ["generate-import-lib"] } +amplifier-core = { path = "../../crates/amplifier-core", features = ["wasm"] } +pyo3 = { version = "0.28.2", features = ["generate-import-lib"] } pyo3-async-runtimes = { version = "0.28", features = ["tokio-runtime"] } pyo3-log = "0.13" log = "0.4" diff --git a/bindings/python/src/lib.rs b/bindings/python/src/lib.rs index e2b1350..ba5275c 100644 --- a/bindings/python/src/lib.rs +++ b/bindings/python/src/lib.rs @@ -24,7 +24,7 
@@ use pyo3::types::{PyDict, PyList}; use serde_json::Value; use amplifier_core::errors::HookError; -use amplifier_core::models::HookResult; +use amplifier_core::models::{HookAction, HookResult}; use amplifier_core::traits::HookHandler; /// Wrap a future_into_py result in a Python coroutine via _async_compat._wrap(). @@ -170,10 +170,14 @@ impl HookHandler for PyHookHandlerBridge { })?; let hook_result: HookResult = serde_json::from_str(&result_json).unwrap_or_else(|e| { - log::warn!( - "Failed to parse hook handler result JSON (defaulting to Continue): {e} — json: {result_json}" + log::error!( + "SECURITY: Hook handler returned unparseable result — failing closed (Deny): {e} — json: {result_json}" ); - HookResult::default() + HookResult { + action: HookAction::Deny, + reason: Some("Hook handler returned invalid response".to_string()), + ..Default::default() + } }); Ok(hook_result) }) @@ -2606,6 +2610,300 @@ fn compute_delay( amplifier_core::retry::compute_delay(&config.inner, attempt, retry_after, delay_multiplier) } +// --------------------------------------------------------------------------- +// Module resolver bindings +// --------------------------------------------------------------------------- + +/// Resolve a module from a filesystem path. +/// +/// Returns a dict with keys: "transport", "module_type", "artifact_type", +/// and artifact-specific keys ("artifact_path", "endpoint", "package_name"). 
+#[pyfunction] +fn resolve_module(py: Python<'_>, path: String) -> PyResult> { + let manifest = amplifier_core::module_resolver::resolve_module(std::path::Path::new(&path)) + .map_err(|e| PyErr::new::(format!("{e}")))?; + + let dict = PyDict::new(py); + let transport_str = match manifest.transport { + amplifier_core::transport::Transport::Python => "python", + amplifier_core::transport::Transport::Wasm => "wasm", + amplifier_core::transport::Transport::Grpc => "grpc", + amplifier_core::transport::Transport::Native => "native", + }; + dict.set_item("transport", transport_str)?; + + let type_str = match manifest.module_type { + amplifier_core::ModuleType::Tool => "tool", + amplifier_core::ModuleType::Hook => "hook", + amplifier_core::ModuleType::Context => "context", + amplifier_core::ModuleType::Approval => "approval", + amplifier_core::ModuleType::Provider => "provider", + amplifier_core::ModuleType::Orchestrator => "orchestrator", + amplifier_core::ModuleType::Resolver => "resolver", + }; + dict.set_item("module_type", type_str)?; + + match &manifest.artifact { + amplifier_core::module_resolver::ModuleArtifact::WasmBytes { path, .. } => { + dict.set_item("artifact_type", "wasm")?; + dict.set_item("artifact_path", path.to_string_lossy().as_ref())?; + } + amplifier_core::module_resolver::ModuleArtifact::GrpcEndpoint(endpoint) => { + dict.set_item("artifact_type", "grpc")?; + dict.set_item("endpoint", endpoint.as_str())?; + } + amplifier_core::module_resolver::ModuleArtifact::PythonModule(name) => { + dict.set_item("artifact_type", "python")?; + dict.set_item("package_name", name.as_str())?; + } + } + + Ok(dict.unbind()) +} + +/// Ensure WASM bytes are loaded from disk when `amplifier.toml` deferred loading. +/// +/// `parse_amplifier_toml` stores `bytes: Vec::new()` with a path, deferring +/// the actual file read to the transport layer. This helper fills in the +/// bytes before handing the manifest to `load_module`. 
+fn ensure_wasm_bytes_loaded( + manifest: &mut amplifier_core::module_resolver::ModuleManifest, +) -> PyResult<()> { + if let amplifier_core::module_resolver::ModuleArtifact::WasmBytes { + ref mut bytes, + ref path, + } = manifest.artifact + { + if bytes.is_empty() && path.is_file() { + *bytes = std::fs::read(path).map_err(|e| { + PyErr::new::(format!( + "Failed to read WASM bytes from {}: {e}", + path.display() + )) + })?; + } + } + Ok(()) +} + +/// Load a WASM module from a resolved manifest path. +/// +/// Returns a dict with "status" = "loaded" and "module_type" on success. +/// NOTE: This function loads into a throwaway test coordinator. For production +/// use, prefer `load_and_mount_wasm` which mounts into a real coordinator. +#[pyfunction] +fn load_wasm_from_path(py: Python<'_>, path: String) -> PyResult> { + let mut manifest = amplifier_core::module_resolver::resolve_module(std::path::Path::new(&path)) + .map_err(|e| PyErr::new::(format!("{e}")))?; + + if manifest.transport != amplifier_core::transport::Transport::Wasm { + return Err(PyErr::new::(format!( + "load_wasm_from_path only handles WASM modules, got transport '{:?}'", + manifest.transport + ))); + } + + ensure_wasm_bytes_loaded(&mut manifest)?; + + let engine = amplifier_core::wasm_engine::WasmEngine::new().map_err(|e| { + PyErr::new::(format!("WASM engine creation failed: {e}")) + })?; + + let coordinator = std::sync::Arc::new(amplifier_core::Coordinator::new_for_test()); + let loaded = + amplifier_core::module_resolver::load_module(&manifest, engine.inner(), Some(coordinator)) + .map_err(|e| PyErr::new::(format!("Module loading failed: {e}")))?; + + let dict = PyDict::new(py); + dict.set_item("status", "loaded")?; + dict.set_item("module_type", loaded.variant_name())?; + Ok(dict.unbind()) +} + +// --------------------------------------------------------------------------- +// PyWasmTool — thin Python wrapper around a Rust Arc +// 
--------------------------------------------------------------------------- + +/// Python-visible wrapper for a WASM-loaded tool module. +/// +/// Bridges the Rust `Arc` trait object into Python's tool protocol, +/// so WASM tools can be mounted into a coordinator's `mount_points["tools"]` +/// dict alongside native Python tool modules. +/// +/// Exposes: `name` (property), `get_spec()` (sync), `execute(input)` (async). +#[pyclass(name = "WasmTool")] +struct PyWasmTool { + inner: Arc, +} + +// Safety: Arc is Send+Sync (required by the Tool trait bound). +unsafe impl Send for PyWasmTool {} +unsafe impl Sync for PyWasmTool {} + +#[pymethods] +impl PyWasmTool { + /// The tool's unique name (e.g., "echo-tool"). + #[getter] + fn name(&self) -> &str { + self.inner.name() + } + + /// The tool's human-readable description. + #[getter] + fn description(&self) -> &str { + self.inner.description() + } + + /// Return the tool specification as a Python dict. + /// + /// The spec contains `name`, `description`, and `input_schema` (JSON Schema). + fn get_spec(&self, py: Python<'_>) -> PyResult> { + let spec = self.inner.get_spec(); + let json_str = serde_json::to_string(&spec).map_err(|e| { + PyErr::new::(format!("Failed to serialize ToolSpec: {e}")) + })?; + let json_mod = py.import("json")?; + let dict = json_mod.call_method1("loads", (&json_str,))?; + Ok(dict.unbind()) + } + + /// Execute the tool with JSON input and return the result. + /// + /// Async method — returns a coroutine that resolves to a dict with + /// `success` (bool), `output` (any), and optional `error` (str). + fn execute<'py>( + &self, + py: Python<'py>, + input: Bound<'py, PyAny>, + ) -> PyResult> { + let inner = self.inner.clone(); + + // Convert Python input to serde_json::Value + let json_mod = py.import("json")?; + let serializable = try_model_dump(&input); + let json_str: String = json_mod + .call_method1("dumps", (&serializable,))? 
+ .extract()?; + let value: Value = serde_json::from_str(&json_str) + .map_err(|e| PyErr::new::(format!("Invalid JSON input: {e}")))?; + + wrap_future_as_coroutine( + py, + pyo3_async_runtimes::tokio::future_into_py(py, async move { + let result = inner.execute(value).await.map_err(|e| { + PyErr::new::(format!("Tool execution failed: {e}")) + })?; + + // Convert ToolResult to Python dict + let result_json = serde_json::to_string(&result).map_err(|e| { + PyErr::new::(format!("Failed to serialize ToolResult: {e}")) + })?; + + Python::try_attach(|py| -> PyResult> { + let json_mod = py.import("json")?; + let dict = json_mod.call_method1("loads", (&result_json,))?; + Ok(dict.unbind()) + }) + .ok_or_else(|| { + PyErr::new::("Failed to attach to Python runtime") + })? + }), + ) + } + + fn __repr__(&self) -> String { + format!("", self.inner.name()) + } +} + +// --------------------------------------------------------------------------- +// load_and_mount_wasm — load WASM module and mount into a real coordinator +// --------------------------------------------------------------------------- + +/// Load a WASM module from a filesystem path and mount it into a coordinator. +/// +/// Unlike `load_wasm_from_path` (which loads into a throwaway test coordinator), +/// this function mounts the loaded module directly into the given coordinator's +/// Python-visible `mount_points` dict, making it available for orchestrator use. +/// +/// Currently supports mounting: +/// - **tool** modules → `mount_points["tools"][name]` as a `WasmTool` wrapper +/// - Other module types are loaded and validated, returning their info for +/// Python-side mounting (hooks are registered differently, etc.) 
+/// +/// Returns a dict with: +/// - `"status"`: `"mounted"` if mounted, `"loaded"` if loaded but not auto-mounted +/// - `"module_type"`: the detected module type string +/// - `"name"`: the module name (for tool modules) +/// +/// # Errors +/// +/// Returns `ValueError` if the path doesn't contain a WASM module. +/// Returns `RuntimeError` if engine creation or module loading fails. +#[pyfunction] +fn load_and_mount_wasm( + py: Python<'_>, + coordinator: &PyCoordinator, + path: String, +) -> PyResult> { + let mut manifest = amplifier_core::module_resolver::resolve_module(std::path::Path::new(&path)) + .map_err(|e| PyErr::new::(format!("{e}")))?; + + if manifest.transport != amplifier_core::transport::Transport::Wasm { + return Err(PyErr::new::(format!( + "load_and_mount_wasm only handles WASM modules, got transport '{:?}'", + manifest.transport + ))); + } + + ensure_wasm_bytes_loaded(&mut manifest)?; + + let engine = amplifier_core::wasm_engine::WasmEngine::new().map_err(|e| { + PyErr::new::(format!("WASM engine creation failed: {e}")) + })?; + + // Use the real coordinator's inner Arc for orchestrator modules + let rust_coordinator = coordinator.inner.clone(); + let loaded = amplifier_core::module_resolver::load_module( + &manifest, + engine.inner(), + Some(rust_coordinator), + ) + .map_err(|e| PyErr::new::(format!("Module loading failed: {e}")))?; + + let dict = PyDict::new(py); + dict.set_item("module_type", loaded.variant_name())?; + + match loaded { + amplifier_core::module_resolver::LoadedModule::Tool(tool) => { + let tool_name = tool.name().to_string(); + // Wrap in PyWasmTool and mount into coordinator's mount_points["tools"] + let wrapper = Py::new(py, PyWasmTool { inner: tool })?; + let mp = coordinator.mount_points.bind(py); + let tools_any = mp + .get_item("tools")? 
+ .ok_or_else(|| PyErr::new::("mount_points missing 'tools'"))?; + let tools_dict = tools_any.cast::()?; + tools_dict.set_item(&tool_name, &wrapper)?; + dict.set_item("status", "mounted")?; + dict.set_item("name", &tool_name)?; + } + amplifier_core::module_resolver::LoadedModule::PythonDelegated { package_name } => { + // Signal to caller: this is a Python module, handle via importlib + dict.set_item("status", "delegate_to_python")?; + dict.set_item("package_name", package_name)?; + } + _ => { + // Hook, Context, Approval, Provider, Orchestrator — + // loaded and validated, but not auto-mounted. The Python + // caller should handle mounting based on module_type. + dict.set_item("status", "loaded")?; + } + } + + Ok(dict.unbind()) +} + // --------------------------------------------------------------------------- // Module registration // --------------------------------------------------------------------------- @@ -2624,8 +2922,12 @@ fn _engine(m: &Bound<'_, PyModule>) -> PyResult<()> { m.add_class::()?; m.add_class::()?; m.add_class::()?; + m.add_class::()?; m.add_function(wrap_pyfunction!(classify_error_message, m)?)?; m.add_function(wrap_pyfunction!(compute_delay, m)?)?; + m.add_function(wrap_pyfunction!(resolve_module, m)?)?; + m.add_function(wrap_pyfunction!(load_wasm_from_path, m)?)?; + m.add_function(wrap_pyfunction!(load_and_mount_wasm, m)?)?; // ----------------------------------------------------------------------- // Event constants — expose all 41 canonical events from amplifier_core @@ -2847,4 +3149,34 @@ mod tests { // pyo3_log::init exists as a function — returns ResetHandle let _: fn() -> pyo3_log::ResetHandle = pyo3_log::init; } + + /// Verify PyWasmTool wrapper type exists and can hold an Arc. + /// + /// PyWasmTool bridges WASM-loaded Rust trait objects into Python mount_points. + /// Without this wrapper, WASM modules load into throwaway coordinators and are + /// never visible to the Python session. 
+ #[test] + fn py_wasm_tool_type_exists() { + fn _assert_type_compiles(_: &PyWasmTool) {} + } + + /// Document the contract for load_and_mount_wasm: + /// + /// - Accepts a PyCoordinator reference and a filesystem path + /// - Resolves the module manifest (auto-detects module type via amplifier.toml or .wasm inspection) + /// - Loads the WASM module via WasmEngine + /// - For tool modules: wraps in PyWasmTool and mounts into coordinator.mount_points["tools"] + /// - For other types: returns module info for Python-side mounting + /// - Returns a status dict with "status", "module_type", and optional "name" keys + /// + /// The actual function requires the Python GIL; this test documents the contract + /// and verifies the function compiles. Integration tests (Task 2) verify end-to-end. + #[test] + fn load_and_mount_wasm_contract() { + // Verify the function exists as a callable with the expected signature. + // It's a #[pyfunction] so we can't call it without the GIL, but we can + // verify the symbol compiles. 
+ let _exists = + load_and_mount_wasm as fn(Python<'_>, &PyCoordinator, String) -> PyResult>; + } } diff --git a/bindings/python/tests/test_loader_dispatch.py b/bindings/python/tests/test_loader_dispatch.py index 21fa6b2..aff5611 100644 --- a/bindings/python/tests/test_loader_dispatch.py +++ b/bindings/python/tests/test_loader_dispatch.py @@ -1,7 +1,11 @@ """Tests for the polyglot loader dispatch module.""" import os +import sys import tempfile +from unittest.mock import MagicMock, patch + +import pytest def test_dispatch_module_exists(): @@ -81,3 +85,65 @@ def test_dispatch_reads_grpc_endpoint(): meta = _read_module_meta(tmpdir) assert meta["module"]["transport"] == "grpc" assert meta["grpc"]["endpoint"] == "localhost:50052" + + +@pytest.mark.asyncio +async def test_load_module_uses_rust_loader_for_wasm_transport(): + """load_module imports load_and_mount_wasm and returns a deferred mount callable when Rust resolver detects wasm.""" + from amplifier_core.loader_dispatch import load_module + + fake_engine = MagicMock() + fake_engine.resolve_module.return_value = {"transport": "wasm", "name": "test-wasm"} + + coordinator = MagicMock() + coordinator.loader = None + + with tempfile.TemporaryDirectory() as tmpdir: + with patch.dict(sys.modules, {"amplifier_core._engine": fake_engine}): + result = await load_module("test-wasm", {}, tmpdir, coordinator) + + assert callable(result) + # load_and_mount_wasm is NOT called during load_module — it's deferred to mount time. + # The mount function captures load_and_mount_wasm and calls it when invoked. 
+ + +@pytest.mark.asyncio +async def test_load_module_wasm_without_rust_engine_raises_not_implemented(): + """load_module raises NotImplementedError for wasm when Rust engine is not available.""" + from amplifier_core.loader_dispatch import load_module + + coordinator = MagicMock() + coordinator.loader = None + + with tempfile.TemporaryDirectory() as tmpdir: + # Write an amplifier.toml so Python fallback detects wasm + toml_path = os.path.join(tmpdir, "amplifier.toml") + with open(toml_path, "w") as f: + f.write('[module]\nname = "test"\ntype = "tool"\ntransport = "wasm"\n') + + # Setting sys.modules entry to None makes any "from pkg import X" raise ImportError + with patch.dict(sys.modules, {"amplifier_core._engine": None}): + with pytest.raises(NotImplementedError, match="Rust engine"): + await load_module("test-wasm", {}, tmpdir, coordinator) + + +@pytest.mark.asyncio +async def test_load_module_falls_back_when_rust_resolver_raises(): + """load_module falls back to Python transport detection when Rust resolver raises.""" + from amplifier_core.loader_dispatch import load_module + + fake_engine = MagicMock() + fake_engine.resolve_module.side_effect = RuntimeError("resolver blew up") + + coordinator = MagicMock() + coordinator.loader = None + + with tempfile.TemporaryDirectory() as tmpdir: + # No amplifier.toml → Python detection returns "python" → tries Python loader + with patch.dict(sys.modules, {"amplifier_core._engine": fake_engine}): + # Python loader itself will fail (no real coordinator), but we just need + # to confirm it tried the Python fallback path (not raise from Rust error). + # TypeError is raised when the MagicMock coordinator's source_resolver + # returns a MagicMock that can't be awaited. 
+ with pytest.raises((TypeError, ValueError)): + await load_module("test-mod", {}, tmpdir, coordinator) diff --git a/crates/amplifier-core/Cargo.toml b/crates/amplifier-core/Cargo.toml index 7970ece..5a13fe7 100644 --- a/crates/amplifier-core/Cargo.toml +++ b/crates/amplifier-core/Cargo.toml @@ -15,14 +15,20 @@ uuid = { version = "1", features = ["v4"] } chrono = { version = "0.4", features = ["serde"] } rand = "0.8" log = "0.4" +toml = "0.8" prost = "0.13" tonic = "0.12" tokio-stream = { version = "0.1", features = ["net"] } -wasmtime = { version = "29", optional = true } +wasmtime = { version = "42", optional = true, features = ["component-model"] } +wasmtime-wasi = { version = "42", optional = true } [features] default = [] -wasm = ["wasmtime"] +wasm = ["wasmtime", "wasmtime-wasi"] + +[dev-dependencies] +tempfile = "3" +tokio = { version = "1", features = ["rt-multi-thread", "macros"] } [build-dependencies] tonic-build = "0.12" diff --git a/crates/amplifier-core/src/bridges/grpc_approval.rs b/crates/amplifier-core/src/bridges/grpc_approval.rs index 818a91e..786da0b 100644 --- a/crates/amplifier-core/src/bridges/grpc_approval.rs +++ b/crates/amplifier-core/src/bridges/grpc_approval.rs @@ -29,25 +29,6 @@ use crate::generated::amplifier_module::approval_service_client::ApprovalService use crate::models::{ApprovalRequest, ApprovalResponse}; use crate::traits::ApprovalProvider; -// TODO(grpc-v2): proto uses bare double for timeout, so None (no timeout) and -// Some(0.0) (expire immediately) are indistinguishable on the wire. Fix requires -// changing proto to optional double timeout. - -/// Map an optional approval timeout to the wire value. -/// -/// Because the proto field is a bare `double`, `None` (no timeout) is sent as -/// `0.0` — which is indistinguishable from "expire immediately". See the -/// `TODO(grpc-v2)` above. 
-fn map_approval_timeout(timeout: Option) -> f64 { - timeout.unwrap_or_else(|| { - log::debug!( - "ApprovalRequest has no timeout — sending 0.0 on wire \ - (indistinguishable from 'expire immediately')" - ); - 0.0 - }) -} - /// A bridge that wraps a remote gRPC `ApprovalService` as a native [`ApprovalProvider`]. /// /// The client is held behind a [`tokio::sync::Mutex`] because @@ -85,7 +66,7 @@ impl ApprovalProvider for GrpcApprovalBridge { action: request.action, details_json, risk_level: request.risk_level, - timeout: map_approval_timeout(request.timeout), + timeout: request.timeout, }; let response = { @@ -131,17 +112,47 @@ mod tests { } #[test] - fn none_timeout_defaults_to_zero() { - // When timeout is None, the wire value should be 0.0. - let timeout: Option = None; - let result = map_approval_timeout(timeout); - assert!((result - 0.0).abs() < f64::EPSILON); + fn none_timeout_maps_to_none_on_wire() { + let proto = amplifier_module::ApprovalRequest { + tool_name: String::new(), + action: String::new(), + details_json: String::new(), + risk_level: String::new(), + timeout: None, + }; + assert_eq!(proto.timeout, None); + } + + #[test] + fn some_timeout_is_preserved_on_wire() { + let proto = amplifier_module::ApprovalRequest { + tool_name: String::new(), + action: String::new(), + details_json: String::new(), + risk_level: String::new(), + timeout: Some(30.0), + }; + assert_eq!(proto.timeout, Some(30.0)); } #[test] - fn some_timeout_is_preserved() { - let timeout: Option = Some(30.0); - let result = map_approval_timeout(timeout); - assert!((result - 30.0).abs() < f64::EPSILON); + fn zero_timeout_is_distinguishable_from_none() { + let proto = amplifier_module::ApprovalRequest { + tool_name: String::new(), + action: String::new(), + details_json: String::new(), + risk_level: String::new(), + timeout: Some(0.0), + }; + assert_eq!(proto.timeout, Some(0.0)); + // Verify None and Some(0.0) are different + let proto_none = amplifier_module::ApprovalRequest { + 
tool_name: String::new(), + action: String::new(), + details_json: String::new(), + risk_level: String::new(), + timeout: None, + }; + assert_ne!(proto.timeout, proto_none.timeout); } } diff --git a/crates/amplifier-core/src/bridges/grpc_context.rs b/crates/amplifier-core/src/bridges/grpc_context.rs index 7c8a509..611526e 100644 --- a/crates/amplifier-core/src/bridges/grpc_context.rs +++ b/crates/amplifier-core/src/bridges/grpc_context.rs @@ -28,6 +28,8 @@ use tonic::transport::Channel; use crate::errors::ContextError; use crate::generated::amplifier_module; use crate::generated::amplifier_module::context_service_client::ContextServiceClient; +use crate::generated::conversions::{native_message_to_proto, proto_message_to_native}; +use crate::messages::Message; use crate::traits::{ContextManager, Provider}; /// A bridge that wraps a remote gRPC `ContextService` as a native [`ContextManager`]. @@ -49,41 +51,56 @@ impl GrpcContextBridge { }) } - // TODO(grpc-v2): Message fields (role, name, tool_call_id, metadata) are not yet - // transmitted through the gRPC bridge. The native Value may contain these fields - // but they are zeroed in the proto message. Full Message conversion requires - // proto schema updates (Phase 4). + /// Convert a [`Value`] (JSON message from context storage) to a proto + /// [`amplifier_module::Message`]. + /// + /// If the value can be deserialized as a native [`Message`], the full + /// typed conversion via [`native_message_to_proto`] is used — preserving + /// `role`, `name`, `tool_call_id`, `metadata`, and all `ContentBlock` + /// variants. Values that don't parse as a `Message` (e.g. plain strings + /// stored by older code) fall back to the text-only encoding with a + /// warning log. 
fn value_to_proto_message(message: &Value) -> amplifier_module::Message { - log::debug!( - "Converting Value to proto Message — role, name, tool_call_id, metadata_json are not yet transmitted" - ); - let json_string = serde_json::to_string(message).unwrap_or_else(|e| { - log::warn!("Failed to serialize context message to JSON: {e} — using empty string"); - String::new() - }); - amplifier_module::Message { - role: 0, // ROLE_UNSPECIFIED — TODO(grpc-v2): map from native message role - content: Some(amplifier_module::message::Content::TextContent(json_string)), - name: String::new(), // TODO(grpc-v2): extract from native message - tool_call_id: String::new(), // TODO(grpc-v2): extract from native message - metadata_json: String::new(), // TODO(grpc-v2): extract from native message + match serde_json::from_value::(message.clone()) { + Ok(native_msg) => native_message_to_proto(native_msg), + Err(e) => { + log::warn!( + "Failed to parse context message as Message, using text-only fallback: {e}" + ); + let json_string = serde_json::to_string(message).unwrap_or_else(|ser_err| { + log::warn!( + "Failed to serialize context message to JSON: {ser_err} — using empty string" + ); + String::new() + }); + amplifier_module::Message { + role: 0, + content: Some(amplifier_module::message::Content::TextContent(json_string)), + name: String::new(), + tool_call_id: String::new(), + metadata_json: String::new(), + } + } } } - // TODO(grpc-v2): Only TextContent is handled. BlockContent and other variants - // are mapped to Null, losing data. Full ContentBlock conversion requires Phase 4. + /// Convert a proto [`amplifier_module::Message`] back to a [`Value`]. + /// + /// Uses [`proto_message_to_native`] to get a fully-typed [`Message`] (all + /// `ContentBlock` variants, `role`, `name`, `tool_call_id`, `metadata`) + /// and then serialises it to JSON via `serde_json::to_value`. 
Returns + /// [`Value::Null`] only when conversion fails (proto message has no + /// content, or serialisation errors). fn proto_message_to_value(msg: &lifier_module::Message) -> Value { - match &msg.content { - Some(amplifier_module::message::Content::TextContent(text)) => { - serde_json::from_str(text).unwrap_or(Value::String(text.clone())) - } - Some(_other) => { - log::debug!( - "Non-TextContent message variant encountered — mapping to Null (not yet supported)" - ); + match proto_message_to_native(msg.clone()) { + Ok(native_msg) => serde_json::to_value(native_msg).unwrap_or_else(|e| { + log::warn!("Failed to serialise native Message to Value: {e}"); + Value::Null + }), + Err(e) => { + log::warn!("Failed to convert proto Message to native: {e}"); Value::Null } - None => Value::Null, } } } @@ -117,17 +134,16 @@ impl ContextManager for GrpcContextBridge { fn get_messages_for_request( &self, token_budget: Option, - _provider: Option>, + provider: Option>, ) -> Pin, ContextError>> + Send + '_>> { Box::pin(async move { - // TODO(grpc-v2): provider_name parameter is not transmitted to the remote - // context manager. The _provider parameter is accepted but unused. - log::debug!( - "get_messages_for_request: provider_name is not transmitted through gRPC bridge" - ); + let provider_name = provider + .as_ref() + .map(|p| p.name().to_string()) + .unwrap_or_default(); let request = amplifier_module::GetMessagesForRequestParams { token_budget: token_budget.unwrap_or(0) as i32, - provider_name: String::new(), // TODO(grpc-v2): extract from _provider param + provider_name, }; let response = { @@ -234,46 +250,55 @@ mod tests { } } - // ── S-1 regression: value_to_proto_message structural gaps ───────────── + // -- S-1: value_to_proto_message fallback for non-Message values ------------ - /// value_to_proto_message stores JSON as TextContent and zeroes all other fields. 
+ /// A plain JSON value that cannot be parsed as a Message falls back to the + /// text-only encoding with ROLE_UNSPECIFIED and empty ancillary fields. #[test] - fn value_to_proto_message_text_content_and_zeroed_fields() { + fn value_to_proto_message_non_message_value_falls_back_to_text() { let val = Value::String("hello".to_string()); let msg = GrpcContextBridge::value_to_proto_message(&val); - assert_eq!(msg.role, 0, "role should be ROLE_UNSPECIFIED (0)"); - assert_eq!(msg.name, "", "name should be empty"); - assert_eq!(msg.tool_call_id, "", "tool_call_id should be empty"); - assert_eq!(msg.metadata_json, "", "metadata_json should be empty"); + assert_eq!(msg.role, 0, "fallback role must be ROLE_UNSPECIFIED (0)"); + assert_eq!(msg.name, "", "fallback name must be empty"); + assert_eq!(msg.tool_call_id, "", "fallback tool_call_id must be empty"); + assert_eq!( + msg.metadata_json, "", + "fallback metadata_json must be empty" + ); match msg.content { Some(amplifier_module::message::Content::TextContent(text)) => { assert_eq!(text, "\"hello\""); } - other => panic!("expected TextContent, got {other:?}"), + other => panic!("expected TextContent fallback, got {other:?}"), } } - // ── S-2 regression: proto_message_to_value structural gaps ───────────── + // -- S-2: proto_message_to_value fidelity ---------------------------------- - /// TextContent round-trips through proto_message_to_value correctly. + /// A properly-encoded proto Message (role + TextContent) roundtrips through + /// proto_message_to_value — role and content are preserved faithfully. 
#[test] fn proto_message_to_value_text_content_roundtrip() { - let json = r#"{"role":"user","content":"hi"}"#; - let msg = amplifier_module::Message { - role: 0, - content: Some(amplifier_module::message::Content::TextContent( - json.to_string(), - )), - name: String::new(), - tool_call_id: String::new(), - metadata_json: String::new(), + use crate::messages::{Message, MessageContent, Role}; + use std::collections::HashMap; + + // Build the proto message via native_message_to_proto (same path the bridge uses). + let native = Message { + role: Role::User, + content: MessageContent::Text("hi".into()), + name: None, + tool_call_id: None, + metadata: None, + extensions: HashMap::new(), }; - let val = GrpcContextBridge::proto_message_to_value(&msg); + let proto = crate::generated::conversions::native_message_to_proto(native); + let val = GrpcContextBridge::proto_message_to_value(&proto); assert_eq!(val["role"], "user"); assert_eq!(val["content"], "hi"); } - /// None content maps to Value::Null. + /// A proto Message with no content (content == None) maps to Value::Null + /// because proto_message_to_native returns Err for missing content. #[test] fn proto_message_to_value_none_content_is_null() { let msg = amplifier_module::Message { @@ -286,12 +311,12 @@ mod tests { assert_eq!(GrpcContextBridge::proto_message_to_value(&msg), Value::Null); } - /// BlockContent (non-TextContent variant) maps to Value::Null — data loss documented - /// by TODO(grpc-v2) in the implementation. + /// A proto Message with an empty BlockContent list is decoded to a proper + /// JSON Value — no longer silently dropped as Null. 
#[test] - fn proto_message_to_value_block_content_is_null() { + fn proto_message_to_value_empty_block_content_is_not_null() { let msg = amplifier_module::Message { - role: 0, + role: amplifier_module::Role::User as i32, content: Some(amplifier_module::message::Content::BlockContent( amplifier_module::ContentBlockList { blocks: vec![] }, )), @@ -299,10 +324,69 @@ mod tests { tool_call_id: String::new(), metadata_json: String::new(), }; + let val = GrpcContextBridge::proto_message_to_value(&msg); + assert_ne!(val, Value::Null, "BlockContent must produce a proper Value"); + assert_eq!(val["role"], "user"); + assert_eq!(val["content"], serde_json::json!([])); + } + + // -- Full-fidelity tests --------------------------------------------------- + + /// value_to_proto_message must preserve role, name, and tool_call_id when + /// the incoming Value is a well-formed serialised Message. + #[test] + fn value_to_proto_message_preserves_role_name_and_tool_call_id() { + use crate::messages::{Message, MessageContent, Role}; + use std::collections::HashMap; + + let native = Message { + role: Role::Assistant, + content: MessageContent::Text("hello".into()), + name: Some("alice".into()), + tool_call_id: Some("call_123".into()), + metadata: None, + extensions: HashMap::new(), + }; + let val = serde_json::to_value(&native).expect("serialise Message to Value"); + let proto = GrpcContextBridge::value_to_proto_message(&val); + + // role must NOT be 0 (ROLE_UNSPECIFIED) — it should be Assistant + assert_ne!(proto.role, 0, "role must not be ROLE_UNSPECIFIED"); + assert_eq!(proto.name, "alice", "name must be preserved"); assert_eq!( - GrpcContextBridge::proto_message_to_value(&msg), - Value::Null, - "BlockContent must map to Null until grpc-v2 phase" + proto.tool_call_id, "call_123", + "tool_call_id must be preserved" ); } + + /// proto_message_to_value must produce a proper JSON Value (not Null) when + /// the proto message carries BlockContent with actual blocks. 
+ #[test] + fn proto_message_to_value_block_content_preserved() { + let msg = amplifier_module::Message { + role: amplifier_module::Role::Assistant as i32, + content: Some(amplifier_module::message::Content::BlockContent( + amplifier_module::ContentBlockList { + blocks: vec![amplifier_module::ContentBlock { + block: Some(amplifier_module::content_block::Block::TextBlock( + amplifier_module::TextBlock { + text: "hello from block".into(), + }, + )), + visibility: 0, + }], + }, + )), + name: String::new(), + tool_call_id: String::new(), + metadata_json: String::new(), + }; + let val = GrpcContextBridge::proto_message_to_value(&msg); + assert_ne!(val, Value::Null, "BlockContent must NOT become Null"); + // The role field should be correct + assert_eq!(val["role"], "assistant"); + // content should be an array with one block + assert!(val["content"].is_array()); + assert_eq!(val["content"].as_array().unwrap().len(), 1); + } } diff --git a/crates/amplifier-core/src/bridges/grpc_hook.rs b/crates/amplifier-core/src/bridges/grpc_hook.rs index 54eb6bf..c2896dc 100644 --- a/crates/amplifier-core/src/bridges/grpc_hook.rs +++ b/crates/amplifier-core/src/bridges/grpc_hook.rs @@ -51,7 +51,9 @@ impl GrpcHookBridge { } /// Convert a proto `HookResult` to a native [`models::HookResult`]. 
- fn proto_to_native_hook_result(proto: amplifier_module::HookResult) -> models::HookResult { + pub(crate) fn proto_to_native_hook_result( + proto: amplifier_module::HookResult, + ) -> models::HookResult { let action = match amplifier_module::HookAction::try_from(proto.action) { Ok(amplifier_module::HookAction::Continue) => models::HookAction::Continue, Ok(amplifier_module::HookAction::Modify) => models::HookAction::Modify, @@ -178,7 +180,7 @@ impl GrpcHookBridge { ephemeral: proto.ephemeral, approval_prompt, approval_options, - approval_timeout: proto.approval_timeout, + approval_timeout: proto.approval_timeout.unwrap_or(300.0), approval_default, suppress_output: proto.suppress_output, user_message, @@ -244,7 +246,7 @@ mod tests { ephemeral: false, approval_prompt: String::new(), approval_options: vec![], - approval_timeout: 0.0, + approval_timeout: None, approval_default: 0, suppress_output: false, user_message: String::new(), diff --git a/crates/amplifier-core/src/bridges/grpc_orchestrator.rs b/crates/amplifier-core/src/bridges/grpc_orchestrator.rs index 3693265..77becdc 100644 --- a/crates/amplifier-core/src/bridges/grpc_orchestrator.rs +++ b/crates/amplifier-core/src/bridges/grpc_orchestrator.rs @@ -12,7 +12,7 @@ //! use amplifier_core::traits::Orchestrator; //! use std::sync::Arc; //! -//! let bridge = GrpcOrchestratorBridge::connect("http://localhost:50051").await?; +//! let bridge = GrpcOrchestratorBridge::connect("http://localhost:50051", "session-abc").await?; //! let orchestrator: Arc = Arc::new(bridge); //! # Ok(()) //! # } @@ -36,26 +36,39 @@ use crate::traits::{ContextManager, Orchestrator, Provider, Tool}; /// The client is held behind a [`tokio::sync::Mutex`] because /// `OrchestratorServiceClient` methods take `&mut self` and we need to hold /// the lock across `.await` points. 
+/// +/// `session_id` is set at construction time and transmitted with every +/// `execute` call so the remote orchestrator can route KernelService +/// callbacks back to the correct session. pub struct GrpcOrchestratorBridge { client: tokio::sync::Mutex>, + session_id: String, } impl GrpcOrchestratorBridge { /// Connect to a remote orchestrator service. - pub async fn connect(endpoint: &str) -> Result> { + /// + /// # Arguments + /// + /// * `endpoint` — gRPC endpoint URL (e.g. `"http://localhost:50051"`). + /// * `session_id` — Session identifier used for KernelService callback routing. + pub async fn connect( + endpoint: &str, + session_id: &str, + ) -> Result> { let client = OrchestratorServiceClient::connect(endpoint.to_string()).await?; Ok(Self { client: tokio::sync::Mutex::new(client), + session_id: session_id.to_string(), }) } } impl Orchestrator for GrpcOrchestratorBridge { - // TODO(grpc-v2): 5 parameters (context, providers, tools, hooks, coordinator) - // are accepted by the Orchestrator trait but not transmitted through the gRPC - // bridge. The remote orchestrator must access these via the KernelService - // callback channel instead. Full parameter passing requires proto schema updates. + // Remote orchestrators access these subsystems via KernelService callbacks + // using session_id routing. The parameters are intentionally not serialized + // over gRPC. 
fn execute( &self, prompt: String, @@ -72,7 +85,7 @@ impl Orchestrator for GrpcOrchestratorBridge { ); let request = amplifier_module::OrchestratorExecuteRequest { prompt, - session_id: String::new(), // TODO(grpc-v2): pass session_id for callback routing + session_id: self.session_id.clone(), }; let response = { @@ -115,8 +128,8 @@ mod tests { // ── S-4 regression: execute() discards 5 parameters ────────────────────── /// execute() discards 5 parameters; the structural gap must be documented - /// with TODO(grpc-v2) comments and a log::debug!() call so the loss is - /// visible at runtime and flagged for the grpc-v2 phase. + /// with a clear doc comment and a log::debug!() call so the loss is + /// visible at runtime. /// /// NOTE: we split at the `#[cfg(test)]` boundary so the test assertions /// themselves (which reference the searched tokens as string literals) do @@ -130,17 +143,44 @@ mod tests { .next() .expect("source must contain an impl section before #[cfg(test)]"); - assert!( - impl_source.contains("// TODO(grpc-v2):"), - "execute() impl must contain a // TODO(grpc-v2): comment documenting discarded parameters" - ); assert!( impl_source.contains("log::debug!("), "execute() impl must contain a log::debug!() call for discarded parameters" ); assert!( - impl_source.contains("session_id: String::new()"), - "session_id field must be present and empty (grpc-v2 placeholder)" + impl_source.contains("KernelService"), + "execute() impl must reference KernelService in the explanation of discarded parameters" + ); + } + + /// session_id must be stored in the struct and used in execute(). + /// + /// This test verifies that the session_id placeholder (String::new()) has + /// been replaced with an actual field that is set at construction time and + /// threaded through the gRPC request for callback routing. 
+ /// + /// NOTE: we split at the `#[cfg(test)]` boundary so the test assertions + /// themselves (which reference the searched tokens as string literals) do + /// not produce false positives. + #[test] + fn session_id_is_stored_and_used_in_execute() { + let full_source = include_str!("grpc_orchestrator.rs"); + let impl_source = full_source + .split("\n#[cfg(test)]") + .next() + .expect("source must contain an impl section before #[cfg(test)]"); + + assert!( + impl_source.contains(" session_id: String,"), + "GrpcOrchestratorBridge struct must declare a `session_id: String` field" + ); + assert!( + impl_source.contains("self.session_id"), + "execute() must use self.session_id (not a hardcoded placeholder)" + ); + assert!( + !impl_source.contains("session_id: String::new()"), + "session_id: String::new() placeholder must be removed; use self.session_id instead" ); } } diff --git a/crates/amplifier-core/src/bridges/grpc_provider.rs b/crates/amplifier-core/src/bridges/grpc_provider.rs index eb13456..609dff9 100644 --- a/crates/amplifier-core/src/bridges/grpc_provider.rs +++ b/crates/amplifier-core/src/bridges/grpc_provider.rs @@ -142,24 +142,32 @@ impl Provider for GrpcProviderBridge { fn complete( &self, - _request: ChatRequest, + request: ChatRequest, ) -> Pin> + Send + '_>> { Box::pin(async move { - // Phase 2 stub: Message ↔ proto::Message and ContentBlock ↔ - // proto::ContentBlock conversions are not yet implemented. - // Fail loudly so callers know this bridge cannot complete yet. - // Full conversion will land in Phase 4 (Task 21). 
- Err(ProviderError::Other { - message: "GrpcProviderBridge::complete() not yet implemented: \ - Message/ContentBlock conversion requires Phase 4" - .into(), - provider: Some(self.name.clone()), - model: None, - retry_after: None, - status_code: None, - retryable: false, - delay_multiplier: None, - }) + let proto_request = + crate::generated::conversions::native_chat_request_to_proto(&request); + + let response = { + let mut client = self.client.lock().await; + client + .complete(proto_request) + .await + .map_err(|e| ProviderError::Other { + message: format!("gRPC call failed: {e}"), + provider: Some(self.name.clone()), + model: None, + retry_after: None, + status_code: None, + retryable: false, + delay_multiplier: None, + })? + }; + + let native_response = + crate::generated::conversions::proto_chat_response_to_native(response.into_inner()); + + Ok(native_response) }) } @@ -168,6 +176,27 @@ impl Provider for GrpcProviderBridge { } } +impl GrpcProviderBridge { + /// Test-only constructor: build a bridge from a pre-built client without + /// going through `connect()` (which would require a live gRPC server). + #[cfg(test)] + fn new_for_testing(client: ProviderServiceClient, name: String) -> Self { + use crate::models::ProviderInfo; + Self { + client: tokio::sync::Mutex::new(client), + name, + info: ProviderInfo { + id: "test-provider".into(), + display_name: "Test Provider".into(), + credential_env_vars: vec![], + capabilities: vec![], + defaults: Default::default(), + config_fields: vec![], + }, + } + } +} + #[cfg(test)] mod tests { use super::*; @@ -203,4 +232,71 @@ mod tests { let result = parse_defaults_json("not-valid-json", "test-provider"); assert!(result.is_empty()); } + + /// RED test: verifies that `complete()` actually attempts a gRPC call + /// rather than returning the Phase-2 "not yet implemented" stub error. 
+ /// + /// The bridge is pointed at a non-existent server so the call will fail + /// with a transport/connection error — NOT the old stub message. + /// + /// Before the fix: returns `ProviderError::Other { message: "… not yet + /// implemented …" }` → assertion fails (RED). + /// After the fix: returns a gRPC transport error → assertion passes (GREEN). + #[tokio::test] + async fn complete_attempts_grpc_call_not_stub() { + use crate::messages::{ChatRequest, Message, MessageContent, Role}; + use std::collections::HashMap; + + // Create a lazy channel to a port that has nothing listening. + // `connect_lazy()` defers the actual TCP connection until the first + // RPC, so creating the channel never blocks or fails. + let channel = tonic::transport::Channel::from_static("http://[::1]:50099").connect_lazy(); + let client = ProviderServiceClient::new(channel); + let bridge = GrpcProviderBridge::new_for_testing(client, "test-provider".into()); + + let request = ChatRequest { + messages: vec![Message { + role: Role::User, + content: MessageContent::Text("hello".into()), + name: None, + tool_call_id: None, + metadata: None, + extensions: HashMap::new(), + }], + tools: None, + response_format: None, + temperature: None, + top_p: None, + max_output_tokens: None, + conversation_id: None, + stream: None, + metadata: None, + model: None, + tool_choice: None, + stop: None, + reasoning_effort: None, + timeout: None, + extensions: HashMap::new(), + }; + + let result = bridge.complete(request).await; + + // The stub returned exactly this message — after the fix the bridge + // must attempt a real RPC and return a connection/transport error. + match &result { + Err(ProviderError::Other { message, .. }) => { + assert!( + !message.contains("not yet implemented"), + "complete() returned the old stub error instead of attempting a gRPC \ + call. Got: {message}" + ); + } + Err(_) => { + // Any other ProviderError variant means a real attempt was made. 
+ } + Ok(_) => { + // Succeeding would also be fine (highly unlikely with no server). + } + } + } } diff --git a/crates/amplifier-core/src/bridges/mod.rs b/crates/amplifier-core/src/bridges/mod.rs index b4a83bf..37a51f9 100644 --- a/crates/amplifier-core/src/bridges/mod.rs +++ b/crates/amplifier-core/src/bridges/mod.rs @@ -10,4 +10,58 @@ pub mod grpc_orchestrator; pub mod grpc_provider; pub mod grpc_tool; #[cfg(feature = "wasm")] +pub mod wasm_approval; +#[cfg(feature = "wasm")] +pub mod wasm_context; +#[cfg(feature = "wasm")] +pub mod wasm_hook; +#[cfg(feature = "wasm")] +pub mod wasm_orchestrator; +#[cfg(feature = "wasm")] +pub mod wasm_provider; +#[cfg(feature = "wasm")] pub mod wasm_tool; + +// ── WASM engine factory & resource limits ────────────────────────────── + +#[cfg(feature = "wasm")] +use std::sync::Arc; +#[cfg(feature = "wasm")] +use wasmtime::Engine; + +/// Default WASM execution limits. +#[cfg(feature = "wasm")] +pub struct WasmLimits { + /// Maximum epoch ticks before trap (at ~100 ticks/sec, 3000 = 30 seconds). + pub max_epoch_ticks: u64, + /// Maximum memory in bytes (default: 64 MB). + pub max_memory_bytes: usize, +} + +#[cfg(feature = "wasm")] +impl Default for WasmLimits { + fn default() -> Self { + Self { + max_epoch_ticks: 3000, // ~30 seconds at 100Hz + max_memory_bytes: 64 << 20, // 64 MB + } + } +} + +/// Create a wasmtime Engine with epoch interruption enabled and a background +/// ticker thread that increments the epoch every 10ms (~100Hz). 
+#[cfg(feature = "wasm")] +pub fn create_wasm_engine() -> Result, Box> { + let mut config = wasmtime::Config::new(); + config.wasm_component_model(true); + config.epoch_interruption(true); + let engine = Arc::new(Engine::new(&config)?); + + let engine_clone = Arc::clone(&engine); + std::thread::spawn(move || loop { + std::thread::sleep(std::time::Duration::from_millis(10)); + engine_clone.increment_epoch(); + }); + + Ok(engine) +} diff --git a/crates/amplifier-core/src/bridges/wasm_approval.rs b/crates/amplifier-core/src/bridges/wasm_approval.rs new file mode 100644 index 0000000..7d7f348 --- /dev/null +++ b/crates/amplifier-core/src/bridges/wasm_approval.rs @@ -0,0 +1,241 @@ +//! WASM bridge for sandboxed approval provider modules (Component Model). +//! +//! [`WasmApprovalBridge`] loads a WASM Component via wasmtime and implements the +//! [`ApprovalProvider`] trait, enabling sandboxed in-process approval decisions. The guest +//! exports `request-approval` (accepts JSON-serialized `ApprovalRequest` as bytes, +//! returns JSON-serialized `ApprovalResponse` bytes). +//! +//! Gated behind the `wasm` feature flag. + +use std::future::Future; +use std::path::Path; +use std::pin::Pin; +use std::sync::Arc; + +use wasmtime::component::Component; +use wasmtime::{Engine, Store}; + +use crate::errors::{AmplifierError, SessionError}; +use crate::models::{ApprovalRequest, ApprovalResponse}; +use crate::traits::ApprovalProvider; + +use super::wasm_tool::{create_linker_and_store, WasmState}; + +/// The WIT interface name used by `cargo component` for approval provider exports. +const INTERFACE_NAME: &str = "amplifier:modules/approval-provider@1.0.0"; + +/// Convenience alias for the wasmtime typed function handle: takes (bytes) → result(bytes, string). +type RequestApprovalFunc = wasmtime::component::TypedFunc<(Vec,), (Result, String>,)>; + +/// Shorthand for the fallible return type used by helper functions. 
+type WasmResult = Result>; + +/// Look up the `request-approval` typed function export from a component instance. +/// +/// Tries: +/// 1. Direct root-level export `"request-approval"` +/// 2. Nested inside the [`INTERFACE_NAME`] exported instance +fn get_request_approval_func( + instance: &wasmtime::component::Instance, + store: &mut Store, +) -> WasmResult { + // Try direct root-level export first. + if let Ok(f) = instance + .get_typed_func::<(Vec,), (Result, String>,)>(&mut *store, "request-approval") + { + return Ok(f); + } + + // Try nested inside the interface-exported instance. + let iface_idx = instance + .get_export_index(&mut *store, None, INTERFACE_NAME) + .ok_or_else(|| format!("export instance '{INTERFACE_NAME}' not found"))?; + let func_idx = instance + .get_export_index(&mut *store, Some(&iface_idx), "request-approval") + .ok_or_else(|| { + format!("export function 'request-approval' not found in '{INTERFACE_NAME}'") + })?; + let func = instance + .get_typed_func::<(Vec,), (Result, String>,)>(&mut *store, &func_idx) + .map_err(|e| format!("typed func lookup failed for 'request-approval': {e}"))?; + Ok(func) +} + +/// Helper: call the `request-approval` export on a fresh component instance. +/// +/// The request bytes must be a JSON-serialized `ApprovalRequest`. +fn call_request_approval( + engine: &Engine, + component: &Component, + request_bytes: Vec, +) -> Result, Box> { + let (linker, mut store) = create_linker_and_store(engine, &super::WasmLimits::default())?; + let instance = linker.instantiate(&mut store, component)?; + + let func = get_request_approval_func(&instance, &mut store)?; + let (result,) = func.call(&mut store, (request_bytes,))?; + match result { + Ok(bytes) => Ok(bytes), + Err(err) => Err(err.into()), + } +} + +/// A bridge that loads a WASM Component and exposes it as a native [`ApprovalProvider`]. +/// +/// The component is compiled once and can be instantiated for each approval request. 
+/// `request-approval` is called per invocation inside a `spawn_blocking` task +/// (wasmtime is synchronous). Each call gets a fresh WASM instance — the bridge is stateless. +pub struct WasmApprovalBridge { + engine: Arc, + component: Component, +} + +impl WasmApprovalBridge { + /// Load a WASM approval component from raw bytes. + /// + /// Compiles the Component and caches it for reuse across `request_approval()` calls. + pub fn from_bytes( + wasm_bytes: &[u8], + engine: Arc, + ) -> Result> { + let component = Component::new(&engine, wasm_bytes)?; + Ok(Self { engine, component }) + } + + /// Convenience: load a WASM approval component from a file path. + pub fn from_file( + path: &Path, + engine: Arc, + ) -> Result> { + let bytes = + std::fs::read(path).map_err(|e| format!("failed to read {}: {e}", path.display()))?; + Self::from_bytes(&bytes, engine) + } +} + +impl ApprovalProvider for WasmApprovalBridge { + fn request_approval( + &self, + request: ApprovalRequest, + ) -> Pin> + Send + '_>> { + Box::pin(async move { + // Serialize the ApprovalRequest as JSON bytes for the WASM guest. + let request_bytes = serde_json::to_vec(&request).map_err(|e| { + AmplifierError::Session(SessionError::Other { + message: format!("WASM approval: failed to serialize ApprovalRequest: {e}"), + }) + })?; + + let engine = Arc::clone(&self.engine); + let component = self.component.clone(); // Component is Arc-backed, cheap clone + + let result_bytes = tokio::task::spawn_blocking(move || { + call_request_approval(&engine, &component, request_bytes) + }) + .await + .map_err(|e| { + AmplifierError::Session(SessionError::Other { + message: format!("WASM approval execution task panicked: {e}"), + }) + })? 
+ .map_err(|e| { + AmplifierError::Session(SessionError::Other { + message: format!("WASM request-approval failed: {e}"), + }) + })?; + + let approval_response: ApprovalResponse = serde_json::from_slice(&result_bytes) + .map_err(|e| { + AmplifierError::Session(SessionError::Other { + message: format!( + "WASM approval: failed to deserialize ApprovalResponse: {e}" + ), + }) + })?; + + Ok(approval_response) + }) + } +} + +#[cfg(test)] +mod tests { + use super::*; + use std::sync::Arc; + + /// Compile-time check: WasmApprovalBridge satisfies Arc. + /// + /// Note: the integration test in `tests/wasm_approval_e2e.rs` would have an equivalent + /// check from the *public* API surface. This one catches breakage during unit-test + /// runs without needing the integration test. + #[allow(dead_code)] + fn _assert_wasm_approval_bridge_is_approval_provider(bridge: WasmApprovalBridge) { + let _: Arc = Arc::new(bridge); + } + + /// Helper: read the auto-approve.wasm fixture bytes. + /// + /// The fixture lives at the workspace root under `tests/fixtures/wasm/`. + /// CARGO_MANIFEST_DIR points to `amplifier-core/crates/amplifier-core`, + /// so we walk up to the workspace root first. + fn auto_approve_wasm_bytes() -> Vec { + let manifest = std::path::Path::new(env!("CARGO_MANIFEST_DIR")); + // Two candidates because the workspace root may be at different depths + // depending on how the repo is checked out: + // - 3 levels up: used as a git submodule (super-repo/amplifier-core/crates/amplifier-core) + // - 2 levels up: standalone checkout (amplifier-core/crates/amplifier-core) + let candidates = [ + manifest.join("../../../tests/fixtures/wasm/auto-approve.wasm"), + manifest.join("../../tests/fixtures/wasm/auto-approve.wasm"), + ]; + for p in &candidates { + if p.exists() { + return std::fs::read(p) + .unwrap_or_else(|e| panic!("Failed to read auto-approve.wasm at {p:?}: {e}")); + } + } + panic!( + "auto-approve.wasm not found. 
Tried: {:?}", + candidates + .iter() + .map(|p| p.display().to_string()) + .collect::>() + ); + } + + /// Helper: create a shared engine with component model enabled. + fn make_engine() -> Arc { + let mut config = wasmtime::Config::new(); + config.wasm_component_model(true); + Arc::new(Engine::new(&config).expect("engine creation failed")) + } + + /// E2E test: auto-approve.wasm always returns approved=true with a reason. + #[tokio::test] + async fn auto_approve_returns_approved_with_reason() { + let engine = make_engine(); + let bytes = auto_approve_wasm_bytes(); + let bridge = + WasmApprovalBridge::from_bytes(&bytes, engine).expect("from_bytes should succeed"); + + let request = ApprovalRequest { + tool_name: "test-tool".to_string(), + action: "delete all files".to_string(), + details: Default::default(), + risk_level: "high".to_string(), + timeout: None, + }; + + let response = bridge.request_approval(request).await; + let response = response.expect("request_approval should succeed"); + + assert!( + response.approved, + "expected approved=true from auto-approve fixture" + ); + assert!( + response.reason.is_some(), + "expected a reason from auto-approve fixture, got None" + ); + } +} diff --git a/crates/amplifier-core/src/bridges/wasm_context.rs b/crates/amplifier-core/src/bridges/wasm_context.rs new file mode 100644 index 0000000..a96f33e --- /dev/null +++ b/crates/amplifier-core/src/bridges/wasm_context.rs @@ -0,0 +1,385 @@ +//! WASM bridge for sandboxed context manager modules (Component Model). +//! +//! [`WasmContextBridge`] loads a WASM Component via wasmtime and implements the +//! [`ContextManager`] trait, enabling sandboxed in-process context management. +//! +//! UNLIKE tool and hook bridges, this bridge is **stateful**: the same WASM instance +//! persists across all calls. This allows the context manager to maintain an internal +//! message store (e.g., the `memory-context` fixture's `Vec`). +//! +//! Gated behind the `wasm` feature flag. 
+ +use std::future::Future; +use std::path::Path; +use std::pin::Pin; +use std::sync::Arc; + +use serde_json::Value; +use wasmtime::component::Component; +use wasmtime::{Engine, Store}; + +use crate::errors::ContextError; +use crate::traits::{ContextManager, Provider}; + +use super::wasm_tool::{create_linker_and_store, WasmState}; + +/// The WIT interface name used by `cargo component` for context manager exports. +const INTERFACE_NAME: &str = "amplifier:modules/context-manager@1.0.0"; + +/// Shorthand for the fallible return type used by helper functions. +type WasmResult = Result>; + +/// Look up a typed function export from the context manager component instance. +/// +/// Tries: +/// 1. Direct root-level export by `func_name` +/// 2. Nested inside the [`INTERFACE_NAME`] exported instance +fn get_context_func( + instance: &wasmtime::component::Instance, + store: &mut Store, + func_name: &str, +) -> WasmResult> +where + Params: wasmtime::component::Lower + wasmtime::component::ComponentNamedList, + Results: wasmtime::component::Lift + wasmtime::component::ComponentNamedList, +{ + // Try direct root-level export first. + if let Ok(f) = instance.get_typed_func::(&mut *store, func_name) { + return Ok(f); + } + + // Try nested inside the interface-exported instance. + let iface_idx = instance + .get_export_index(&mut *store, None, INTERFACE_NAME) + .ok_or_else(|| format!("export instance '{INTERFACE_NAME}' not found"))?; + let func_idx = instance + .get_export_index(&mut *store, Some(&iface_idx), func_name) + .ok_or_else(|| format!("export function '{func_name}' not found in '{INTERFACE_NAME}'"))?; + let func = instance + .get_typed_func::(&mut *store, &func_idx) + .map_err(|e| format!("typed func lookup failed for '{func_name}': {e}"))?; + Ok(func) +} + +/// A bridge that loads a WASM Component and exposes it as a native [`ContextManager`]. +/// +/// Unlike [`WasmToolBridge`] and [`WasmHookBridge`], this bridge is **stateful**. 
+/// The same WASM instance is reused across all calls, allowing the context manager +/// to maintain internal state (e.g., a `Vec` of messages). The store and +/// instance are protected by a [`tokio::sync::Mutex`]. +/// +/// # Concurrency note +/// +/// WASM calls are synchronous CPU-bound work. For this bridge the WASM operations +/// are in-memory (no I/O), so holding the async mutex across them is acceptable. +/// A `spawn_blocking` offload is intentionally omitted here to keep the stateful +/// borrow simple; revisit if the context WASM modules become compute-heavy. +pub struct WasmContextBridge { + /// Kept alive to ensure the engine outlives the compiled component/store. + #[allow(dead_code)] + engine: Arc, + /// Persistent (store, instance) pair — reused across every method call. + state: tokio::sync::Mutex<(Store, wasmtime::component::Instance)>, +} + +impl WasmContextBridge { + /// Load a WASM context component from raw bytes. + /// + /// Compiles the Component and creates a **single** persistent store + instance + /// that is reused for all subsequent method calls. + pub fn from_bytes( + wasm_bytes: &[u8], + engine: Arc, + ) -> Result> { + let component = Component::new(&engine, wasm_bytes)?; + let (linker, mut store) = create_linker_and_store(&engine, &super::WasmLimits::default())?; + let instance = linker.instantiate(&mut store, &component)?; + + Ok(Self { + engine, + state: tokio::sync::Mutex::new((store, instance)), + }) + } + + /// Convenience: load a WASM context component from a file path. 
+ pub fn from_file( + path: &Path, + engine: Arc, + ) -> Result> { + let bytes = + std::fs::read(path).map_err(|e| format!("failed to read {}: {e}", path.display()))?; + Self::from_bytes(&bytes, engine) + } +} + +impl ContextManager for WasmContextBridge { + fn add_message( + &self, + message: Value, + ) -> Pin> + Send + '_>> { + Box::pin(async move { + let message_bytes = serde_json::to_vec(&message).map_err(|e| ContextError::Other { + message: format!("failed to serialize message: {e}"), + })?; + + let mut guard = self.state.lock().await; + let (store, instance) = &mut *guard; + + let func = get_context_func::<(Vec,), (Result<(), String>,)>( + instance, + store, + "add-message", + ) + .map_err(|e| ContextError::Other { + message: format!("WASM add-message lookup failed: {e}"), + })?; + + let (result,) = + func.call(store, (message_bytes,)) + .map_err(|e| ContextError::Other { + message: format!("WASM add-message call failed: {e}"), + })?; + + result.map_err(|e| ContextError::Other { + message: format!("WASM add-message returned error: {e}"), + }) + }) + } + + fn get_messages( + &self, + ) -> Pin, ContextError>> + Send + '_>> { + Box::pin(async move { + let mut guard = self.state.lock().await; + let (store, instance) = &mut *guard; + + let func = + get_context_func::<(), (Result, String>,)>(instance, store, "get-messages") + .map_err(|e| ContextError::Other { + message: format!("WASM get-messages lookup failed: {e}"), + })?; + + let (result,) = func.call(store, ()).map_err(|e| ContextError::Other { + message: format!("WASM get-messages call failed: {e}"), + })?; + + let bytes = result.map_err(|e| ContextError::Other { + message: format!("WASM get-messages returned error: {e}"), + })?; + + serde_json::from_slice::>(&bytes).map_err(|e| ContextError::Other { + message: format!("failed to deserialize messages: {e}"), + }) + }) + } + + fn get_messages_for_request( + &self, + token_budget: Option, + provider: Option>, + ) -> Pin, ContextError>> + Send + '_>> { + 
Box::pin(async move { + let provider_name = provider + .as_ref() + .map(|p| p.name().to_string()) + .unwrap_or_default(); + + let params = serde_json::json!({ + "token_budget": token_budget, + "provider_name": provider_name, + }); + let params_bytes = serde_json::to_vec(¶ms).map_err(|e| ContextError::Other { + message: format!("failed to serialize get-messages-for-request params: {e}"), + })?; + + let mut guard = self.state.lock().await; + let (store, instance) = &mut *guard; + + let func = get_context_func::<(Vec,), (Result, String>,)>( + instance, + store, + "get-messages-for-request", + ) + .map_err(|e| ContextError::Other { + message: format!("WASM get-messages-for-request lookup failed: {e}"), + })?; + + let (result,) = func + .call(store, (params_bytes,)) + .map_err(|e| ContextError::Other { + message: format!("WASM get-messages-for-request call failed: {e}"), + })?; + + let bytes = result.map_err(|e| ContextError::Other { + message: format!("WASM get-messages-for-request returned error: {e}"), + })?; + + serde_json::from_slice::>(&bytes).map_err(|e| ContextError::Other { + message: format!("failed to deserialize messages for request: {e}"), + }) + }) + } + + fn set_messages( + &self, + messages: Vec, + ) -> Pin> + Send + '_>> { + Box::pin(async move { + let messages_bytes = + serde_json::to_vec(&messages).map_err(|e| ContextError::Other { + message: format!("failed to serialize messages: {e}"), + })?; + + let mut guard = self.state.lock().await; + let (store, instance) = &mut *guard; + + let func = get_context_func::<(Vec,), (Result<(), String>,)>( + instance, + store, + "set-messages", + ) + .map_err(|e| ContextError::Other { + message: format!("WASM set-messages lookup failed: {e}"), + })?; + + let (result,) = + func.call(store, (messages_bytes,)) + .map_err(|e| ContextError::Other { + message: format!("WASM set-messages call failed: {e}"), + })?; + + result.map_err(|e| ContextError::Other { + message: format!("WASM set-messages returned error: {e}"), + }) 
+ }) + } + + fn clear(&self) -> Pin> + Send + '_>> { + Box::pin(async move { + let mut guard = self.state.lock().await; + let (store, instance) = &mut *guard; + + let func = get_context_func::<(), (Result<(), String>,)>(instance, store, "clear") + .map_err(|e| ContextError::Other { + message: format!("WASM clear lookup failed: {e}"), + })?; + + let (result,) = func.call(store, ()).map_err(|e| ContextError::Other { + message: format!("WASM clear call failed: {e}"), + })?; + + result.map_err(|e| ContextError::Other { + message: format!("WASM clear returned error: {e}"), + }) + }) + } +} + +#[cfg(test)] +mod tests { + use super::*; + use serde_json::json; + use std::sync::Arc; + + /// Compile-time check: WasmContextBridge satisfies Arc. + /// + /// This catches breakage during unit-test runs without needing the integration test. + #[allow(dead_code)] + fn _assert_wasm_context_bridge_is_context_manager(bridge: WasmContextBridge) { + let _: Arc = Arc::new(bridge); + } + + /// Helper: read the memory-context.wasm fixture bytes. + /// + /// The fixture lives at the workspace root under `tests/fixtures/wasm/`. + /// CARGO_MANIFEST_DIR points to `amplifier-core/crates/amplifier-core`, + /// so we walk up to the workspace root first. 
+ fn memory_context_wasm_bytes() -> Vec { + let manifest = std::path::Path::new(env!("CARGO_MANIFEST_DIR")); + // Two candidates because the workspace root may be at different depths + // depending on how the repo is checked out: + // - 3 levels up: used as a git submodule (super-repo/amplifier-core/crates/amplifier-core) + // - 2 levels up: standalone checkout (amplifier-core/crates/amplifier-core) + let candidates = [ + manifest.join("../../../tests/fixtures/wasm/memory-context.wasm"), + manifest.join("../../tests/fixtures/wasm/memory-context.wasm"), + ]; + for p in &candidates { + if p.exists() { + return std::fs::read(p).unwrap_or_else(|e| { + panic!("Failed to read memory-context.wasm at {p:?}: {e}") + }); + } + } + panic!( + "memory-context.wasm not found. Tried: {:?}", + candidates + .iter() + .map(|p| p.display().to_string()) + .collect::>() + ); + } + + /// Helper: create a shared engine with component model enabled. + fn make_engine() -> Arc { + let mut config = wasmtime::Config::new(); + config.wasm_component_model(true); + Arc::new(Engine::new(&config).expect("engine creation failed")) + } + + /// E2E stateful roundtrip: add → get → add → get → clear → get. + /// + /// This test verifies that the SAME WASM instance is reused across calls. + /// If a new instance were created per call, the fixture's `MESSAGES` static + /// would reset to empty on each invocation and the counts would never grow. + #[tokio::test] + async fn memory_context_stateful_roundtrip() { + let engine = make_engine(); + let bytes = memory_context_wasm_bytes(); + let bridge = + WasmContextBridge::from_bytes(&bytes, engine).expect("from_bytes should succeed"); + + // Initially empty. + let messages = bridge + .get_messages() + .await + .expect("get_messages should succeed"); + assert_eq!(messages.len(), 0, "expected 0 messages initially"); + + // Add first message. 
+ bridge + .add_message(json!({"role": "user", "content": "hello"})) + .await + .expect("add_message should succeed"); + + // Should have 1 message. + let messages = bridge + .get_messages() + .await + .expect("get_messages should succeed"); + assert_eq!(messages.len(), 1, "expected 1 message after first add"); + + // Add second message. + bridge + .add_message(json!({"role": "assistant", "content": "hi"})) + .await + .expect("add_message should succeed"); + + // Should have 2 messages. + let messages = bridge + .get_messages() + .await + .expect("get_messages should succeed"); + assert_eq!(messages.len(), 2, "expected 2 messages after second add"); + + // Clear. + bridge.clear().await.expect("clear should succeed"); + + // Should be empty again. + let messages = bridge + .get_messages() + .await + .expect("get_messages should succeed"); + assert_eq!(messages.len(), 0, "expected 0 messages after clear"); + } +} diff --git a/crates/amplifier-core/src/bridges/wasm_hook.rs b/crates/amplifier-core/src/bridges/wasm_hook.rs new file mode 100644 index 0000000..65dc620 --- /dev/null +++ b/crates/amplifier-core/src/bridges/wasm_hook.rs @@ -0,0 +1,220 @@ +//! WASM bridge for sandboxed hook handler modules (Component Model). +//! +//! [`WasmHookBridge`] loads a WASM Component via wasmtime and implements the +//! [`HookHandler`] trait, enabling sandboxed in-process hook execution. The guest +//! exports `handle` (accepts a JSON envelope as bytes, returns JSON `HookResult`). +//! +//! Gated behind the `wasm` feature flag. + +use std::future::Future; +use std::path::Path; +use std::pin::Pin; +use std::sync::Arc; + +use crate::errors::HookError; +use crate::models::HookResult; +use crate::traits::HookHandler; +use serde_json::Value; +use wasmtime::component::Component; +use wasmtime::{Engine, Store}; + +use super::wasm_tool::{create_linker_and_store, WasmState}; + +/// The WIT interface name used by `cargo component` for hook handler exports. 
+const INTERFACE_NAME: &str = "amplifier:modules/hook-handler@1.0.0"; + +/// Convenience alias for the wasmtime typed function handle takes (bytes) → result(bytes, string). +type HandleFunc = wasmtime::component::TypedFunc<(Vec,), (Result, String>,)>; + +/// Shorthand for the fallible return type used by helper functions. +type WasmResult = Result>; + +/// Look up the `handle` typed function export from a component instance. +/// +/// Tries: +/// 1. Direct root-level export `"handle"` +/// 2. Nested inside the [`INTERFACE_NAME`] exported instance +fn get_handle_func( + instance: &wasmtime::component::Instance, + store: &mut Store, +) -> WasmResult { + // Try direct root-level export first. + if let Ok(f) = + instance.get_typed_func::<(Vec,), (Result, String>,)>(&mut *store, "handle") + { + return Ok(f); + } + + // Try nested inside the interface-exported instance. + let iface_idx = instance + .get_export_index(&mut *store, None, INTERFACE_NAME) + .ok_or_else(|| format!("export instance '{INTERFACE_NAME}' not found"))?; + let func_idx = instance + .get_export_index(&mut *store, Some(&iface_idx), "handle") + .ok_or_else(|| format!("export function 'handle' not found in '{INTERFACE_NAME}'"))?; + let func = instance + .get_typed_func::<(Vec,), (Result, String>,)>(&mut *store, &func_idx) + .map_err(|e| format!("typed func lookup failed for 'handle': {e}"))?; + Ok(func) +} + +/// Helper: call the `handle` export on a fresh component instance. 
+/// +/// The envelope bytes must be a JSON-serialized object: +/// `{"event": "", "data": }` +fn call_handle( + engine: &Engine, + component: &Component, + envelope_bytes: Vec, +) -> Result, Box> { + let (linker, mut store) = create_linker_and_store(engine, &super::WasmLimits::default())?; + let instance = linker.instantiate(&mut store, component)?; + + let func = get_handle_func(&instance, &mut store)?; + let (result,) = func.call(&mut store, (envelope_bytes,))?; + match result { + Ok(bytes) => Ok(bytes), + Err(err) => Err(err.into()), + } +} + +/// A bridge that loads a WASM Component and exposes it as a native [`HookHandler`]. +/// +/// The component is compiled once and can be instantiated for each hook invocation. +/// `handle` is called per invocation inside a `spawn_blocking` task (wasmtime is synchronous). +pub struct WasmHookBridge { + engine: Arc, + component: Component, +} + +impl WasmHookBridge { + /// Load a WASM hook component from raw bytes. + /// + /// Compiles the Component and caches it for reuse across `handle()` calls. + pub fn from_bytes( + wasm_bytes: &[u8], + engine: Arc, + ) -> Result> { + let component = Component::new(&engine, wasm_bytes)?; + Ok(Self { engine, component }) + } + + /// Convenience: load a WASM hook component from a file path. + pub fn from_file( + path: &Path, + engine: Arc, + ) -> Result> { + let bytes = + std::fs::read(path).map_err(|e| format!("failed to read {}: {e}", path.display()))?; + Self::from_bytes(&bytes, engine) + } +} + +impl HookHandler for WasmHookBridge { + fn handle( + &self, + event: &str, + data: Value, + ) -> Pin> + Send + '_>> { + let event = event.to_string(); + Box::pin(async move { + // Serialize event + data as the JSON envelope the WASM guest expects. 
+ let envelope = serde_json::json!({"event": event, "data": data}); + let envelope_bytes = serde_json::to_vec(&envelope).map_err(|e| HookError::Other { + message: format!("failed to serialize hook envelope: {e}"), + })?; + + let engine = Arc::clone(&self.engine); + let component = self.component.clone(); // Component is Arc-backed, cheap clone + + let result_bytes = tokio::task::spawn_blocking(move || { + call_handle(&engine, &component, envelope_bytes) + }) + .await + .map_err(|e| HookError::Other { + message: format!("WASM hook execution task panicked: {e}"), + })? + .map_err(|e| HookError::Other { + message: format!("WASM handle failed: {e}"), + })?; + + let hook_result: HookResult = + serde_json::from_slice(&result_bytes).map_err(|e| HookError::Other { + message: format!("failed to deserialize HookResult: {e}"), + })?; + + Ok(hook_result) + }) + } +} + +#[cfg(test)] +mod tests { + use super::*; + use std::sync::Arc; + + /// Compile-time check: WasmHookBridge satisfies Arc. + /// + /// Note: the integration test in `tests/wasm_hook_e2e.rs` would have an equivalent + /// check from the *public* API surface. This one catches breakage during unit-test + /// runs without needing the integration test. + #[allow(dead_code)] + fn _assert_wasm_hook_bridge_is_hook_handler(bridge: WasmHookBridge) { + let _: Arc = Arc::new(bridge); + } + + /// Helper: read the deny-hook.wasm fixture bytes. + /// + /// The fixture lives at the workspace root under `tests/fixtures/wasm/`. + /// CARGO_MANIFEST_DIR points to `amplifier-core/crates/amplifier-core`, + /// so we walk up to the workspace root first. 
+ fn deny_hook_wasm_bytes() -> Vec { + let manifest = std::path::Path::new(env!("CARGO_MANIFEST_DIR")); + // Two candidates because the workspace root may be at different depths + // depending on how the repo is checked out: + // - 3 levels up: used as a git submodule (super-repo/amplifier-core/crates/amplifier-core) + // - 2 levels up: standalone checkout (amplifier-core/crates/amplifier-core) + let candidates = [ + manifest.join("../../../tests/fixtures/wasm/deny-hook.wasm"), + manifest.join("../../tests/fixtures/wasm/deny-hook.wasm"), + ]; + for p in &candidates { + if p.exists() { + return std::fs::read(p) + .unwrap_or_else(|e| panic!("Failed to read deny-hook.wasm at {p:?}: {e}")); + } + } + panic!( + "deny-hook.wasm not found. Tried: {:?}", + candidates + .iter() + .map(|p| p.display().to_string()) + .collect::>() + ); + } + + /// Helper: create a shared engine with component model enabled. + fn make_engine() -> Arc { + let mut config = wasmtime::Config::new(); + config.wasm_component_model(true); + Arc::new(Engine::new(&config).expect("engine creation failed")) + } + + #[tokio::test] + async fn deny_hook_returns_deny_action() { + let engine = make_engine(); + let bytes = deny_hook_wasm_bytes(); + let bridge = WasmHookBridge::from_bytes(&bytes, engine).expect("from_bytes should succeed"); + + let data = serde_json::json!({"key": "value"}); + let result = bridge.handle("test:event", data).await; + let result = result.expect("handle should succeed"); + + assert_eq!(result.action, crate::models::HookAction::Deny); + assert!( + result.reason.as_deref().unwrap_or("").contains("Denied"), + "expected reason to contain 'Denied', got: {:?}", + result.reason + ); + } +} diff --git a/crates/amplifier-core/src/bridges/wasm_orchestrator.rs b/crates/amplifier-core/src/bridges/wasm_orchestrator.rs new file mode 100644 index 0000000..a918417 --- /dev/null +++ b/crates/amplifier-core/src/bridges/wasm_orchestrator.rs @@ -0,0 +1,696 @@ +//! 
WASM bridge for sandboxed orchestrator modules (Component Model). +//! +//! [`WasmOrchestratorBridge`] loads a WASM Component via wasmtime and implements +//! the [`Orchestrator`] trait. Unlike Tier-1 bridges, the orchestrator component +//! **imports** `kernel-service` host functions that call back into the Coordinator. +//! These are registered on the [`Linker`] before instantiation. +//! +//! Gated behind the `wasm` feature flag. + +use std::collections::HashMap; +use std::future::Future; +use std::path::Path; +use std::pin::Pin; +use std::sync::Arc; + +use serde_json::Value; +use wasmtime::component::{Component, Linker}; +use wasmtime::{Engine, Store}; + +use crate::coordinator::Coordinator; +use crate::errors::{AmplifierError, SessionError}; +use crate::traits::{ContextManager, Orchestrator, Provider, Tool}; + +use super::wasm_tool::{create_linker_and_store, WasmState}; + +/// WIT interface name for the kernel-service host import (used by orchestrator guests). +const KERNEL_SERVICE_INTERFACE: &str = "amplifier:modules/kernel-service@1.0.0"; + +/// WIT interface name for the orchestrator export. +const ORCHESTRATOR_INTERFACE: &str = "amplifier:modules/orchestrator@1.0.0"; + +/// Shorthand for the typed function returned by the orchestrator `execute` export. +type OrchestratorExecuteFunc = + wasmtime::component::TypedFunc<(Vec,), (Result, String>,)>; + +/// A bridge that loads a WASM Component and exposes it as a native [`Orchestrator`]. +/// +/// The component is compiled once at construction time. Each `execute()` call: +/// 1. Creates a [`Linker`] with WASI + kernel-service host imports registered. +/// 2. Instantiates the component in a fresh [`Store`]. +/// 3. Calls the WASM `execute` export inside `spawn_blocking`. +/// +/// Host import closures use `tokio::runtime::Handle::current().block_on()` to +/// drive async coordinator operations from within the synchronous WASM context. 
+pub struct WasmOrchestratorBridge { + engine: Arc, + component: Component, + coordinator: Arc, +} + +impl WasmOrchestratorBridge { + /// Load a WASM orchestrator component from raw bytes. + /// + /// Compiles the Component and stores the coordinator for use in host import + /// closures. Unlike Tier-1 bridges, no eager `get-spec` call is made. + pub fn from_bytes( + wasm_bytes: &[u8], + engine: Arc, + coordinator: Arc, + ) -> Result> { + let component = Component::new(&engine, wasm_bytes)?; + Ok(Self { + engine, + component, + coordinator, + }) + } + + /// Convenience: load a WASM orchestrator component from a file path. + pub fn from_file( + path: &Path, + engine: Arc, + coordinator: Arc, + ) -> Result> { + let bytes = + std::fs::read(path).map_err(|e| format!("failed to read {}: {e}", path.display()))?; + Self::from_bytes(&bytes, engine, coordinator) + } +} + +// --------------------------------------------------------------------------- +// Kernel-service host imports +// --------------------------------------------------------------------------- + +/// Register all `kernel-service` host import functions on a component linker. +/// +/// Each function captures an `Arc` clone and dispatches to the +/// appropriate coordinator method. Async coordinator calls are driven via +/// `tokio::runtime::Handle::current().block_on()` (safe because WASM runs +/// inside `spawn_blocking` which executes on a non-async blocking thread that +/// still holds the outer Tokio runtime handle). 
+fn register_kernel_service_imports( + linker: &mut Linker, + coordinator: Arc, +) -> Result<(), Box> { + let mut instance = linker.instance(KERNEL_SERVICE_INTERFACE)?; + + // ------------------------------------------------------------------ + // execute-tool: func(request: list) -> result, string> + // + // Request JSON: {"name": "", "input": } + // Response JSON: serialized ToolResult + // ------------------------------------------------------------------ + { + let coord = Arc::clone(&coordinator); + instance.func_wrap( + "execute-tool", + move |_caller, + (request_bytes,): (Vec,)| + -> wasmtime::Result<(Result, String>,)> { + let result = tokio::runtime::Handle::current().block_on(async { + let req: Value = serde_json::from_slice(&request_bytes) + .map_err(|e| format!("execute-tool: bad request: {e}"))?; + let name = req + .get("name") + .and_then(|v| v.as_str()) + .ok_or_else(|| "execute-tool: missing 'name' field".to_string())?; + let input = req.get("input").cloned().unwrap_or(Value::Null); + let tool = coord + .get_tool(name) + .ok_or_else(|| format!("execute-tool: tool not found: {name}"))?; + let tool_result = tool + .execute(input) + .await + .map_err(|e| format!("execute-tool: execution failed: {e}"))?; + serde_json::to_vec(&tool_result) + .map_err(|e| format!("execute-tool: serialize failed: {e}")) + }); + Ok((result,)) + }, + )?; + } + + // ------------------------------------------------------------------ + // complete-with-provider: func(request: list) -> result, string> + // + // Request JSON: {"name": "", "request": } + // Response JSON: serialized ChatResponse + // ------------------------------------------------------------------ + { + let coord = Arc::clone(&coordinator); + instance.func_wrap( + "complete-with-provider", + move |_caller, + (request_bytes,): (Vec,)| + -> wasmtime::Result<(Result, String>,)> { + let result = tokio::runtime::Handle::current().block_on(async { + let req: Value = serde_json::from_slice(&request_bytes) + .map_err(|e| 
format!("complete-with-provider: bad request: {e}"))?; + let name = req.get("name").and_then(|v| v.as_str()).ok_or_else(|| { + "complete-with-provider: missing 'name' field".to_string() + })?; + let request_val = req.get("request").cloned().unwrap_or(Value::Null); + let provider = coord.get_provider(name).ok_or_else(|| { + format!("complete-with-provider: provider not found: {name}") + })?; + let chat_request: crate::messages::ChatRequest = + serde_json::from_value(request_val) + .map_err(|e| format!("complete-with-provider: bad ChatRequest: {e}"))?; + let response = provider + .complete(chat_request) + .await + .map_err(|e| format!("complete-with-provider: failed: {e}"))?; + serde_json::to_vec(&response) + .map_err(|e| format!("complete-with-provider: serialize failed: {e}")) + }); + Ok((result,)) + }, + )?; + } + + // ------------------------------------------------------------------ + // emit-hook: func(request: list) -> result, string> + // + // Request JSON: {"event": "", "data": } + // Response JSON: serialized HookResult + // ------------------------------------------------------------------ + { + let coord = Arc::clone(&coordinator); + instance.func_wrap( + "emit-hook", + move |_caller, + (request_bytes,): (Vec,)| + -> wasmtime::Result<(Result, String>,)> { + let result = tokio::runtime::Handle::current().block_on(async { + let req: Value = serde_json::from_slice(&request_bytes) + .map_err(|e| format!("emit-hook: bad request: {e}"))?; + let event = req + .get("event") + .and_then(|v| v.as_str()) + .ok_or_else(|| "emit-hook: missing 'event' field".to_string())?; + let data = req.get("data").cloned().unwrap_or(Value::Null); + let hook_result = coord.hooks().emit(event, data).await; + serde_json::to_vec(&hook_result) + .map_err(|e| format!("emit-hook: serialize failed: {e}")) + }); + Ok((result,)) + }, + )?; + } + + // ------------------------------------------------------------------ + // get-messages: func(request: list) -> result, string> + // + // Request 
JSON: {} (empty, request bytes are ignored) + // Response JSON: serialized Vec + // ------------------------------------------------------------------ + { + let coord = Arc::clone(&coordinator); + instance.func_wrap( + "get-messages", + move |_caller, + (_request_bytes,): (Vec,)| + -> wasmtime::Result<(Result, String>,)> { + let result = tokio::runtime::Handle::current().block_on(async { + let context = coord + .context() + .ok_or_else(|| "get-messages: no context manager mounted".to_string())?; + let messages = context + .get_messages() + .await + .map_err(|e| format!("get-messages: failed: {e}"))?; + serde_json::to_vec(&messages) + .map_err(|e| format!("get-messages: serialize failed: {e}")) + }); + Ok((result,)) + }, + )?; + } + + // ------------------------------------------------------------------ + // add-message: func(request: list) -> result<_, string> + // + // Request JSON: + // Returns unit on success. + // ------------------------------------------------------------------ + { + let coord = Arc::clone(&coordinator); + instance.func_wrap( + "add-message", + move |_caller, + (request_bytes,): (Vec,)| + -> wasmtime::Result<(Result<(), String>,)> { + let result = tokio::runtime::Handle::current().block_on(async { + let message: Value = serde_json::from_slice(&request_bytes) + .map_err(|e| format!("add-message: bad request: {e}"))?; + let context = coord + .context() + .ok_or_else(|| "add-message: no context manager mounted".to_string())?; + context + .add_message(message) + .await + .map_err(|e| format!("add-message: failed: {e}")) + }); + Ok((result,)) + }, + )?; + } + + // ------------------------------------------------------------------ + // get-capability: func(request: list) -> result, string> + // + // Request JSON: {"name": ""} + // Response JSON: serialized capability Value + // ------------------------------------------------------------------ + { + let coord = Arc::clone(&coordinator); + instance.func_wrap( + "get-capability", + move |_caller, + 
(request_bytes,): (Vec,)| + -> wasmtime::Result<(Result, String>,)> { + let result: Result, String> = (|| { + let req: Value = serde_json::from_slice(&request_bytes) + .map_err(|e| format!("get-capability: bad request: {e}"))?; + let name = req + .get("name") + .and_then(|v| v.as_str()) + .ok_or_else(|| "get-capability: missing 'name' field".to_string())?; + match coord.get_capability(name) { + Some(val) => serde_json::to_vec(&val) + .map_err(|e| format!("get-capability: serialize failed: {e}")), + None => Err(format!("get-capability: not found: {name}")), + } + })(); + Ok((result,)) + }, + )?; + } + + // ------------------------------------------------------------------ + // register-capability: func(request: list) -> result<_, string> + // + // Request JSON: {"name": "", "value": } + // Returns unit on success. + // ------------------------------------------------------------------ + { + let coord = Arc::clone(&coordinator); + instance.func_wrap( + "register-capability", + move |_caller, + (request_bytes,): (Vec,)| + -> wasmtime::Result<(Result<(), String>,)> { + let result: Result<(), String> = (|| { + let req: Value = serde_json::from_slice(&request_bytes) + .map_err(|e| format!("register-capability: bad request: {e}"))?; + let name = req + .get("name") + .and_then(|v| v.as_str()) + .ok_or_else(|| "register-capability: missing 'name' field".to_string())?; + let value = req.get("value").cloned().unwrap_or(Value::Null); + coord.register_capability(name, value); + Ok(()) + })(); + Ok((result,)) + }, + )?; + } + + Ok(()) +} + +// --------------------------------------------------------------------------- +// Execute export lookup +// --------------------------------------------------------------------------- + +/// Look up the `execute` export from an orchestrator component instance. +/// +/// Tries: +/// 1. Direct root-level export by `"execute"` +/// 2. 
Nested inside the [`ORCHESTRATOR_INTERFACE`] exported instance +fn get_execute_func( + instance: &wasmtime::component::Instance, + store: &mut Store, +) -> Result> { + // Try root-level first. + if let Ok(f) = + instance.get_typed_func::<(Vec,), (Result, String>,)>(&mut *store, "execute") + { + return Ok(f); + } + + // Try nested inside the interface-exported instance. + let iface_idx = instance + .get_export_index(&mut *store, None, ORCHESTRATOR_INTERFACE) + .ok_or_else(|| format!("export instance '{ORCHESTRATOR_INTERFACE}' not found"))?; + let func_idx = instance + .get_export_index(&mut *store, Some(&iface_idx), "execute") + .ok_or_else(|| format!("export 'execute' not found in '{ORCHESTRATOR_INTERFACE}'"))?; + let func = instance + .get_typed_func::<(Vec,), (Result, String>,)>(&mut *store, &func_idx) + .map_err(|e| format!("typed func lookup failed for 'execute': {e}"))?; + Ok(func) +} + +// --------------------------------------------------------------------------- +// Synchronous WASM call (for spawn_blocking) +// --------------------------------------------------------------------------- + +/// Run the orchestrator `execute` call synchronously. +/// +/// Creates a fresh linker (with WASI + kernel-service imports) and store, +/// instantiates the component, and calls the `execute` export. +/// Intended to be called from inside `tokio::task::spawn_blocking`. +fn call_execute_sync( + engine: &Engine, + component: &Component, + coordinator: Arc, + request_bytes: Vec, +) -> Result, Box> { + // Start with WASI-equipped linker + store. + let (mut linker, mut store) = create_linker_and_store(engine, &super::WasmLimits::default())?; + + // Extend the linker with kernel-service host imports. 
+ register_kernel_service_imports(&mut linker, coordinator)?; + + let instance = linker.instantiate(&mut store, component)?; + let func = get_execute_func(&instance, &mut store)?; + let (result,) = func.call(&mut store, (request_bytes,))?; + match result { + Ok(bytes) => Ok(bytes), + Err(err) => Err(err.into()), + } +} + +// --------------------------------------------------------------------------- +// Orchestrator trait impl +// --------------------------------------------------------------------------- + +impl Orchestrator for WasmOrchestratorBridge { + /// Run the WASM agent loop for a single prompt. + /// + /// Only `prompt` is forwarded to the WASM guest as `{"prompt": "..."}` bytes. + /// The `context`, `providers`, `tools`, `hooks`, and `coordinator` parameters + /// are not serialized — the WASM guest accesses these via `kernel-service` + /// host import callbacks that route through `self.coordinator`. + fn execute( + &self, + prompt: String, + _context: Arc, + _providers: HashMap>, + _tools: HashMap>, + _hooks: Value, + _coordinator: Value, + ) -> Pin> + Send + '_>> { + Box::pin(async move { + log::debug!( + "WasmOrchestratorBridge::execute — context, providers, tools, hooks, and \ + coordinator parameters are not forwarded to the WASM guest; the guest uses \ + kernel-service host import callbacks routed through self.coordinator" + ); + + // Serialize request: {"prompt": "..."} + let request_bytes = serde_json::to_vec(&serde_json::json!({"prompt": prompt})) + .map_err(|e| { + AmplifierError::Session(SessionError::Other { + message: format!("failed to serialize orchestrator request: {e}"), + }) + })?; + + let engine = Arc::clone(&self.engine); + let component = self.component.clone(); // Component is Arc-backed, cheap clone + let coordinator = Arc::clone(&self.coordinator); + + let result_bytes = tokio::task::spawn_blocking(move || { + call_execute_sync(&engine, &component, coordinator, request_bytes) + }) + .await + .map_err(|e| { + 
AmplifierError::Session(SessionError::Other { + message: format!("WASM orchestrator task panicked: {e}"), + }) + })? + .map_err(|e| { + AmplifierError::Session(SessionError::Other { + message: format!("WASM orchestrator execute failed: {e}"), + }) + })?; + + // The guest macro serializes its String result as a JSON string, + // so we deserialize the bytes back into a String. + let result: String = serde_json::from_slice(&result_bytes).map_err(|e| { + AmplifierError::Session(SessionError::Other { + message: format!("failed to deserialize orchestrator result: {e}"), + }) + })?; + + Ok(result) + }) + } +} + +// --------------------------------------------------------------------------- +// Tests +// --------------------------------------------------------------------------- + +#[cfg(test)] +mod tests { + use super::*; + use std::sync::Arc; + + use crate::models::ToolResult; + use crate::testing::FakeTool; + + // ------------------------------------------------------------------ + // Compile-time check + // ------------------------------------------------------------------ + + /// Compile-time check: WasmOrchestratorBridge satisfies Arc. + #[allow(dead_code)] + fn _assert_wasm_orchestrator_bridge_is_orchestrator(bridge: WasmOrchestratorBridge) { + let _: Arc = Arc::new(bridge); + } + + // ------------------------------------------------------------------ + // WASM fixture helpers + // ------------------------------------------------------------------ + + /// Helper: read the passthrough-orchestrator.wasm fixture bytes. 
+ fn passthrough_orchestrator_wasm_bytes() -> Vec { + let manifest = std::path::Path::new(env!("CARGO_MANIFEST_DIR")); + let candidates = [ + manifest.join("../../../tests/fixtures/wasm/passthrough-orchestrator.wasm"), + manifest.join("../../tests/fixtures/wasm/passthrough-orchestrator.wasm"), + ]; + for p in &candidates { + if p.exists() { + return std::fs::read(p).unwrap_or_else(|e| { + panic!("Failed to read passthrough-orchestrator.wasm at {p:?}: {e}") + }); + } + } + panic!( + "passthrough-orchestrator.wasm not found. Tried: {:?}", + candidates + .iter() + .map(|p| p.display().to_string()) + .collect::>() + ); + } + + /// Helper: read the echo-tool.wasm fixture bytes. + fn echo_tool_wasm_bytes() -> Vec { + let manifest = std::path::Path::new(env!("CARGO_MANIFEST_DIR")); + let candidates = [ + manifest.join("../../../tests/fixtures/wasm/echo-tool.wasm"), + manifest.join("../../tests/fixtures/wasm/echo-tool.wasm"), + ]; + for p in &candidates { + if p.exists() { + return std::fs::read(p) + .unwrap_or_else(|e| panic!("Failed to read echo-tool.wasm at {p:?}: {e}")); + } + } + panic!( + "echo-tool.wasm not found. Tried: {:?}", + candidates + .iter() + .map(|p| p.display().to_string()) + .collect::>() + ); + } + + /// Helper: create a shared engine with component model enabled. + fn make_engine() -> Arc { + let mut config = wasmtime::Config::new(); + config.wasm_component_model(true); + Arc::new(Engine::new(&config).expect("engine creation failed")) + } + + // ------------------------------------------------------------------ + // Tests + // ------------------------------------------------------------------ + + /// E2E: passthrough-orchestrator calls execute-tool via kernel-service host import. 
+ /// + /// Setup: + /// - Coordinator with FakeTool "echo-tool" that echoes input back + /// - WasmOrchestratorBridge wrapping passthrough-orchestrator.wasm + /// + /// Flow: + /// host execute() -> WASM execute() -> kernel-service::execute-tool (host import) + /// -> coordinator.get_tool("echo-tool") -> FakeTool.execute() -> returns ToolResult + /// -> WASM serializes result.to_string() -> host deserializes -> returns String + #[tokio::test] + async fn passthrough_orchestrator_calls_echo_tool() { + let engine = make_engine(); + let bytes = passthrough_orchestrator_wasm_bytes(); + + // Build a coordinator with a FakeTool that echoes the input back. + let coordinator = Arc::new(crate::coordinator::Coordinator::new_for_test()); + let echo = Arc::new(FakeTool::with_responses( + "echo-tool", + "Echoes input back", + vec![ToolResult { + success: true, + output: Some(serde_json::json!({"prompt": "hello from test"})), + error: None, + }], + )); + coordinator.mount_tool("echo-tool", echo); + + // Create the bridge. + let bridge = WasmOrchestratorBridge::from_bytes(&bytes, engine, coordinator) + .expect("from_bytes should succeed"); + + // Execute the orchestrator. + let result = bridge + .execute( + "hello from test".to_string(), + Arc::new(crate::testing::FakeContextManager::new()), + Default::default(), + Default::default(), + serde_json::json!({}), + serde_json::json!({}), + ) + .await; + + let response = result.expect("execute should succeed"); + // The passthrough-orchestrator returns result.to_string() where result is + // the deserialized ToolResult JSON value. + assert!( + !response.is_empty(), + "expected non-empty orchestrator response" + ); + assert!( + response.contains("echo-tool") + || response.contains("prompt") + || response.contains("hello"), + "expected response to contain echoed data, got: {response}" + ); + } + + /// E2E: passthrough-orchestrator with a native FakeTool that returns default output. 
+ /// + /// Uses FakeTool::new (no preconfigured responses) — it echoes the input JSON back. + #[tokio::test] + async fn passthrough_orchestrator_with_default_fake_tool() { + let engine = make_engine(); + let bytes = passthrough_orchestrator_wasm_bytes(); + + let coordinator = Arc::new(crate::coordinator::Coordinator::new_for_test()); + // FakeTool::new echoes input back as output when no responses are preconfigured. + coordinator.mount_tool("echo-tool", Arc::new(FakeTool::new("echo-tool", "echoes"))); + + let bridge = WasmOrchestratorBridge::from_bytes(&bytes, Arc::clone(&engine), coordinator) + .expect("from_bytes should succeed"); + + let result = bridge + .execute( + "test prompt".to_string(), + Arc::new(crate::testing::FakeContextManager::new()), + Default::default(), + Default::default(), + serde_json::json!({}), + serde_json::json!({}), + ) + .await; + + let response = result.expect("execute should succeed"); + assert!( + !response.is_empty(), + "expected non-empty response, got: {response:?}" + ); + } + + /// E2E: passthrough-orchestrator with the real WasmToolBridge (echo-tool.wasm). + /// + /// This is the full WASM-to-WASM path: + /// orchestrator WASM -> kernel-service import -> WasmToolBridge -> echo-tool WASM + #[tokio::test] + async fn passthrough_orchestrator_with_wasm_echo_tool() { + let engine = make_engine(); + let orch_bytes = passthrough_orchestrator_wasm_bytes(); + let echo_bytes = echo_tool_wasm_bytes(); + + let coordinator = Arc::new(crate::coordinator::Coordinator::new_for_test()); + + // Mount the real WasmToolBridge for echo-tool. 
+ let echo_bridge = + super::super::wasm_tool::WasmToolBridge::from_bytes(&echo_bytes, Arc::clone(&engine)) + .expect("echo-tool bridge should load"); + coordinator.mount_tool("echo-tool", Arc::new(echo_bridge)); + + let bridge = + WasmOrchestratorBridge::from_bytes(&orch_bytes, Arc::clone(&engine), coordinator) + .expect("from_bytes should succeed"); + + let result = bridge + .execute( + "wasm-to-wasm".to_string(), + Arc::new(crate::testing::FakeContextManager::new()), + Default::default(), + Default::default(), + serde_json::json!({}), + serde_json::json!({}), + ) + .await; + + let response = result.expect("wasm-to-wasm execute should succeed"); + assert!( + !response.is_empty(), + "expected non-empty response from wasm-to-wasm path, got: {response:?}" + ); + } + + /// Error case: execute-tool fails when tool is not mounted. + #[tokio::test] + async fn passthrough_orchestrator_tool_not_found_returns_error() { + let engine = make_engine(); + let bytes = passthrough_orchestrator_wasm_bytes(); + + // Coordinator with NO tools mounted. + let coordinator = Arc::new(crate::coordinator::Coordinator::new_for_test()); + + let bridge = WasmOrchestratorBridge::from_bytes(&bytes, engine, coordinator) + .expect("from_bytes should succeed"); + + let result = bridge + .execute( + "prompt".to_string(), + Arc::new(crate::testing::FakeContextManager::new()), + Default::default(), + Default::default(), + serde_json::json!({}), + serde_json::json!({}), + ) + .await; + + // Should fail because echo-tool is not mounted. + assert!( + result.is_err(), + "expected error when tool not mounted, got: {result:?}" + ); + } +} diff --git a/crates/amplifier-core/src/bridges/wasm_provider.rs b/crates/amplifier-core/src/bridges/wasm_provider.rs new file mode 100644 index 0000000..52a5874 --- /dev/null +++ b/crates/amplifier-core/src/bridges/wasm_provider.rs @@ -0,0 +1,439 @@ +//! WASM bridge for sandboxed LLM provider modules (Component Model). +//! +//! 
[`WasmProviderBridge`] loads a WASM Component via wasmtime and implements the +//! [`Provider`] trait, enabling sandboxed in-process LLM completions. The guest +//! exports `get-info` (returns JSON-serialized `ProviderInfo` bytes), `list-models`, +//! `complete`, and `parse-tool-calls`. +//! +//! Gated behind the `wasm` feature flag. + +use std::future::Future; +use std::path::Path; +use std::pin::Pin; +use std::sync::Arc; + +use wasmtime::component::Component; +use wasmtime::{Engine, Store}; + +use crate::errors::ProviderError; +use crate::messages::{ChatRequest, ChatResponse, ToolCall}; +use crate::models::{ModelInfo, ProviderInfo}; +use crate::traits::Provider; + +use super::wasm_tool::{create_linker_and_store, WasmState}; + +/// The WIT interface name used by `cargo component` for provider exports. +const INTERFACE_NAME: &str = "amplifier:modules/provider@1.0.0"; + +/// Shorthand for the fallible return type used by helper functions. +type WasmResult = Result>; + +/// Convenience constructor for a non-retryable [`ProviderError::Other`]. +fn wasm_provider_error(message: String) -> ProviderError { + ProviderError::Other { + message, + provider: None, + model: None, + retry_after: None, + status_code: None, + retryable: false, + delay_multiplier: None, + } +} + +/// Look up a typed function export from the provider component instance. +/// +/// Tries: +/// 1. Direct root-level export by `func_name` +/// 2. Nested inside the [`INTERFACE_NAME`] exported instance +fn get_provider_func( + instance: &wasmtime::component::Instance, + store: &mut Store, + func_name: &str, +) -> WasmResult> +where + Params: wasmtime::component::Lower + wasmtime::component::ComponentNamedList, + Results: wasmtime::component::Lift + wasmtime::component::ComponentNamedList, +{ + // Try direct root-level export first. + if let Ok(f) = instance.get_typed_func::(&mut *store, func_name) { + return Ok(f); + } + + // Try nested inside the interface-exported instance. 
+ let iface_idx = instance + .get_export_index(&mut *store, None, INTERFACE_NAME) + .ok_or_else(|| format!("export instance '{INTERFACE_NAME}' not found"))?; + let func_idx = instance + .get_export_index(&mut *store, Some(&iface_idx), func_name) + .ok_or_else(|| format!("export function '{func_name}' not found in '{INTERFACE_NAME}'"))?; + let func = instance + .get_typed_func::(&mut *store, &func_idx) + .map_err(|e| format!("typed func lookup failed for '{func_name}': {e}"))?; + Ok(func) +} + +/// Helper: call `get-info` on a fresh component instance. +/// +/// Returns raw JSON bytes representing the provider's `ProviderInfo`. +/// Note: `get-info` returns `list` with **no** `result<>` wrapper. +fn call_get_info(engine: &Engine, component: &Component) -> WasmResult> { + let (linker, mut store) = create_linker_and_store(engine, &super::WasmLimits::default())?; + let instance = linker.instantiate(&mut store, component)?; + + let func = get_provider_func::<(), (Vec,)>(&instance, &mut store, "get-info")?; + let (info_bytes,) = func.call(&mut store, ())?; + Ok(info_bytes) +} + +/// Helper: call `list-models` on a fresh component instance. +/// +/// Returns raw JSON bytes representing `Vec`. +fn call_list_models(engine: &Engine, component: &Component) -> WasmResult> { + let (linker, mut store) = create_linker_and_store(engine, &super::WasmLimits::default())?; + let instance = linker.instantiate(&mut store, component)?; + + let func = + get_provider_func::<(), (Result, String>,)>(&instance, &mut store, "list-models")?; + let (result,) = func.call(&mut store, ())?; + match result { + Ok(bytes) => Ok(bytes), + Err(err) => Err(err.into()), + } +} + +/// Helper: call `complete` on a fresh component instance. +/// +/// `request_bytes` must be a JSON-serialized `ChatRequest`. +/// Returns raw JSON bytes representing `ChatResponse`. 
+fn call_complete( + engine: &Engine, + component: &Component, + request_bytes: Vec, +) -> WasmResult> { + let (linker, mut store) = create_linker_and_store(engine, &super::WasmLimits::default())?; + let instance = linker.instantiate(&mut store, component)?; + + let func = get_provider_func::<(Vec,), (Result, String>,)>( + &instance, &mut store, "complete", + )?; + let (result,) = func.call(&mut store, (request_bytes,))?; + match result { + Ok(bytes) => Ok(bytes), + Err(err) => Err(err.into()), + } +} + +/// Helper: call `parse-tool-calls` on a fresh component instance. +/// +/// `response_bytes` must be a JSON-serialized `ChatResponse`. +/// Returns raw JSON bytes representing `Vec`. +fn call_parse_tool_calls( + engine: &Engine, + component: &Component, + response_bytes: Vec, +) -> WasmResult> { + let (linker, mut store) = create_linker_and_store(engine, &super::WasmLimits::default())?; + let instance = linker.instantiate(&mut store, component)?; + + let func = get_provider_func::<(Vec,), (Result, String>,)>( + &instance, + &mut store, + "parse-tool-calls", + )?; + let (result,) = func.call(&mut store, (response_bytes,))?; + match result { + Ok(bytes) => Ok(bytes), + Err(err) => Err(err.into()), + } +} + +/// A bridge that loads a WASM Component and exposes it as a native [`Provider`]. +/// +/// The component is compiled once and can be instantiated for each call. +/// `get-info` is called once at construction time to cache the provider name and +/// metadata. Per-call async methods (`list-models`, `complete`) run inside +/// `spawn_blocking` tasks because wasmtime is synchronous. +/// `parse_tool_calls` is a synchronous trait method; it calls WASM directly. +pub struct WasmProviderBridge { + engine: Arc, + component: Component, + /// Provider name, cached at load time from `get-info`. + name: String, + /// Provider metadata, cached at load time from `get-info`. 
+ info: ProviderInfo, +} + +impl WasmProviderBridge { + /// Load a WASM provider component from raw bytes. + /// + /// Compiles the Component, instantiates it once to call `get-info`, + /// and caches the resulting name and provider info. + pub fn from_bytes( + wasm_bytes: &[u8], + engine: Arc, + ) -> Result> { + let component = Component::new(&engine, wasm_bytes)?; + + // Call get-info to discover the provider's name and metadata. + let info_bytes = call_get_info(&engine, &component)?; + let info: ProviderInfo = serde_json::from_slice(&info_bytes)?; + + // The guest's ProviderInfo uses `id` as the canonical identifier. + // Use it as the provider name (consistent with Python convention). + let name = info.id.clone(); + + Ok(Self { + engine, + component, + name, + info, + }) + } + + /// Convenience: load a WASM provider component from a file path. + pub fn from_file( + path: &Path, + engine: Arc, + ) -> Result> { + let bytes = + std::fs::read(path).map_err(|e| format!("failed to read {}: {e}", path.display()))?; + Self::from_bytes(&bytes, engine) + } +} + +impl Provider for WasmProviderBridge { + fn name(&self) -> &str { + &self.name + } + + fn get_info(&self) -> ProviderInfo { + self.info.clone() + } + + fn list_models( + &self, + ) -> Pin, ProviderError>> + Send + '_>> { + Box::pin(async move { + let engine = Arc::clone(&self.engine); + let component = self.component.clone(); // Component is Arc-backed, cheap clone + + let result_bytes = + tokio::task::spawn_blocking(move || call_list_models(&engine, &component)) + .await + .map_err(|e| { + wasm_provider_error(format!("WASM provider list-models task panicked: {e}")) + })? 
+ .map_err(|e| wasm_provider_error(format!("WASM list-models failed: {e}")))?; + + let models: Vec = serde_json::from_slice(&result_bytes).map_err(|e| { + wasm_provider_error(format!( + "WASM provider: failed to deserialize Vec: {e}" + )) + })?; + + Ok(models) + }) + } + + fn complete( + &self, + request: ChatRequest, + ) -> Pin> + Send + '_>> { + Box::pin(async move { + // Serialize the ChatRequest to JSON bytes for the WASM guest. + let request_bytes = serde_json::to_vec(&request).map_err(|e| { + wasm_provider_error(format!( + "WASM provider: failed to serialize ChatRequest: {e}" + )) + })?; + + let engine = Arc::clone(&self.engine); + let component = self.component.clone(); + + let result_bytes = tokio::task::spawn_blocking(move || { + call_complete(&engine, &component, request_bytes) + }) + .await + .map_err(|e| wasm_provider_error(format!("WASM provider complete task panicked: {e}")))? + .map_err(|e| wasm_provider_error(format!("WASM complete failed: {e}")))?; + + let response: ChatResponse = serde_json::from_slice(&result_bytes).map_err(|e| { + wasm_provider_error(format!( + "WASM provider: failed to deserialize ChatResponse: {e}" + )) + })?; + + Ok(response) + }) + } + + fn parse_tool_calls(&self, response: &ChatResponse) -> Vec { + // Serialize the host ChatResponse for the WASM guest. + let response_bytes = match serde_json::to_vec(response) { + Ok(b) => b, + Err(_) => return vec![], + }; + + // Call WASM synchronously. parse_tool_calls is not async in the trait, + // and WASM parse-tool-calls is pure computation (no I/O), so this is acceptable. + let result_bytes = + match call_parse_tool_calls(&self.engine, &self.component, response_bytes) { + Ok(b) => b, + Err(_) => return vec![], + }; + + // Deserialize the result bytes as Vec. + // The WASM guest serializes its tool-call values as JSON; they must + // share the same shape as the host's ToolCall (id, name, arguments fields). 
+ serde_json::from_slice::>(&result_bytes).unwrap_or_default() + } +} + +#[cfg(test)] +mod tests { + use super::*; + use std::collections::HashMap; + use std::sync::Arc; + + use crate::messages::{Message, MessageContent, Role}; + + /// Compile-time check: WasmProviderBridge satisfies Arc. + /// + /// If the trait impl is broken this fails at compile time. + #[allow(dead_code)] + fn _assert_wasm_provider_bridge_is_provider(bridge: WasmProviderBridge) { + let _: Arc = Arc::new(bridge); + } + + /// Helper: read the echo-provider.wasm fixture bytes. + /// + /// The fixture lives at the workspace root under `tests/fixtures/wasm/`. + /// CARGO_MANIFEST_DIR points to `amplifier-core/crates/amplifier-core`, + /// so we walk up to the workspace root first. + fn echo_provider_wasm_bytes() -> Vec { + let manifest = std::path::Path::new(env!("CARGO_MANIFEST_DIR")); + // Two candidates because the workspace root may be at different depths + // depending on how the repo is checked out: + // - 3 levels up: used as a git submodule (super-repo/amplifier-core/crates/amplifier-core) + // - 2 levels up: standalone checkout (amplifier-core/crates/amplifier-core) + let candidates = [ + manifest.join("../../../tests/fixtures/wasm/echo-provider.wasm"), + manifest.join("../../tests/fixtures/wasm/echo-provider.wasm"), + ]; + for p in &candidates { + if p.exists() { + return std::fs::read(p) + .unwrap_or_else(|e| panic!("Failed to read echo-provider.wasm at {p:?}: {e}")); + } + } + panic!( + "echo-provider.wasm not found. Tried: {:?}", + candidates + .iter() + .map(|p| p.display().to_string()) + .collect::>() + ); + } + + /// Helper: create a shared engine with component model enabled. + fn make_engine() -> Arc { + let mut config = wasmtime::Config::new(); + config.wasm_component_model(true); + Arc::new(Engine::new(&config).expect("engine creation failed")) + } + + /// E2E: load echo-provider.wasm and verify name(). 
+ #[test] + fn load_echo_provider_name() { + let engine = make_engine(); + let bytes = echo_provider_wasm_bytes(); + let bridge = + WasmProviderBridge::from_bytes(&bytes, engine).expect("from_bytes should succeed"); + assert_eq!(bridge.name(), "echo-provider"); + } + + /// E2E: get_info() returns expected provider metadata. + #[test] + fn echo_provider_get_info() { + let engine = make_engine(); + let bytes = echo_provider_wasm_bytes(); + let bridge = + WasmProviderBridge::from_bytes(&bytes, engine).expect("from_bytes should succeed"); + + let info = bridge.get_info(); + assert_eq!( + info.id, "echo-provider", + "expected info.id == 'echo-provider'" + ); + assert_eq!( + info.display_name, "Echo Provider", + "expected info.display_name == 'Echo Provider'" + ); + } + + /// E2E: list_models() returns at least one model with id "echo-model". + #[tokio::test] + async fn echo_provider_list_models() { + let engine = make_engine(); + let bytes = echo_provider_wasm_bytes(); + let bridge = + WasmProviderBridge::from_bytes(&bytes, engine).expect("from_bytes should succeed"); + + let models = bridge + .list_models() + .await + .expect("list_models should succeed"); + + assert!(!models.is_empty(), "expected at least one model"); + assert!( + models.iter().any(|m| m.id == "echo-model"), + "expected a model with id 'echo-model', got: {:?}", + models.iter().map(|m| &m.id).collect::>() + ); + } + + /// E2E: complete() with minimal request returns a ChatResponse with content. 
+ #[tokio::test] + async fn echo_provider_complete() { + let engine = make_engine(); + let bytes = echo_provider_wasm_bytes(); + let bridge = + WasmProviderBridge::from_bytes(&bytes, engine).expect("from_bytes should succeed"); + + let request = ChatRequest { + messages: vec![Message { + role: Role::User, + content: MessageContent::Text("hello".to_string()), + name: None, + tool_call_id: None, + metadata: None, + extensions: HashMap::new(), + }], + tools: None, + response_format: None, + temperature: None, + top_p: None, + max_output_tokens: None, + conversation_id: None, + stream: None, + metadata: None, + model: Some("echo-model".to_string()), + tool_choice: None, + stop: None, + reasoning_effort: None, + timeout: None, + extensions: HashMap::new(), + }; + + let response = bridge + .complete(request) + .await + .expect("complete should succeed"); + + assert!( + !response.content.is_empty(), + "expected non-empty content in ChatResponse" + ); + } +} diff --git a/crates/amplifier-core/src/bridges/wasm_tool.rs b/crates/amplifier-core/src/bridges/wasm_tool.rs index 1f30655..256c85e 100644 --- a/crates/amplifier-core/src/bridges/wasm_tool.rs +++ b/crates/amplifier-core/src/bridges/wasm_tool.rs @@ -1,48 +1,200 @@ -//! WASM bridge for sandboxed tool modules. +//! WASM bridge for sandboxed tool modules (Component Model). //! -//! [`WasmToolBridge`] loads a compiled WASM module via wasmtime and -//! implements the [`Tool`] trait, enabling sandboxed in-process tool -//! execution with the same proto message format as gRPC. +//! [`WasmToolBridge`] loads a WASM Component via wasmtime and implements the +//! [`Tool`] trait, enabling sandboxed in-process tool execution. The guest +//! exports `get-spec` (returns JSON-serialized `ToolSpec`) and `execute` +//! (accepts JSON input, returns JSON `ToolResult`). //! //! Gated behind the `wasm` feature flag. 
-use std::collections::HashMap; use std::future::Future; +use std::path::Path; use std::pin::Pin; - -use serde_json::Value; +use std::sync::Arc; use crate::errors::ToolError; use crate::messages::ToolSpec; use crate::models::ToolResult; use crate::traits::Tool; +use serde_json::Value; +use wasmtime::component::{Component, Linker}; +use wasmtime::{Engine, Store}; + +/// The WIT interface name used by `cargo component` for tool exports. +const INTERFACE_NAME: &str = "amplifier:modules/tool@1.0.0"; + +/// Store state for wasmtime, holding the WASI context required by +/// `cargo component`-generated modules. +pub(crate) struct WasmState { + wasi: wasmtime_wasi::WasiCtx, + table: wasmtime::component::ResourceTable, + limiter: wasmtime::StoreLimits, +} + +impl wasmtime_wasi::WasiView for WasmState { + fn ctx(&mut self) -> wasmtime_wasi::WasiCtxView<'_> { + wasmtime_wasi::WasiCtxView { + ctx: &mut self.wasi, + table: &mut self.table, + } + } +} -/// A bridge that loads a WASM module and exposes it as a native [`Tool`]. +/// A bridge that loads a WASM Component and exposes it as a native [`Tool`]. /// -/// The WASM module is compiled once via wasmtime and can be instantiated -/// for each execution. Uses the same proto message serialization format -/// as gRPC bridges for consistency. +/// The component is compiled once and can be instantiated for each execution. +/// `get-spec` is called at construction time; `execute` is called per invocation +/// inside a `spawn_blocking` task (wasmtime is synchronous). pub struct WasmToolBridge { - _engine: wasmtime::Engine, - _module: wasmtime::Module, + engine: Arc, + component: Component, name: String, + spec: ToolSpec, +} + +/// Create a linker with WASI imports registered and a store with WASI context. 
+/// +/// Applies the given [`WasmLimits`](super::WasmLimits): +/// - **C-02**: CPU time limit via epoch interruption deadline +/// - **C-02**: Memory limit via [`StoreLimitsBuilder`](wasmtime::StoreLimitsBuilder) +/// - **H-01**: Restricted WASI capabilities — null I/O, no inherited env/args +pub(crate) fn create_linker_and_store( + engine: &Engine, + limits: &super::WasmLimits, +) -> Result<(Linker, Store), Box> { + let mut linker = Linker::::new(engine); + wasmtime_wasi::p2::add_to_linker_sync(&mut linker)?; + + // H-01: Restrict WASI capabilities — null I/O, no inherited env/args + let wasi = wasmtime_wasi::WasiCtxBuilder::new() + .stdin(wasmtime_wasi::p2::pipe::ClosedInputStream) + .stdout(wasmtime_wasi::p2::pipe::SinkOutputStream) + .stderr(wasmtime_wasi::p2::pipe::SinkOutputStream) + .build(); + + let table = wasmtime::component::ResourceTable::new(); + + // C-02: Memory limit via StoreLimitsBuilder + let limiter = wasmtime::StoreLimitsBuilder::new() + .memory_size(limits.max_memory_bytes) + .build(); + + let mut store = Store::new( + engine, + WasmState { + wasi, + table, + limiter, + }, + ); + + // C-02: CPU time limit via epoch interruption + store.set_epoch_deadline(limits.max_epoch_ticks); + + // C-02: Wire up the limiter so the store enforces memory limits + store.limiter(|state| &mut state.limiter); + + Ok((linker, store)) +} + +/// Look up a typed function export from a component instance. +/// +/// Component Model exports may be at the root level or nested inside an +/// exported interface instance. This helper tries: +/// 1. Direct root-level export by `func_name` +/// 2. 
Nested inside the [`INTERFACE_NAME`] exported instance +fn get_typed_func_from_instance( + instance: &wasmtime::component::Instance, + store: &mut Store, + func_name: &str, +) -> Result, Box> +where + Params: wasmtime::component::Lower + wasmtime::component::ComponentNamedList, + Results: wasmtime::component::Lift + wasmtime::component::ComponentNamedList, +{ + // Try direct root-level export first. + if let Ok(f) = instance.get_typed_func::(&mut *store, func_name) { + return Ok(f); + } + + // Try nested inside the interface-exported instance. + let iface_idx = instance + .get_export_index(&mut *store, None, INTERFACE_NAME) + .ok_or_else(|| format!("export instance '{INTERFACE_NAME}' not found"))?; + let func_idx = instance + .get_export_index(&mut *store, Some(&iface_idx), func_name) + .ok_or_else(|| format!("export function '{func_name}' not found in '{INTERFACE_NAME}'"))?; + let func = instance + .get_typed_func::(&mut *store, &func_idx) + .map_err(|e| format!("typed func lookup failed for '{func_name}': {e}"))?; + Ok(func) +} + +/// Helper: call the `get-spec` export on a fresh component instance. +fn call_get_spec( + engine: &Engine, + component: &Component, +) -> Result, Box> { + let (linker, mut store) = create_linker_and_store(engine, &super::WasmLimits::default())?; + let instance = linker.instantiate(&mut store, component)?; + + let func = get_typed_func_from_instance::<(), (Vec,)>(&instance, &mut store, "get-spec")?; + let (spec_bytes,) = func.call(&mut store, ())?; + Ok(spec_bytes) +} + +/// Helper: call the `execute` export on a fresh component instance. 
+fn call_execute( + engine: &Engine, + component: &Component, + input_bytes: Vec, +) -> Result, Box> { + let (linker, mut store) = create_linker_and_store(engine, &super::WasmLimits::default())?; + let instance = linker.instantiate(&mut store, component)?; + + let func = get_typed_func_from_instance::<(Vec,), (Result, String>,)>( + &instance, &mut store, "execute", + )?; + let (result,) = func.call(&mut store, (input_bytes,))?; + match result { + Ok(bytes) => Ok(bytes), + Err(err) => Err(err.into()), + } } impl WasmToolBridge { - /// Load a WASM tool from raw bytes. + /// Load a WASM tool component from raw bytes. /// - /// Compiles the WASM module and prepares it for execution. - pub fn from_bytes(wasm_bytes: &[u8]) -> Result> { - let engine = wasmtime::Engine::default(); - let module = wasmtime::Module::new(&engine, wasm_bytes)?; - let name = module.name().unwrap_or("wasm-tool").to_string(); + /// Compiles the Component, instantiates it once to call `get-spec`, + /// and caches the resulting name and spec. + pub fn from_bytes( + wasm_bytes: &[u8], + engine: Arc, + ) -> Result> { + let component = Component::new(&engine, wasm_bytes)?; + + // Call get-spec to discover the tool's name and specification. + let spec_bytes = call_get_spec(&engine, &component)?; + let spec: ToolSpec = serde_json::from_slice(&spec_bytes)?; + let name = spec.name.clone(); Ok(Self { - _engine: engine, - _module: module, + engine, + component, name, + spec, }) } + + /// Convenience: load a WASM tool component from a file path. 
+ pub fn from_file( + path: &Path, + engine: Arc, + ) -> Result> { + let bytes = + std::fs::read(path).map_err(|e| format!("failed to read {}: {e}", path.display()))?; + Self::from_bytes(&bytes, engine) + } } impl Tool for WasmToolBridge { @@ -51,31 +203,44 @@ impl Tool for WasmToolBridge { } fn description(&self) -> &str { - "WASM tool module" + self.spec + .description + .as_deref() + .unwrap_or("WASM tool module") } fn get_spec(&self) -> ToolSpec { - ToolSpec { - name: self.name.clone(), - parameters: HashMap::new(), - description: Some("WASM tool module".into()), - extensions: HashMap::new(), - } + self.spec.clone() } fn execute( &self, - _input: Value, + input: Value, ) -> Pin> + Send + '_>> { Box::pin(async move { - // Phase 5 stub: full WASM ABI integration is future work. - // The module is compiled and ready; execution requires defining - // the host↔guest function interface (imports/exports). - Err(ToolError::Other { - message: "WasmToolBridge::execute() not yet implemented: \ - WASM ABI host↔guest interface is future work" - .into(), - }) + let input_bytes = serde_json::to_vec(&input).map_err(|e| ToolError::Other { + message: format!("failed to serialize input: {e}"), + })?; + + let engine = Arc::clone(&self.engine); + let component = self.component.clone(); // Component is Arc-backed, cheap clone + + let result_bytes = + tokio::task::spawn_blocking(move || call_execute(&engine, &component, input_bytes)) + .await + .map_err(|e| ToolError::Other { + message: format!("WASM execution task panicked: {e}"), + })? 
+ .map_err(|e| ToolError::Other { + message: format!("WASM execute failed: {e}"), + })?; + + let tool_result: ToolResult = + serde_json::from_slice(&result_bytes).map_err(|e| ToolError::Other { + message: format!("failed to deserialize ToolResult: {e}"), + })?; + + Ok(tool_result) }) } } @@ -85,14 +250,82 @@ mod tests { use super::*; use std::sync::Arc; - #[allow(dead_code)] - fn assert_tool_trait_object(_: Arc) {} - /// Compile-time check: WasmToolBridge satisfies Arc. + /// + /// Note: the integration test in `tests/wasm_tool_e2e.rs` has an equivalent + /// check from the *public* API surface. Both are intentional — this one + /// catches breakage during unit-test runs without needing the integration + /// test, while the integration test verifies the public export path. #[allow(dead_code)] - fn wasm_tool_bridge_is_tool() { - fn _check(bridge: WasmToolBridge) { - assert_tool_trait_object(Arc::new(bridge)); + fn _assert_wasm_tool_bridge_is_tool(bridge: WasmToolBridge) { + let _: Arc = Arc::new(bridge); + } + + /// Helper: read the echo-tool.wasm fixture bytes. + /// + /// The fixture lives at the workspace root under `tests/fixtures/wasm/`. + /// CARGO_MANIFEST_DIR points to `amplifier-core/crates/amplifier-core`, + /// so we walk up to the workspace root first. 
+ fn echo_tool_wasm_bytes() -> Vec { + let manifest = std::path::Path::new(env!("CARGO_MANIFEST_DIR")); + // Two candidates because the workspace root may be at different depths + // depending on how the repo is checked out: + // - 3 levels up: used as a git submodule (super-repo/amplifier-core/crates/amplifier-core) + // - 2 levels up: standalone checkout (amplifier-core/crates/amplifier-core) + let candidates = [ + manifest.join("../../../tests/fixtures/wasm/echo-tool.wasm"), + manifest.join("../../tests/fixtures/wasm/echo-tool.wasm"), + ]; + for p in &candidates { + if p.exists() { + return std::fs::read(p) + .unwrap_or_else(|e| panic!("Failed to read echo-tool.wasm at {p:?}: {e}")); + } } + panic!( + "echo-tool.wasm not found. Tried: {:?}", + candidates + .iter() + .map(|p| p.display().to_string()) + .collect::>() + ); + } + + /// Helper: create a shared engine with component model enabled. + fn make_engine() -> Arc { + let mut config = wasmtime::Config::new(); + config.wasm_component_model(true); + Arc::new(Engine::new(&config).expect("engine creation failed")) + } + + #[test] + fn load_echo_tool_from_bytes() { + let engine = make_engine(); + let bytes = echo_tool_wasm_bytes(); + let bridge = WasmToolBridge::from_bytes(&bytes, engine).expect("from_bytes should succeed"); + + assert_eq!(bridge.name(), "echo-tool"); + + let spec = bridge.get_spec(); + assert_eq!(spec.name, "echo-tool"); + assert_eq!( + spec.description.as_deref(), + Some("Echoes input back as output") + ); + assert!(spec.parameters.contains_key("type")); + } + + #[tokio::test] + async fn echo_tool_execute_roundtrip() { + let engine = make_engine(); + let bytes = echo_tool_wasm_bytes(); + let bridge = WasmToolBridge::from_bytes(&bytes, engine).expect("from_bytes should succeed"); + + let input = serde_json::json!({"message": "hello", "count": 42}); + let result = bridge.execute(input.clone()).await; + let result = result.expect("execute should succeed"); + + assert!(result.success); + 
assert_eq!(result.output, Some(input)); } } diff --git a/crates/amplifier-core/src/generated/amplifier.module.rs b/crates/amplifier-core/src/generated/amplifier.module.rs index 1d3a3e6..615c8eb 100644 --- a/crates/amplifier-core/src/generated/amplifier.module.rs +++ b/crates/amplifier-core/src/generated/amplifier.module.rs @@ -326,12 +326,12 @@ pub struct Usage { pub completion_tokens: i32, #[prost(int32, tag = "3")] pub total_tokens: i32, - #[prost(int32, tag = "4")] - pub reasoning_tokens: i32, - #[prost(int32, tag = "5")] - pub cache_read_tokens: i32, - #[prost(int32, tag = "6")] - pub cache_creation_tokens: i32, + #[prost(int32, optional, tag = "4")] + pub reasoning_tokens: ::core::option::Option, + #[prost(int32, optional, tag = "5")] + pub cache_read_tokens: ::core::option::Option, + #[prost(int32, optional, tag = "6")] + pub cache_creation_tokens: ::core::option::Option, } #[derive(Clone, PartialEq, ::prost::Message)] pub struct Degradation { @@ -419,8 +419,8 @@ pub struct HookResult { #[prost(string, repeated, tag = "8")] pub approval_options: ::prost::alloc::vec::Vec<::prost::alloc::string::String>, /// Default: 300.0 seconds (5 minutes). 
- #[prost(double, tag = "9")] - pub approval_timeout: f64, + #[prost(double, optional, tag = "9")] + pub approval_timeout: ::core::option::Option, #[prost(enumeration = "ApprovalDefault", tag = "10")] pub approval_default: i32, #[prost(bool, tag = "11")] @@ -474,8 +474,8 @@ pub struct ApprovalRequest { pub details_json: ::prost::alloc::string::String, #[prost(string, tag = "4")] pub risk_level: ::prost::alloc::string::String, - #[prost(double, tag = "5")] - pub timeout: f64, + #[prost(double, optional, tag = "5")] + pub timeout: ::core::option::Option, } #[derive(Clone, PartialEq, ::prost::Message)] pub struct ApprovalResponse { diff --git a/crates/amplifier-core/src/generated/conversions.rs b/crates/amplifier-core/src/generated/conversions.rs index e11d166..1df70e9 100644 --- a/crates/amplifier-core/src/generated/conversions.rs +++ b/crates/amplifier-core/src/generated/conversions.rs @@ -118,9 +118,6 @@ impl From for crate::models::ModelInfo { impl From for super::amplifier_module::Usage { fn from(native: crate::messages::Usage) -> Self { - // NOTE: Optional token counts (reasoning, cache_read, cache_write) use 0 as sentinel - // for 'not reported' because proto uses bare int32, not optional int32. This means - // Some(0) and None are indistinguishable. Fix requires proto schema change. 
Self { prompt_tokens: i32::try_from(native.input_tokens).unwrap_or_else(|_| { log::warn!( @@ -143,48 +140,917 @@ impl From for super::amplifier_module::Usage { ); i32::MAX }), - // TODO(grpc-v2): proto uses bare int32 — Some(0) and None are indistinguishable - reasoning_tokens: native.reasoning_tokens.unwrap_or(0) as i32, - // TODO(grpc-v2): proto uses bare int32 — Some(0) and None are indistinguishable - cache_read_tokens: native.cache_read_tokens.unwrap_or(0) as i32, - // TODO(grpc-v2): proto uses bare int32 — Some(0) and None are indistinguishable - cache_creation_tokens: native.cache_write_tokens.unwrap_or(0) as i32, + reasoning_tokens: native.reasoning_tokens.map(|v| { + i32::try_from(v).unwrap_or_else(|_| { + log::warn!("reasoning_tokens {} overflows i32, clamping to i32::MAX", v); + i32::MAX + }) + }), + cache_read_tokens: native.cache_read_tokens.map(|v| { + i32::try_from(v).unwrap_or_else(|_| { + log::warn!( + "cache_read_tokens {} overflows i32, clamping to i32::MAX", + v + ); + i32::MAX + }) + }), + cache_creation_tokens: native.cache_write_tokens.map(|v| { + i32::try_from(v).unwrap_or_else(|_| { + log::warn!( + "cache_write_tokens {} overflows i32, clamping to i32::MAX", + v + ); + i32::MAX + }) + }), } } } impl From for crate::messages::Usage { fn from(proto: super::amplifier_module::Usage) -> Self { - // NOTE: 0 values for reasoning/cache tokens are treated as 'not reported' (None). - // This is a known proto limitation. 
Self { input_tokens: i64::from(proto.prompt_tokens), output_tokens: i64::from(proto.completion_tokens), total_tokens: i64::from(proto.total_tokens), - reasoning_tokens: if proto.reasoning_tokens == 0 { + reasoning_tokens: proto.reasoning_tokens.map(i64::from), + cache_read_tokens: proto.cache_read_tokens.map(i64::from), + cache_write_tokens: proto.cache_creation_tokens.map(i64::from), + extensions: HashMap::new(), + } + } +} + +// --------------------------------------------------------------------------- +// Role conversion helpers +// --------------------------------------------------------------------------- + +use std::collections::HashMap; + +use super::amplifier_module::Role as ProtoRole; +use super::amplifier_module::Visibility as ProtoVisibility; +use crate::messages::Role; + +/// Convert a native [`crate::messages::Role`] to its proto `i32` equivalent. +pub fn native_role_to_proto(role: Role) -> i32 { + match role { + Role::System => ProtoRole::System as i32, + Role::User => ProtoRole::User as i32, + Role::Assistant => ProtoRole::Assistant as i32, + Role::Tool => ProtoRole::Tool as i32, + Role::Function => ProtoRole::Function as i32, + Role::Developer => ProtoRole::Developer as i32, + } +} + +/// Convert a proto `i32` role value to a native [`crate::messages::Role`]. +/// +/// `Unspecified` (0) and unknown values default to [`crate::messages::Role::User`] +/// with a warning log. 
+pub fn proto_role_to_native(proto_role: i32) -> Role { + match ProtoRole::try_from(proto_role) { + Ok(ProtoRole::System) => Role::System, + Ok(ProtoRole::User) => Role::User, + Ok(ProtoRole::Assistant) => Role::Assistant, + Ok(ProtoRole::Tool) => Role::Tool, + Ok(ProtoRole::Function) => Role::Function, + Ok(ProtoRole::Developer) => Role::Developer, + Ok(ProtoRole::Unspecified) => { + log::warn!("Proto role Unspecified (0), defaulting to User"); + Role::User + } + Err(_) => { + log::warn!("Unknown proto role value {proto_role}, defaulting to User"); + Role::User + } + } +} + +// --------------------------------------------------------------------------- +// Visibility conversion helpers (private) +// --------------------------------------------------------------------------- + +fn native_visibility_to_proto(vis: &Option) -> i32 { + match vis { + None => ProtoVisibility::Unspecified as i32, + Some(crate::messages::Visibility::Internal) => ProtoVisibility::LlmOnly as i32, + Some(crate::messages::Visibility::Developer) => ProtoVisibility::All as i32, + Some(crate::messages::Visibility::User) => ProtoVisibility::UserOnly as i32, + } +} + +fn proto_visibility_to_native(vis: i32) -> Option { + match ProtoVisibility::try_from(vis) { + Ok(ProtoVisibility::LlmOnly) => Some(crate::messages::Visibility::Internal), + Ok(ProtoVisibility::All) => Some(crate::messages::Visibility::Developer), + Ok(ProtoVisibility::UserOnly) => Some(crate::messages::Visibility::User), + _ => None, // Unspecified or unknown + } +} + +// --------------------------------------------------------------------------- +// ContentBlock conversion helpers (private) +// --------------------------------------------------------------------------- + +fn native_content_block_to_proto( + block: crate::messages::ContentBlock, +) -> super::amplifier_module::ContentBlock { + use super::amplifier_module::content_block::Block; + use crate::messages::ContentBlock; + + let (proto_block, vis) = match block { + 
ContentBlock::Text { + text, visibility, .. + } => ( + Block::TextBlock(super::amplifier_module::TextBlock { text }), + visibility, + ), + ContentBlock::Thinking { + thinking, + signature, + visibility, + content, + .. + } => ( + Block::ThinkingBlock(super::amplifier_module::ThinkingBlock { + thinking, + signature: signature.unwrap_or_default(), + content: content + .map(|v| { + serde_json::to_string(&v).unwrap_or_else(|e| { + log::warn!("Failed to serialize Thinking content to JSON: {e}"); + String::new() + }) + }) + .unwrap_or_default(), + }), + visibility, + ), + ContentBlock::RedactedThinking { + data, visibility, .. + } => ( + Block::RedactedThinkingBlock(super::amplifier_module::RedactedThinkingBlock { data }), + visibility, + ), + ContentBlock::ToolCall { + id, + name, + input, + visibility, + .. + } => ( + Block::ToolCallBlock(super::amplifier_module::ToolCallBlock { + id, + name, + input_json: serde_json::to_string(&input).unwrap_or_else(|e| { + log::warn!("Failed to serialize ToolCall input to JSON: {e}"); + String::new() + }), + }), + visibility, + ), + ContentBlock::ToolResult { + tool_call_id, + output, + visibility, + .. + } => ( + Block::ToolResultBlock(super::amplifier_module::ToolResultBlock { + tool_call_id, + output_json: serde_json::to_string(&output).unwrap_or_else(|e| { + log::warn!("Failed to serialize ToolResult output to JSON: {e}"); + String::new() + }), + }), + visibility, + ), + ContentBlock::Image { + source, visibility, .. 
+ } => ( + Block::ImageBlock(super::amplifier_module::ImageBlock { + media_type: source + .get("media_type") + .and_then(|v| v.as_str()) + .unwrap_or_default() + .to_string(), + data: source + .get("data") + .and_then(|v| v.as_str()) + .unwrap_or_default() + .as_bytes() + .to_vec(), + source_json: serde_json::to_string(&source).unwrap_or_else(|e| { + log::warn!("Failed to serialize Image source to JSON: {e}"); + String::new() + }), + }), + visibility, + ), + ContentBlock::Reasoning { + content, + summary, + visibility, + .. + } => ( + Block::ReasoningBlock(super::amplifier_module::ReasoningBlock { + content: content + .into_iter() + .map(|v| { + serde_json::to_string(&v).unwrap_or_else(|e| { + log::warn!("Failed to serialize Reasoning content item to JSON: {e}"); + String::new() + }) + }) + .collect(), + summary: summary + .into_iter() + .map(|v| { + serde_json::to_string(&v).unwrap_or_else(|e| { + log::warn!("Failed to serialize Reasoning summary item to JSON: {e}"); + String::new() + }) + }) + .collect(), + }), + visibility, + ), + }; + + super::amplifier_module::ContentBlock { + block: Some(proto_block), + visibility: native_visibility_to_proto(&vis), + } +} + +fn proto_content_block_to_native( + block: super::amplifier_module::ContentBlock, +) -> crate::messages::ContentBlock { + use super::amplifier_module::content_block::Block; + use crate::messages::ContentBlock; + + let vis = proto_visibility_to_native(block.visibility); + + match block.block { + Some(Block::TextBlock(tb)) => ContentBlock::Text { + text: tb.text, + visibility: vis, + extensions: HashMap::new(), + }, + Some(Block::ThinkingBlock(tb)) => ContentBlock::Thinking { + thinking: tb.thinking, + signature: if tb.signature.is_empty() { None } else { - Some(i64::from(proto.reasoning_tokens)) + Some(tb.signature) }, - cache_read_tokens: if proto.cache_read_tokens == 0 { + visibility: vis, + content: if tb.content.is_empty() { None } else { - Some(i64::from(proto.cache_read_tokens)) + 
serde_json::from_str(&tb.content) + .map_err(|e| { + log::warn!("Failed to deserialize ThinkingBlock content: {e}"); + e + }) + .ok() }, - cache_write_tokens: if proto.cache_creation_tokens == 0 { - None + extensions: HashMap::new(), + }, + Some(Block::RedactedThinkingBlock(rb)) => ContentBlock::RedactedThinking { + data: rb.data, + visibility: vis, + extensions: HashMap::new(), + }, + Some(Block::ToolCallBlock(tc)) => ContentBlock::ToolCall { + id: tc.id, + name: tc.name, + input: serde_json::from_str(&tc.input_json).unwrap_or_else(|e| { + log::warn!("Failed to deserialize ToolCallBlock input_json: {e}"); + Default::default() + }), + visibility: vis, + extensions: HashMap::new(), + }, + Some(Block::ToolResultBlock(tr)) => ContentBlock::ToolResult { + tool_call_id: tr.tool_call_id, + output: serde_json::from_str(&tr.output_json).unwrap_or_else(|e| { + log::warn!("Failed to deserialize ToolResultBlock output_json: {e}"); + serde_json::Value::Null + }), + visibility: vis, + extensions: HashMap::new(), + }, + Some(Block::ImageBlock(ib)) => ContentBlock::Image { + source: if ib.source_json.is_empty() { + HashMap::new() } else { - Some(i64::from(proto.cache_creation_tokens)) + serde_json::from_str(&ib.source_json).unwrap_or_else(|e| { + log::warn!("Failed to deserialize ImageBlock source_json: {e}"); + Default::default() + }) }, - extensions: std::collections::HashMap::new(), + visibility: vis, + extensions: HashMap::new(), + }, + Some(Block::ReasoningBlock(rb)) => ContentBlock::Reasoning { + content: rb + .content + .into_iter() + .filter_map(|s| { + serde_json::from_str(&s) + .map_err(|e| { + log::warn!("Failed to deserialize ReasoningBlock content item: {e}"); + e + }) + .ok() + }) + .collect(), + summary: rb + .summary + .into_iter() + .filter_map(|s| { + serde_json::from_str(&s) + .map_err(|e| { + log::warn!("Failed to deserialize ReasoningBlock summary item: {e}"); + e + }) + .ok() + }) + .collect(), + visibility: vis, + extensions: HashMap::new(), + }, + None => 
{ + log::warn!("Proto ContentBlock has no block variant set, falling back to empty Text"); + ContentBlock::Text { + text: String::new(), + visibility: vis, + extensions: HashMap::new(), + } } } } +// --------------------------------------------------------------------------- +// Message conversion functions (public) +// --------------------------------------------------------------------------- + +/// Convert a native [`crate::messages::Message`] to its proto equivalent. +pub fn native_message_to_proto(msg: crate::messages::Message) -> super::amplifier_module::Message { + use super::amplifier_module::message; + + let content = match msg.content { + crate::messages::MessageContent::Text(s) => Some(message::Content::TextContent(s)), + crate::messages::MessageContent::Blocks(blocks) => { + let proto_blocks: Vec<_> = blocks + .into_iter() + .map(native_content_block_to_proto) + .collect(); + Some(message::Content::BlockContent( + super::amplifier_module::ContentBlockList { + blocks: proto_blocks, + }, + )) + } + }; + + super::amplifier_module::Message { + role: native_role_to_proto(msg.role), + content, + name: msg.name.unwrap_or_default(), + tool_call_id: msg.tool_call_id.unwrap_or_default(), + metadata_json: msg + .metadata + .map(|m| { + serde_json::to_string(&m).unwrap_or_else(|e| { + log::warn!("Failed to serialize Message metadata to JSON: {e}"); + String::new() + }) + }) + .unwrap_or_default(), + } +} + +/// Convert a proto [`super::amplifier_module::Message`] to a native +/// [`crate::messages::Message`]. +/// +/// Returns `Err` if the proto message has no content (the `oneof content` +/// field is `None`). 
+pub fn proto_message_to_native( + proto: super::amplifier_module::Message, +) -> Result<crate::messages::Message, String> { + let content = match proto.content { + None => return Err("Message has no content".to_string()), + Some(super::amplifier_module::message::Content::TextContent(s)) => { + crate::messages::MessageContent::Text(s) + } + Some(super::amplifier_module::message::Content::BlockContent(bl)) => { + crate::messages::MessageContent::Blocks( + bl.blocks + .into_iter() + .map(proto_content_block_to_native) + .collect(), + ) + } + }; + + Ok(crate::messages::Message { + role: proto_role_to_native(proto.role), + content, + name: if proto.name.is_empty() { + None + } else { + Some(proto.name) + }, + tool_call_id: if proto.tool_call_id.is_empty() { + None + } else { + Some(proto.tool_call_id) + }, + metadata: if proto.metadata_json.is_empty() { + None + } else { + serde_json::from_str(&proto.metadata_json) + .map_err(|e| { + log::warn!("Failed to deserialize Message metadata_json: {e}"); + e + }) + .ok() + }, + extensions: HashMap::new(), + }) +} + +// --------------------------------------------------------------------------- +// HookResult conversion functions (public) +// --------------------------------------------------------------------------- + +/// Convert a native [`crate::models::HookResult`] to its proto equivalent. 
+/// +/// # Field mapping notes +/// +/// - `action`: native enum variant → proto `HookAction` i32 +/// - `context_injection_role`: native enum → proto `ContextInjectionRole` i32 +/// - `approval_default`: native `Allow` → proto `Approve`, native `Deny` → proto `Deny` +/// - `user_message_level`: native enum → proto `UserMessageLevel` i32 +/// - `approval_timeout`: native `f64` → proto `Option<f64>` (always `Some`) +/// - `approval_options`: native `Option<Vec<String>>` → proto `Vec<String>` (None → empty) +/// - All `Option<String>` fields → proto `String` (None → empty string) +/// - `data`: `Option<HashMap<String, serde_json::Value>>` serialized to JSON or empty string +/// - `extensions`: dropped (proto has no extensions field) +pub fn native_hook_result_to_proto( + result: &crate::models::HookResult, +) -> super::amplifier_module::HookResult { + use super::amplifier_module; + use crate::models::{ApprovalDefault, ContextInjectionRole, HookAction, UserMessageLevel}; + + let action = match result.action { + HookAction::Continue => amplifier_module::HookAction::Continue as i32, + HookAction::Modify => amplifier_module::HookAction::Modify as i32, + HookAction::Deny => amplifier_module::HookAction::Deny as i32, + HookAction::InjectContext => amplifier_module::HookAction::InjectContext as i32, + HookAction::AskUser => amplifier_module::HookAction::AskUser as i32, + }; + + let context_injection_role = match result.context_injection_role { + ContextInjectionRole::System => amplifier_module::ContextInjectionRole::System as i32, + ContextInjectionRole::User => amplifier_module::ContextInjectionRole::User as i32, + ContextInjectionRole::Assistant => amplifier_module::ContextInjectionRole::Assistant as i32, + }; + + let approval_default = match result.approval_default { + ApprovalDefault::Allow => amplifier_module::ApprovalDefault::Approve as i32, + ApprovalDefault::Deny => amplifier_module::ApprovalDefault::Deny as i32, + }; + + let user_message_level = match result.user_message_level { + UserMessageLevel::Info => 
amplifier_module::UserMessageLevel::Info as i32, + UserMessageLevel::Warning => amplifier_module::UserMessageLevel::Warning as i32, + UserMessageLevel::Error => amplifier_module::UserMessageLevel::Error as i32, + }; + + let data_json = result + .data + .as_ref() + .map(|d| { + serde_json::to_string(d).unwrap_or_else(|e| { + log::warn!("Failed to serialize HookResult data to JSON: {e}"); + String::new() + }) + }) + .unwrap_or_default(); + + amplifier_module::HookResult { + action, + data_json, + reason: result.reason.clone().unwrap_or_default(), + context_injection: result.context_injection.clone().unwrap_or_default(), + context_injection_role, + ephemeral: result.ephemeral, + approval_prompt: result.approval_prompt.clone().unwrap_or_default(), + approval_options: result.approval_options.clone().unwrap_or_default(), + approval_timeout: Some(result.approval_timeout), + approval_default, + suppress_output: result.suppress_output, + user_message: result.user_message.clone().unwrap_or_default(), + user_message_level, + user_message_source: result.user_message_source.clone().unwrap_or_default(), + append_to_last_tool_result: result.append_to_last_tool_result, + } +} + +// --------------------------------------------------------------------------- +// ChatRequest conversion functions (public) +// --------------------------------------------------------------------------- + +/// Convert a native [`crate::messages::ChatRequest`] to its proto equivalent. +/// +/// # Sentinel value conventions +/// +/// Since proto scalar fields (`temperature`, `top_p`, `max_output_tokens`, +/// `stream`, `timeout`, etc.) 
lack `optional`, the following conventions apply +/// for the reverse direction (`proto_chat_request_to_native`): +/// +/// - `temperature`, `top_p`, `timeout` == `0.0` → `None` +/// - `max_output_tokens` == `0` → `None` +/// - Empty strings → `None` for string optionals +/// - `stream == false` → `None` +/// +/// Tests should use non-zero / non-empty values to verify full roundtrip +/// fidelity. +pub fn native_chat_request_to_proto( + request: &crate::messages::ChatRequest, +) -> super::amplifier_module::ChatRequest { + use super::amplifier_module::{ + response_format, JsonSchemaFormat, ResponseFormat as ProtoResponseFormat, ToolSpecProto, + }; + use crate::messages::{ResponseFormat, ToolChoice}; + + super::amplifier_module::ChatRequest { + messages: request + .messages + .iter() + .map(|m| native_message_to_proto(m.clone())) + .collect(), + tools: request + .tools + .as_deref() + .unwrap_or(&[]) + .iter() + .map(|t| ToolSpecProto { + name: t.name.clone(), + description: t.description.clone().unwrap_or_default(), + parameters_json: serde_json::to_string(&t.parameters).unwrap_or_else(|e| { + log::warn!("Failed to serialize ToolSpec parameters to JSON: {e}"); + String::new() + }), + }) + .collect(), + response_format: request.response_format.as_ref().map(|rf| match rf { + ResponseFormat::Text => ProtoResponseFormat { + format: Some(response_format::Format::Text(true)), + }, + ResponseFormat::Json => ProtoResponseFormat { + format: Some(response_format::Format::Json(true)), + }, + ResponseFormat::JsonSchema { schema, strict } => ProtoResponseFormat { + format: Some(response_format::Format::JsonSchema(JsonSchemaFormat { + schema_json: serde_json::to_string(schema).unwrap_or_else(|e| { + log::warn!("Failed to serialize JsonSchema schema to JSON: {e}"); + String::new() + }), + strict: strict.unwrap_or(false), + })), + }, + }), + temperature: request.temperature.unwrap_or(0.0), + top_p: request.top_p.unwrap_or(0.0), + max_output_tokens: request + .max_output_tokens + 
.map(|v| { + i32::try_from(v).unwrap_or_else(|_| { + log::warn!( + "max_output_tokens {} overflows i32, clamping to i32::MAX", + v + ); + i32::MAX + }) + }) + .unwrap_or(0), + conversation_id: request.conversation_id.clone().unwrap_or_default(), + stream: request.stream.unwrap_or(false), + metadata_json: request + .metadata + .as_ref() + .map(|m| { + serde_json::to_string(m).unwrap_or_else(|e| { + log::warn!("Failed to serialize ChatRequest metadata to JSON: {e}"); + String::new() + }) + }) + .unwrap_or_default(), + model: request.model.clone().unwrap_or_default(), + tool_choice: request + .tool_choice + .as_ref() + .map(|tc| match tc { + ToolChoice::String(s) => s.clone(), + ToolChoice::Object(obj) => serde_json::to_string(obj).unwrap_or_else(|e| { + log::warn!("Failed to serialize ToolChoice object to JSON: {e}"); + String::new() + }), + }) + .unwrap_or_default(), + stop: request.stop.clone().unwrap_or_default(), + reasoning_effort: request.reasoning_effort.clone().unwrap_or_default(), + timeout: request.timeout.unwrap_or(0.0), + } +} + +/// Convert a proto [`super::amplifier_module::ChatRequest`] to a native +/// [`crate::messages::ChatRequest`]. +/// +/// See [`native_chat_request_to_proto`] for the sentinel value conventions +/// used for scalar fields that have no `optional` proto modifier. +/// +/// For `tool_choice`: if the stored string parses as a JSON object it is +/// returned as [`crate::messages::ToolChoice::Object`]; otherwise it is +/// treated as a plain [`crate::messages::ToolChoice::String`]. +/// +/// Messages that fail to convert are silently skipped with a warning log. 
+pub fn proto_chat_request_to_native( + request: super::amplifier_module::ChatRequest, +) -> crate::messages::ChatRequest { + use super::amplifier_module::response_format; + use crate::messages::{ResponseFormat, ToolChoice, ToolSpec}; + + crate::messages::ChatRequest { + messages: request + .messages + .into_iter() + .filter_map(|m| { + proto_message_to_native(m) + .map_err(|e| { + log::warn!("Skipping invalid message in ChatRequest: {e}"); + e + }) + .ok() + }) + .collect(), + tools: if request.tools.is_empty() { + None + } else { + Some( + request + .tools + .into_iter() + .map(|t| ToolSpec { + name: t.name, + description: if t.description.is_empty() { + None + } else { + Some(t.description) + }, + parameters: if t.parameters_json.is_empty() { + HashMap::new() + } else { + serde_json::from_str(&t.parameters_json).unwrap_or_else(|e| { + log::warn!("Failed to deserialize ToolSpec parameters_json: {e}"); + Default::default() + }) + }, + extensions: HashMap::new(), + }) + .collect(), + ) + }, + response_format: request.response_format.and_then(|rf| match rf.format { + Some(response_format::Format::Text(_)) => Some(ResponseFormat::Text), + Some(response_format::Format::Json(_)) => Some(ResponseFormat::Json), + Some(response_format::Format::JsonSchema(js)) => { + let schema = if js.schema_json.is_empty() { + HashMap::new() + } else { + serde_json::from_str(&js.schema_json).unwrap_or_else(|e| { + log::warn!("Failed to deserialize JsonSchemaFormat schema_json: {e}"); + Default::default() + }) + }; + Some(ResponseFormat::JsonSchema { + schema, + // proto `strict` is non-optional bool; false → None, true → Some(true) + strict: if js.strict { Some(true) } else { None }, + }) + } + None => None, + }), + // Sentinel: 0.0 means "not set" + temperature: if request.temperature == 0.0 { + None + } else { + Some(request.temperature) + }, + top_p: if request.top_p == 0.0 { + None + } else { + Some(request.top_p) + }, + max_output_tokens: if request.max_output_tokens == 0 { + None + 
} else { + Some(i64::from(request.max_output_tokens)) + }, + conversation_id: if request.conversation_id.is_empty() { + None + } else { + Some(request.conversation_id) + }, + // Sentinel: false means "not set" + stream: if request.stream { Some(true) } else { None }, + metadata: if request.metadata_json.is_empty() { + None + } else { + serde_json::from_str(&request.metadata_json) + .map_err(|e| { + log::warn!("Failed to deserialize ChatRequest metadata_json: {e}"); + e + }) + .ok() + }, + model: if request.model.is_empty() { + None + } else { + Some(request.model) + }, + tool_choice: if request.tool_choice.is_empty() { + None + } else { + // Try to parse as a JSON object; fall back to a plain string value. + match serde_json::from_str::>( + &request.tool_choice, + ) { + Ok(map) => Some(ToolChoice::Object(map.into_iter().collect())), + Err(_) => Some(ToolChoice::String(request.tool_choice)), + } + }, + stop: if request.stop.is_empty() { + None + } else { + Some(request.stop) + }, + reasoning_effort: if request.reasoning_effort.is_empty() { + None + } else { + Some(request.reasoning_effort) + }, + timeout: if request.timeout == 0.0 { + None + } else { + Some(request.timeout) + }, + extensions: HashMap::new(), + } +} + +// --------------------------------------------------------------------------- +// ChatResponse conversion functions (public) +// --------------------------------------------------------------------------- + +/// Convert a native [`crate::messages::ChatResponse`] to its proto equivalent. +/// +/// # Field mapping notes +/// +/// - `content`: the full `Vec` is serialized as a JSON string into +/// the proto `content` field (empty string when no content). +/// - `tool_calls`: each `ToolCall.arguments` map is serialized to +/// `ToolCallMessage.arguments_json`. +/// - `usage`: delegated to the existing `Usage` `From` impl. +/// - `degradation`: mapped field-for-field (extensions are dropped). 
+/// - `finish_reason`: empty string sentinel in proto → `None` on restore. +/// - `metadata`: serialized to `metadata_json`. +/// - `extensions`: dropped (proto has no extensions field). +pub fn native_chat_response_to_proto( + response: &crate::messages::ChatResponse, +) -> super::amplifier_module::ChatResponse { + super::amplifier_module::ChatResponse { + content: serde_json::to_string(&response.content).unwrap_or_else(|e| { + log::warn!("Failed to serialize ChatResponse content to JSON: {e}"); + String::new() + }), + tool_calls: response + .tool_calls + .as_deref() + .unwrap_or(&[]) + .iter() + .map(|tc| super::amplifier_module::ToolCallMessage { + id: tc.id.clone(), + name: tc.name.clone(), + arguments_json: serde_json::to_string(&tc.arguments).unwrap_or_else(|e| { + log::warn!("Failed to serialize ToolCall arguments to JSON: {e}"); + String::new() + }), + }) + .collect(), + usage: response.usage.clone().map(Into::into), + degradation: response + .degradation + .as_ref() + .map(|d| super::amplifier_module::Degradation { + requested: d.requested.clone(), + actual: d.actual.clone(), + reason: d.reason.clone(), + }), + finish_reason: response.finish_reason.clone().unwrap_or_default(), + metadata_json: response + .metadata + .as_ref() + .map(|m| { + serde_json::to_string(m).unwrap_or_else(|e| { + log::warn!("Failed to serialize ChatResponse metadata to JSON: {e}"); + String::new() + }) + }) + .unwrap_or_default(), + } +} + +/// Convert a proto [`super::amplifier_module::ChatResponse`] to a native +/// [`crate::messages::ChatResponse`]. +/// +/// - `content`: JSON-deserialized back to `Vec`; empty string → empty `Vec`. +/// - `tool_calls`: empty repeated field → `None`; non-empty → `Some(Vec)`. +/// - `finish_reason`: empty string → `None`. +/// - `metadata_json`: empty string → `None`. +/// - `extensions`: always empty (proto has no extensions field). 
+pub fn proto_chat_response_to_native( + response: super::amplifier_module::ChatResponse, +) -> crate::messages::ChatResponse { + crate::messages::ChatResponse { + content: if response.content.is_empty() { + Vec::new() + } else { + serde_json::from_str(&response.content).unwrap_or_else(|e| { + log::warn!("Failed to deserialize ChatResponse content: {e}"); + Vec::new() + }) + }, + tool_calls: if response.tool_calls.is_empty() { + None + } else { + Some( + response + .tool_calls + .into_iter() + .map(|tc| crate::messages::ToolCall { + id: tc.id, + name: tc.name, + arguments: if tc.arguments_json.is_empty() { + HashMap::new() + } else { + serde_json::from_str(&tc.arguments_json).unwrap_or_else(|e| { + log::warn!("Failed to deserialize ToolCall arguments_json: {e}"); + Default::default() + }) + }, + extensions: HashMap::new(), + }) + .collect(), + ) + }, + usage: response.usage.map(Into::into), + degradation: response.degradation.map(|d| crate::messages::Degradation { + requested: d.requested, + actual: d.actual, + reason: d.reason, + extensions: HashMap::new(), + }), + finish_reason: if response.finish_reason.is_empty() { + None + } else { + Some(response.finish_reason) + }, + metadata: if response.metadata_json.is_empty() { + None + } else { + serde_json::from_str(&response.metadata_json) + .map_err(|e| { + log::warn!("Failed to deserialize ChatResponse metadata_json: {e}"); + e + }) + .ok() + }, + extensions: HashMap::new(), + } +} + #[cfg(test)] mod tests { use std::collections::HashMap; + use super::super::amplifier_module::Role as ProtoRole; + use crate::messages::Role; + #[test] fn tool_result_roundtrip() { let original = crate::models::ToolResult { @@ -245,7 +1111,7 @@ mod tests { assert_eq!(original.total_tokens, restored.total_tokens); assert_eq!(original.reasoning_tokens, restored.reasoning_tokens); assert_eq!(original.cache_read_tokens, restored.cache_read_tokens); - // cache_write_tokens: None → 0 → None (roundtrip preserves None) + // cache_write_tokens: 
None → None (optional proto preserves None) assert_eq!(restored.cache_write_tokens, None); // extensions are lost in proto roundtrip (proto has no extensions field) assert!(restored.extensions.is_empty()); @@ -272,6 +1138,37 @@ mod tests { assert_eq!(original.cache_write_tokens, restored.cache_write_tokens); } + /// Verify that `Some(0)` survives roundtrip now that proto uses `optional` fields. + #[test] + fn usage_some_zero_roundtrips_correctly() { + let original = crate::messages::Usage { + input_tokens: 100, + output_tokens: 50, + total_tokens: 150, + reasoning_tokens: Some(0), + cache_read_tokens: Some(0), + cache_write_tokens: Some(0), + extensions: HashMap::new(), + }; + let proto: super::super::amplifier_module::Usage = original.clone().into(); + let restored: crate::messages::Usage = proto.into(); + assert_eq!( + restored.reasoning_tokens, + Some(0), + "Some(0) reasoning_tokens must survive roundtrip" + ); + assert_eq!( + restored.cache_read_tokens, + Some(0), + "Some(0) cache_read_tokens must survive roundtrip" + ); + assert_eq!( + restored.cache_write_tokens, + Some(0), + "Some(0) cache_write_tokens must survive roundtrip" + ); + } + // -- E-3: ModelInfo i64→i32 overflow clamps to i32::MAX -- #[test] @@ -348,4 +1245,1209 @@ mod tests { let proto: super::super::amplifier_module::Usage = original.into(); assert_eq!(proto.total_tokens, i32::MAX); } + + // -- Role conversion helper tests -- + + #[test] + fn native_role_to_proto_role_all_variants() { + assert_eq!( + super::native_role_to_proto(Role::System), + ProtoRole::System as i32 + ); + assert_eq!( + super::native_role_to_proto(Role::User), + ProtoRole::User as i32 + ); + assert_eq!( + super::native_role_to_proto(Role::Assistant), + ProtoRole::Assistant as i32 + ); + assert_eq!( + super::native_role_to_proto(Role::Tool), + ProtoRole::Tool as i32 + ); + assert_eq!( + super::native_role_to_proto(Role::Function), + ProtoRole::Function as i32 + ); + assert_eq!( + super::native_role_to_proto(Role::Developer), 
+ ProtoRole::Developer as i32 + ); + } + + #[test] + fn proto_role_to_native_role_all_variants() { + assert_eq!( + super::proto_role_to_native(ProtoRole::System as i32), + Role::System + ); + assert_eq!( + super::proto_role_to_native(ProtoRole::User as i32), + Role::User + ); + assert_eq!( + super::proto_role_to_native(ProtoRole::Assistant as i32), + Role::Assistant + ); + assert_eq!( + super::proto_role_to_native(ProtoRole::Tool as i32), + Role::Tool + ); + assert_eq!( + super::proto_role_to_native(ProtoRole::Function as i32), + Role::Function + ); + assert_eq!( + super::proto_role_to_native(ProtoRole::Developer as i32), + Role::Developer + ); + } + + #[test] + fn proto_role_unspecified_defaults_to_user() { + assert_eq!( + super::proto_role_to_native(ProtoRole::Unspecified as i32), + Role::User + ); + } + + #[test] + fn proto_role_unknown_defaults_to_user() { + // 999 and -1 are not valid proto Role values + assert_eq!(super::proto_role_to_native(999), Role::User); + assert_eq!(super::proto_role_to_native(-1), Role::User); + } + + // -- Message conversion tests -- + + #[test] + fn message_text_content_roundtrip() { + use crate::messages::{Message, MessageContent}; + + let original = Message { + role: Role::User, + content: MessageContent::Text("Hello, world!".into()), + name: None, + tool_call_id: None, + metadata: None, + extensions: HashMap::new(), + }; + let proto = super::native_message_to_proto(original.clone()); + let restored = super::proto_message_to_native(proto).expect("should succeed"); + assert_eq!(restored.role, original.role); + assert_eq!(restored.content, original.content); + assert_eq!(restored.name, None); + assert_eq!(restored.tool_call_id, None); + } + + #[test] + fn message_block_content_text_roundtrip() { + use crate::messages::{ContentBlock, Message, MessageContent}; + + let original = Message { + role: Role::Assistant, + content: MessageContent::Blocks(vec![ContentBlock::Text { + text: "thinking...".into(), + visibility: None, + extensions: 
HashMap::new(), + }]), + name: None, + tool_call_id: None, + metadata: None, + extensions: HashMap::new(), + }; + let proto = super::native_message_to_proto(original.clone()); + let restored = super::proto_message_to_native(proto).expect("should succeed"); + assert_eq!(restored.role, original.role); + assert_eq!(restored.content, original.content); + } + + #[test] + fn message_with_tool_call_id_roundtrip() { + use crate::messages::{Message, MessageContent}; + + let original = Message { + role: Role::Tool, + content: MessageContent::Text("result data".into()), + name: Some("read_file".into()), + tool_call_id: Some("call_123".into()), + metadata: Some(HashMap::from([( + "source".to_string(), + serde_json::json!("test"), + )])), + extensions: HashMap::new(), + }; + let proto = super::native_message_to_proto(original.clone()); + let restored = super::proto_message_to_native(proto).expect("should succeed"); + assert_eq!(restored.role, original.role); + assert_eq!(restored.content, original.content); + assert_eq!(restored.name, Some("read_file".into())); + assert_eq!(restored.tool_call_id, Some("call_123".into())); + assert_eq!(restored.metadata, original.metadata); + } + + // -- Individual ContentBlock variant roundtrip tests -- + + #[test] + fn content_block_thinking_roundtrip() { + use crate::messages::{ContentBlock, Message, MessageContent, Visibility}; + + let original = Message { + role: Role::Assistant, + content: MessageContent::Blocks(vec![ContentBlock::Thinking { + thinking: "Let me reason about this...".into(), + signature: Some("sig_abc123".into()), + visibility: Some(Visibility::Internal), + content: Some(vec![serde_json::json!({"type": "text", "text": "inner"})]), + extensions: HashMap::new(), + }]), + name: None, + tool_call_id: None, + metadata: None, + extensions: HashMap::new(), + }; + let proto = super::native_message_to_proto(original.clone()); + let restored = super::proto_message_to_native(proto).expect("should succeed"); + 
assert_eq!(restored.content, original.content); + } + + #[test] + fn content_block_redacted_thinking_roundtrip() { + use crate::messages::{ContentBlock, Message, MessageContent}; + + let original = Message { + role: Role::Assistant, + content: MessageContent::Blocks(vec![ContentBlock::RedactedThinking { + data: "redacted_data_blob".into(), + visibility: None, + extensions: HashMap::new(), + }]), + name: None, + tool_call_id: None, + metadata: None, + extensions: HashMap::new(), + }; + let proto = super::native_message_to_proto(original.clone()); + let restored = super::proto_message_to_native(proto).expect("should succeed"); + assert_eq!(restored.content, original.content); + } + + #[test] + fn content_block_tool_call_roundtrip() { + use crate::messages::{ContentBlock, Message, MessageContent, Visibility}; + + let original = Message { + role: Role::Assistant, + content: MessageContent::Blocks(vec![ContentBlock::ToolCall { + id: "call_456".into(), + name: "read_file".into(), + input: HashMap::from([("path".to_string(), serde_json::json!("/tmp/test.txt"))]), + visibility: Some(Visibility::Developer), + extensions: HashMap::new(), + }]), + name: None, + tool_call_id: None, + metadata: None, + extensions: HashMap::new(), + }; + let proto = super::native_message_to_proto(original.clone()); + let restored = super::proto_message_to_native(proto).expect("should succeed"); + assert_eq!(restored.content, original.content); + } + + #[test] + fn content_block_tool_result_roundtrip() { + use crate::messages::{ContentBlock, Message, MessageContent}; + + let original = Message { + role: Role::Tool, + content: MessageContent::Blocks(vec![ContentBlock::ToolResult { + tool_call_id: "call_456".into(), + output: serde_json::json!({"status": "ok", "lines": 42}), + visibility: None, + extensions: HashMap::new(), + }]), + name: None, + tool_call_id: None, + metadata: None, + extensions: HashMap::new(), + }; + let proto = super::native_message_to_proto(original.clone()); + let restored = 
super::proto_message_to_native(proto).expect("should succeed"); + assert_eq!(restored.content, original.content); + } + + #[test] + fn content_block_image_roundtrip() { + use crate::messages::{ContentBlock, Message, MessageContent, Visibility}; + + let source = HashMap::from([ + ("media_type".to_string(), serde_json::json!("image/png")), + ("data".to_string(), serde_json::json!("iVBORw0KGgo=")), + ]); + let original = Message { + role: Role::User, + content: MessageContent::Blocks(vec![ContentBlock::Image { + source, + visibility: Some(Visibility::User), + extensions: HashMap::new(), + }]), + name: None, + tool_call_id: None, + metadata: None, + extensions: HashMap::new(), + }; + let proto = super::native_message_to_proto(original.clone()); + let restored = super::proto_message_to_native(proto).expect("should succeed"); + assert_eq!(restored.content, original.content); + } + + #[test] + fn content_block_reasoning_roundtrip() { + use crate::messages::{ContentBlock, Message, MessageContent}; + + let original = Message { + role: Role::Assistant, + content: MessageContent::Blocks(vec![ContentBlock::Reasoning { + content: vec![ + serde_json::json!({"type": "text", "text": "Step 1"}), + serde_json::json!({"type": "text", "text": "Step 2"}), + ], + summary: vec![serde_json::json!({"type": "text", "text": "Summary"})], + visibility: None, + extensions: HashMap::new(), + }]), + name: None, + tool_call_id: None, + metadata: None, + extensions: HashMap::new(), + }; + let proto = super::native_message_to_proto(original.clone()); + let restored = super::proto_message_to_native(proto).expect("should succeed"); + assert_eq!(restored.content, original.content); + } + + #[test] + fn message_none_content_returns_error() { + use super::super::amplifier_module; + + let proto = amplifier_module::Message { + role: amplifier_module::Role::User as i32, + content: None, + name: String::new(), + tool_call_id: String::new(), + metadata_json: String::new(), + }; + let result = 
super::proto_message_to_native(proto); + assert!(result.is_err(), "None content should return Err"); + } + + // -- ChatRequest conversion tests -- + + #[test] + fn chat_request_minimal_roundtrip() { + use crate::messages::{ChatRequest, Message, MessageContent}; + + let original = ChatRequest { + messages: vec![Message { + role: Role::User, + content: MessageContent::Text("Hello!".into()), + name: None, + tool_call_id: None, + metadata: None, + extensions: HashMap::new(), + }], + tools: None, + response_format: None, + temperature: None, + top_p: None, + max_output_tokens: None, + conversation_id: None, + stream: None, + metadata: None, + model: None, + tool_choice: None, + stop: None, + reasoning_effort: None, + timeout: None, + extensions: HashMap::new(), + }; + + let proto = super::native_chat_request_to_proto(&original); + let restored = super::proto_chat_request_to_native(proto); + + assert_eq!(restored.messages.len(), 1); + assert_eq!(restored.messages[0].role, original.messages[0].role); + assert_eq!(restored.messages[0].content, original.messages[0].content); + assert!(restored.tools.is_none()); + assert!(restored.response_format.is_none()); + assert!(restored.temperature.is_none()); + assert!(restored.model.is_none()); + } + + #[test] + fn chat_request_full_fields_roundtrip() { + use crate::messages::{ + ChatRequest, Message, MessageContent, ResponseFormat, ToolChoice, ToolSpec, + }; + + let original = ChatRequest { + messages: vec![Message { + role: Role::Assistant, + content: MessageContent::Text("I can help!".into()), + name: None, + tool_call_id: None, + metadata: None, + extensions: HashMap::new(), + }], + tools: Some(vec![ToolSpec { + name: "search".into(), + description: Some("Search the web".into()), + parameters: { + let mut m = HashMap::new(); + m.insert("type".into(), serde_json::json!("object")); + m.insert( + "properties".into(), + serde_json::json!({"query": {"type": "string"}}), + ); + m + }, + extensions: HashMap::new(), + }]), + 
response_format: Some(ResponseFormat::Text), + temperature: Some(0.7), + top_p: Some(0.9), + max_output_tokens: Some(2048), + conversation_id: Some("conv_abc".into()), + stream: Some(true), + metadata: Some({ + let mut m = HashMap::new(); + m.insert("source".into(), serde_json::json!("test-suite")); + m + }), + model: Some("gpt-4o".into()), + tool_choice: Some(ToolChoice::String("auto".into())), + stop: Some(vec!["END".into(), "STOP".into()]), + reasoning_effort: Some("high".into()), + timeout: Some(30.0), + extensions: HashMap::new(), + }; + + let proto = super::native_chat_request_to_proto(&original); + let restored = super::proto_chat_request_to_native(proto); + + assert_eq!(restored.messages.len(), 1); + assert_eq!(restored.model, Some("gpt-4o".into())); + assert_eq!(restored.temperature, Some(0.7)); + assert_eq!(restored.top_p, Some(0.9)); + assert_eq!(restored.max_output_tokens, Some(2048)); + assert_eq!(restored.conversation_id, Some("conv_abc".into())); + assert_eq!(restored.stream, Some(true)); + assert_eq!(restored.reasoning_effort, Some("high".into())); + assert_eq!(restored.timeout, Some(30.0)); + assert_eq!(restored.stop, Some(vec!["END".into(), "STOP".into()])); + assert_eq!( + restored.tool_choice, + Some(ToolChoice::String("auto".into())) + ); + assert_eq!(restored.response_format, Some(ResponseFormat::Text)); + assert_eq!(restored.metadata, original.metadata); + } + + #[test] + fn chat_request_tools_roundtrip() { + use crate::messages::{ChatRequest, Message, MessageContent, ToolSpec}; + + let original = ChatRequest { + messages: vec![Message { + role: Role::User, + content: MessageContent::Text("help".into()), + name: None, + tool_call_id: None, + metadata: None, + extensions: HashMap::new(), + }], + tools: Some(vec![ + ToolSpec { + name: "read_file".into(), + description: Some("Read a file from disk".into()), + parameters: { + let mut m = HashMap::new(); + m.insert("type".into(), serde_json::json!("object")); + m + }, + extensions: HashMap::new(), 
+ }, + ToolSpec { + name: "write_file".into(), + description: None, + parameters: HashMap::new(), + extensions: HashMap::new(), + }, + ]), + response_format: None, + temperature: None, + top_p: None, + max_output_tokens: None, + conversation_id: None, + stream: None, + metadata: None, + model: None, + tool_choice: None, + stop: None, + reasoning_effort: None, + timeout: None, + extensions: HashMap::new(), + }; + + let proto = super::native_chat_request_to_proto(&original); + let restored = super::proto_chat_request_to_native(proto); + + let tools = restored.tools.expect("tools must be Some"); + assert_eq!(tools.len(), 2); + assert_eq!(tools[0].name, "read_file"); + assert_eq!(tools[0].description, Some("Read a file from disk".into())); + let params_type = tools[0].parameters.get("type"); + assert_eq!(params_type, Some(&serde_json::json!("object"))); + assert_eq!(tools[1].name, "write_file"); + assert!(tools[1].description.is_none()); + } + + #[test] + fn chat_request_response_format_json_roundtrip() { + use crate::messages::{ChatRequest, Message, MessageContent, ResponseFormat}; + + let original = ChatRequest { + messages: vec![Message { + role: Role::User, + content: MessageContent::Text("go".into()), + name: None, + tool_call_id: None, + metadata: None, + extensions: HashMap::new(), + }], + tools: None, + response_format: Some(ResponseFormat::Json), + temperature: None, + top_p: None, + max_output_tokens: None, + conversation_id: None, + stream: None, + metadata: None, + model: None, + tool_choice: None, + stop: None, + reasoning_effort: None, + timeout: None, + extensions: HashMap::new(), + }; + + let proto = super::native_chat_request_to_proto(&original); + let restored = super::proto_chat_request_to_native(proto); + assert_eq!(restored.response_format, Some(ResponseFormat::Json)); + } + + #[test] + fn chat_request_response_format_json_schema_roundtrip() { + use crate::messages::{ChatRequest, Message, MessageContent, ResponseFormat}; + + let schema = { + let 
mut m = HashMap::new(); + m.insert("type".into(), serde_json::json!("object")); + m.insert( + "properties".into(), + serde_json::json!({"answer": {"type": "string"}}), + ); + m + }; + + let original = ChatRequest { + messages: vec![Message { + role: Role::User, + content: MessageContent::Text("go".into()), + name: None, + tool_call_id: None, + metadata: None, + extensions: HashMap::new(), + }], + tools: None, + response_format: Some(ResponseFormat::JsonSchema { + schema: schema.clone(), + strict: Some(true), + }), + temperature: None, + top_p: None, + max_output_tokens: None, + conversation_id: None, + stream: None, + metadata: None, + model: None, + tool_choice: None, + stop: None, + reasoning_effort: None, + timeout: None, + extensions: HashMap::new(), + }; + + let proto = super::native_chat_request_to_proto(&original); + let restored = super::proto_chat_request_to_native(proto); + + match restored.response_format { + Some(ResponseFormat::JsonSchema { + schema: restored_schema, + strict, + }) => { + assert_eq!( + restored_schema.get("type"), + Some(&serde_json::json!("object")) + ); + assert_eq!(strict, Some(true)); + } + other => panic!("Expected JsonSchema response_format, got: {other:?}"), + } + } + + #[test] + fn chat_request_tool_choice_object_roundtrip() { + use crate::messages::{ChatRequest, Message, MessageContent, ToolChoice}; + + let tool_choice_obj = { + let mut m = HashMap::new(); + m.insert("type".into(), serde_json::json!("function")); + m.insert("function".into(), serde_json::json!({"name": "read_file"})); + m + }; + + let original = ChatRequest { + messages: vec![Message { + role: Role::User, + content: MessageContent::Text("do it".into()), + name: None, + tool_call_id: None, + metadata: None, + extensions: HashMap::new(), + }], + tools: None, + response_format: None, + temperature: None, + top_p: None, + max_output_tokens: None, + conversation_id: None, + stream: None, + metadata: None, + model: None, + tool_choice: 
Some(ToolChoice::Object(tool_choice_obj.clone())), + stop: None, + reasoning_effort: None, + timeout: None, + extensions: HashMap::new(), + }; + + let proto = super::native_chat_request_to_proto(&original); + let restored = super::proto_chat_request_to_native(proto); + + match restored.tool_choice { + Some(ToolChoice::Object(obj)) => { + assert_eq!(obj.get("type"), Some(&serde_json::json!("function"))); + assert_eq!( + obj.get("function"), + Some(&serde_json::json!({"name": "read_file"})) + ); + } + other => panic!("Expected ToolChoice::Object, got: {other:?}"), + } + } + + // -- ChatResponse conversion tests (RED: functions not yet implemented) -- + + #[test] + fn chat_response_minimal_roundtrip() { + use crate::messages::ChatResponse; + + let original = ChatResponse { + content: vec![crate::messages::ContentBlock::Text { + text: "Hello, world!".into(), + visibility: None, + extensions: HashMap::new(), + }], + tool_calls: None, + usage: None, + degradation: None, + finish_reason: None, + metadata: None, + extensions: HashMap::new(), + }; + + let proto = super::native_chat_response_to_proto(&original); + let restored = super::proto_chat_response_to_native(proto); + + assert_eq!(restored.content.len(), 1); + assert_eq!(restored.content, original.content); + assert!(restored.tool_calls.is_none()); + assert!(restored.usage.is_none()); + assert!(restored.degradation.is_none()); + assert!(restored.finish_reason.is_none()); + assert!(restored.metadata.is_none()); + } + + #[test] + fn chat_response_full_fields_roundtrip() { + use crate::messages::{ChatResponse, Degradation, ToolCall, Usage}; + + let original = ChatResponse { + content: vec![ + crate::messages::ContentBlock::Text { + text: "Here's the answer.".into(), + visibility: None, + extensions: HashMap::new(), + }, + crate::messages::ContentBlock::Thinking { + thinking: "Let me reason...".into(), + signature: Some("sig_xyz".into()), + visibility: Some(crate::messages::Visibility::Internal), + content: None, + 
extensions: HashMap::new(), + }, + ], + tool_calls: Some(vec![ToolCall { + id: "call_001".into(), + name: "search".into(), + arguments: HashMap::from([ + ("query".to_string(), serde_json::json!("rust async")), + ("limit".to_string(), serde_json::json!(10)), + ]), + extensions: HashMap::new(), + }]), + usage: Some(Usage { + input_tokens: 200, + output_tokens: 100, + total_tokens: 300, + reasoning_tokens: Some(50), + cache_read_tokens: Some(20), + cache_write_tokens: None, + extensions: HashMap::new(), + }), + degradation: Some(Degradation { + requested: "gpt-4-turbo".into(), + actual: "gpt-4".into(), + reason: "rate limit".into(), + extensions: HashMap::new(), + }), + finish_reason: Some("stop".into()), + metadata: Some(HashMap::from([( + "request_id".to_string(), + serde_json::json!("req_abc123"), + )])), + extensions: HashMap::new(), + }; + + let proto = super::native_chat_response_to_proto(&original); + let restored = super::proto_chat_response_to_native(proto); + + // content blocks + assert_eq!(restored.content.len(), 2); + assert_eq!(restored.content, original.content); + + // tool_calls + let tool_calls = restored + .tool_calls + .as_ref() + .expect("tool_calls must be Some"); + assert_eq!(tool_calls.len(), 1); + assert_eq!(tool_calls[0].id, "call_001"); + assert_eq!(tool_calls[0].name, "search"); + assert_eq!( + tool_calls[0].arguments.get("query"), + Some(&serde_json::json!("rust async")) + ); + assert_eq!( + tool_calls[0].arguments.get("limit"), + Some(&serde_json::json!(10)) + ); + + // usage + let usage = restored.usage.as_ref().expect("usage must be Some"); + assert_eq!(usage.input_tokens, 200); + assert_eq!(usage.output_tokens, 100); + assert_eq!(usage.total_tokens, 300); + assert_eq!(usage.reasoning_tokens, Some(50)); + assert_eq!(usage.cache_read_tokens, Some(20)); + + // degradation + let deg = restored + .degradation + .as_ref() + .expect("degradation must be Some"); + assert_eq!(deg.requested, "gpt-4-turbo"); + assert_eq!(deg.actual, "gpt-4"); + 
assert_eq!(deg.reason, "rate limit"); + + // finish_reason + assert_eq!(restored.finish_reason, Some("stop".into())); + + // metadata + let meta = restored.metadata.as_ref().expect("metadata must be Some"); + assert_eq!( + meta.get("request_id"), + Some(&serde_json::json!("req_abc123")) + ); + } + + #[test] + fn chat_response_tool_calls_roundtrip() { + use crate::messages::{ChatResponse, ToolCall}; + + let original = ChatResponse { + content: vec![crate::messages::ContentBlock::Text { + text: "Let me look that up.".into(), + visibility: None, + extensions: HashMap::new(), + }], + tool_calls: Some(vec![ + ToolCall { + id: "call_A".into(), + name: "read_file".into(), + arguments: HashMap::from([( + "path".to_string(), + serde_json::json!("/tmp/data.txt"), + )]), + extensions: HashMap::new(), + }, + ToolCall { + id: "call_B".into(), + name: "write_file".into(), + arguments: HashMap::from([ + ("path".to_string(), serde_json::json!("/tmp/out.txt")), + ("content".to_string(), serde_json::json!("hello")), + ]), + extensions: HashMap::new(), + }, + ]), + usage: None, + degradation: None, + finish_reason: Some("tool_calls".into()), + metadata: None, + extensions: HashMap::new(), + }; + + let proto = super::native_chat_response_to_proto(&original); + let restored = super::proto_chat_response_to_native(proto); + + let tool_calls = restored.tool_calls.expect("tool_calls must be Some"); + assert_eq!(tool_calls.len(), 2); + assert_eq!(tool_calls[0].id, "call_A"); + assert_eq!(tool_calls[0].name, "read_file"); + assert_eq!(tool_calls[1].id, "call_B"); + assert_eq!(tool_calls[1].name, "write_file"); + assert_eq!(restored.finish_reason, Some("tool_calls".into())); + } + + #[test] + fn chat_response_empty_content_roundtrip() { + use crate::messages::ChatResponse; + + let original = ChatResponse { + content: vec![], + tool_calls: None, + usage: None, + degradation: None, + finish_reason: Some("stop".into()), + metadata: None, + extensions: HashMap::new(), + }; + + let proto = 
super::native_chat_response_to_proto(&original); + let restored = super::proto_chat_response_to_native(proto); + + assert!(restored.content.is_empty()); + assert_eq!(restored.finish_reason, Some("stop".into())); + } + + // -- HookResult native → proto conversion tests (RED: function not yet implemented) -- + + #[test] + fn hook_result_default_native_to_proto_fields() { + use super::super::amplifier_module; + use crate::models::HookResult; + + let native = HookResult::default(); + let proto = super::native_hook_result_to_proto(&native); + + // action: Continue (default) + assert_eq!(proto.action, amplifier_module::HookAction::Continue as i32); + // string optionals → empty strings + assert_eq!(proto.reason, ""); + assert_eq!(proto.context_injection, ""); + assert_eq!(proto.approval_prompt, ""); + assert_eq!(proto.user_message, ""); + assert_eq!(proto.user_message_source, ""); + // data_json: None → empty string + assert_eq!(proto.data_json, ""); + // bools: false (default) + assert!(!proto.ephemeral); + assert!(!proto.suppress_output); + assert!(!proto.append_to_last_tool_result); + // approval_options: None → empty vec + assert!(proto.approval_options.is_empty()); + // approval_timeout: 300.0 → Some(300.0) + assert_eq!(proto.approval_timeout, Some(300.0)); + // approval_default: Deny (default) + assert_eq!( + proto.approval_default, + amplifier_module::ApprovalDefault::Deny as i32 + ); + // context_injection_role: System (default) + assert_eq!( + proto.context_injection_role, + amplifier_module::ContextInjectionRole::System as i32 + ); + // user_message_level: Info (default) + assert_eq!( + proto.user_message_level, + amplifier_module::UserMessageLevel::Info as i32 + ); + } + + #[test] + fn hook_result_all_hook_action_variants_to_proto() { + use super::super::amplifier_module; + use crate::models::{HookAction, HookResult}; + + let cases = [ + ( + HookAction::Continue, + amplifier_module::HookAction::Continue as i32, + ), + ( + HookAction::Modify, + 
amplifier_module::HookAction::Modify as i32, + ), + (HookAction::Deny, amplifier_module::HookAction::Deny as i32), + ( + HookAction::InjectContext, + amplifier_module::HookAction::InjectContext as i32, + ), + ( + HookAction::AskUser, + amplifier_module::HookAction::AskUser as i32, + ), + ]; + for (native_action, expected_i32) in cases { + let native = HookResult { + action: native_action, + ..Default::default() + }; + let proto = super::native_hook_result_to_proto(&native); + assert_eq!(proto.action, expected_i32); + } + } + + #[test] + fn hook_result_context_injection_role_all_variants_to_proto() { + use super::super::amplifier_module; + use crate::models::{ContextInjectionRole, HookResult}; + + let cases = [ + ( + ContextInjectionRole::System, + amplifier_module::ContextInjectionRole::System as i32, + ), + ( + ContextInjectionRole::User, + amplifier_module::ContextInjectionRole::User as i32, + ), + ( + ContextInjectionRole::Assistant, + amplifier_module::ContextInjectionRole::Assistant as i32, + ), + ]; + for (native_role, expected_i32) in cases { + let native = HookResult { + context_injection_role: native_role, + ..Default::default() + }; + let proto = super::native_hook_result_to_proto(&native); + assert_eq!(proto.context_injection_role, expected_i32); + } + } + + #[test] + fn hook_result_approval_default_all_variants_to_proto() { + use super::super::amplifier_module; + use crate::models::{ApprovalDefault, HookResult}; + + // Allow → Approve + let native = HookResult { + approval_default: ApprovalDefault::Allow, + ..Default::default() + }; + let proto = super::native_hook_result_to_proto(&native); + assert_eq!( + proto.approval_default, + amplifier_module::ApprovalDefault::Approve as i32 + ); + + // Deny → Deny + let native = HookResult { + approval_default: ApprovalDefault::Deny, + ..Default::default() + }; + let proto = super::native_hook_result_to_proto(&native); + assert_eq!( + proto.approval_default, + amplifier_module::ApprovalDefault::Deny as i32 + ); + 
} + + #[test] + fn hook_result_user_message_level_all_variants_to_proto() { + use super::super::amplifier_module; + use crate::models::{HookResult, UserMessageLevel}; + + let cases = [ + ( + UserMessageLevel::Info, + amplifier_module::UserMessageLevel::Info as i32, + ), + ( + UserMessageLevel::Warning, + amplifier_module::UserMessageLevel::Warning as i32, + ), + ( + UserMessageLevel::Error, + amplifier_module::UserMessageLevel::Error as i32, + ), + ]; + for (native_level, expected_i32) in cases { + let native = HookResult { + user_message_level: native_level, + ..Default::default() + }; + let proto = super::native_hook_result_to_proto(&native); + assert_eq!(proto.user_message_level, expected_i32); + } + } + + #[test] + fn hook_result_string_option_fields_to_proto() { + use crate::models::HookResult; + + let native = HookResult { + reason: Some("blocked".to_string()), + context_injection: Some("extra context".to_string()), + approval_prompt: Some("Proceed?".to_string()), + user_message: Some("Watch out!".to_string()), + user_message_source: Some("security-hook".to_string()), + ..Default::default() + }; + let proto = super::native_hook_result_to_proto(&native); + assert_eq!(proto.reason, "blocked"); + assert_eq!(proto.context_injection, "extra context"); + assert_eq!(proto.approval_prompt, "Proceed?"); + assert_eq!(proto.user_message, "Watch out!"); + assert_eq!(proto.user_message_source, "security-hook"); + } + + #[test] + fn hook_result_bool_fields_to_proto() { + use crate::models::HookResult; + + let native = HookResult { + ephemeral: true, + suppress_output: true, + append_to_last_tool_result: true, + ..Default::default() + }; + let proto = super::native_hook_result_to_proto(&native); + assert!(proto.ephemeral); + assert!(proto.suppress_output); + assert!(proto.append_to_last_tool_result); + } + + #[test] + fn hook_result_approval_options_some_to_proto() { + use crate::models::HookResult; + + let native = HookResult { + approval_options: 
Some(vec!["allow".to_string(), "deny".to_string()]), + ..Default::default() + }; + let proto = super::native_hook_result_to_proto(&native); + assert_eq!( + proto.approval_options, + vec!["allow".to_string(), "deny".to_string()] + ); + } + + #[test] + fn hook_result_approval_options_none_to_empty_vec() { + use crate::models::HookResult; + + let native = HookResult { + approval_options: None, + ..Default::default() + }; + let proto = super::native_hook_result_to_proto(&native); + assert!(proto.approval_options.is_empty()); + } + + #[test] + fn hook_result_approval_timeout_to_optional_proto() { + use crate::models::HookResult; + + // Default 300.0 → Some(300.0) + let native = HookResult { + approval_timeout: 300.0, + ..Default::default() + }; + let proto = super::native_hook_result_to_proto(&native); + assert_eq!(proto.approval_timeout, Some(300.0)); + + // Custom 60.0 → Some(60.0) + let native = HookResult { + approval_timeout: 60.0, + ..Default::default() + }; + let proto = super::native_hook_result_to_proto(&native); + assert_eq!(proto.approval_timeout, Some(60.0)); + } + + #[test] + fn hook_result_data_json_some_to_proto() { + use crate::models::HookResult; + + let mut data = HashMap::new(); + data.insert("key".to_string(), serde_json::json!("value")); + let native = HookResult { + data: Some(data), + ..Default::default() + }; + let proto = super::native_hook_result_to_proto(&native); + // Should be valid non-empty JSON + assert!(!proto.data_json.is_empty()); + let parsed: serde_json::Value = + serde_json::from_str(&proto.data_json).expect("data_json should be valid JSON"); + assert_eq!(parsed["key"], serde_json::json!("value")); + } + + #[test] + fn hook_result_data_json_none_to_empty_string() { + use crate::models::HookResult; + + let native = HookResult { + data: None, + ..Default::default() + }; + let proto = super::native_hook_result_to_proto(&native); + assert_eq!(proto.data_json, ""); + } + + #[test] + fn hook_result_roundtrip_via_bridge_reverse() { + use 
crate::bridges::grpc_hook::GrpcHookBridge; + use crate::models::{ + ApprovalDefault, ContextInjectionRole, HookAction, HookResult, UserMessageLevel, + }; + + let original = HookResult { + action: HookAction::AskUser, + data: None, + reason: Some("needs approval".to_string()), + context_injection: Some("please confirm".to_string()), + context_injection_role: ContextInjectionRole::User, + ephemeral: true, + approval_prompt: Some("Allow this action?".to_string()), + approval_options: Some(vec!["yes".to_string(), "no".to_string()]), + approval_timeout: 120.0, + approval_default: ApprovalDefault::Allow, + suppress_output: true, + user_message: Some("Action requires approval".to_string()), + user_message_level: UserMessageLevel::Warning, + user_message_source: Some("approval-hook".to_string()), + append_to_last_tool_result: false, + extensions: HashMap::new(), + }; + + let proto = super::native_hook_result_to_proto(&original); + let restored = GrpcHookBridge::proto_to_native_hook_result(proto); + + assert_eq!(restored.action, original.action); + assert_eq!(restored.reason, original.reason); + assert_eq!(restored.context_injection, original.context_injection); + assert_eq!( + restored.context_injection_role, + original.context_injection_role + ); + assert_eq!(restored.ephemeral, original.ephemeral); + assert_eq!(restored.approval_prompt, original.approval_prompt); + assert_eq!(restored.approval_options, original.approval_options); + assert_eq!(restored.approval_timeout, original.approval_timeout); + assert_eq!(restored.approval_default, original.approval_default); + assert_eq!(restored.suppress_output, original.suppress_output); + assert_eq!(restored.user_message, original.user_message); + assert_eq!(restored.user_message_level, original.user_message_level); + assert_eq!(restored.user_message_source, original.user_message_source); + assert_eq!( + restored.append_to_last_tool_result, + original.append_to_last_tool_result + ); + } + + #[test] + fn 
chat_request_multiple_messages_roundtrip() { + use crate::messages::{ChatRequest, ContentBlock, Message, MessageContent}; + + let original = ChatRequest { + messages: vec![ + Message { + role: Role::System, + content: MessageContent::Text("You are helpful.".into()), + name: None, + tool_call_id: None, + metadata: None, + extensions: HashMap::new(), + }, + Message { + role: Role::User, + content: MessageContent::Blocks(vec![ContentBlock::Text { + text: "Help me!".into(), + visibility: None, + extensions: HashMap::new(), + }]), + name: None, + tool_call_id: None, + metadata: None, + extensions: HashMap::new(), + }, + ], + tools: None, + response_format: None, + temperature: Some(1.0), + top_p: None, + max_output_tokens: None, + conversation_id: None, + stream: None, + metadata: None, + model: Some("claude-3-opus".into()), + tool_choice: None, + stop: None, + reasoning_effort: None, + timeout: None, + extensions: HashMap::new(), + }; + + let proto = super::native_chat_request_to_proto(&original); + let restored = super::proto_chat_request_to_native(proto); + + assert_eq!(restored.messages.len(), 2); + assert_eq!(restored.messages[0].role, Role::System); + assert_eq!( + restored.messages[0].content, + MessageContent::Text("You are helpful.".into()) + ); + assert_eq!(restored.messages[1].role, Role::User); + assert_eq!(restored.model, Some("claude-3-opus".into())); + assert_eq!(restored.temperature, Some(1.0)); + } } diff --git a/crates/amplifier-core/src/generated/equivalence_tests.rs b/crates/amplifier-core/src/generated/equivalence_tests.rs index f945d6d..87b4775 100644 --- a/crates/amplifier-core/src/generated/equivalence_tests.rs +++ b/crates/amplifier-core/src/generated/equivalence_tests.rs @@ -69,7 +69,7 @@ mod tests { ephemeral: true, approval_prompt: "Allow this action?".into(), approval_options: vec!["yes".into(), "no".into(), "always".into()], - approval_timeout: 300.0, + approval_timeout: Some(300.0), approval_default: ApprovalDefault::Deny as i32, 
suppress_output: false, user_message: "Action requires approval".into(), @@ -88,7 +88,7 @@ mod tests { assert!(result.ephemeral); assert_eq!(result.approval_prompt, "Allow this action?"); assert_eq!(result.approval_options.len(), 3); - assert!((result.approval_timeout - 300.0).abs() < f64::EPSILON); + assert_eq!(result.approval_timeout, Some(300.0)); assert_eq!(result.approval_default, ApprovalDefault::Deny as i32); assert!(!result.suppress_output); assert_eq!(result.user_message, "Action requires approval"); @@ -173,16 +173,16 @@ mod tests { prompt_tokens: 100, completion_tokens: 50, total_tokens: 150, - reasoning_tokens: 20, - cache_read_tokens: 30, - cache_creation_tokens: 10, + reasoning_tokens: Some(20), + cache_read_tokens: Some(30), + cache_creation_tokens: Some(10), }; assert_eq!(usage.prompt_tokens, 100); assert_eq!(usage.completion_tokens, 50); assert_eq!(usage.total_tokens, 150); - assert_eq!(usage.reasoning_tokens, 20); - assert_eq!(usage.cache_read_tokens, 30); - assert_eq!(usage.cache_creation_tokens, 10); + assert_eq!(usage.reasoning_tokens, Some(20)); + assert_eq!(usage.cache_read_tokens, Some(30)); + assert_eq!(usage.cache_creation_tokens, Some(10)); } #[test] @@ -210,13 +210,13 @@ mod tests { action: "execute".into(), details_json: r#"{"command":"rm -rf /tmp/test"}"#.into(), risk_level: "high".into(), - timeout: 120.0, + timeout: Some(120.0), }; assert_eq!(request.tool_name, "bash"); assert_eq!(request.action, "execute"); assert_eq!(request.details_json, r#"{"command":"rm -rf /tmp/test"}"#); assert_eq!(request.risk_level, "high"); - assert!((request.timeout - 120.0).abs() < f64::EPSILON); + assert_eq!(request.timeout, Some(120.0)); let response = ApprovalResponse { approved: true, diff --git a/crates/amplifier-core/src/grpc_server.rs b/crates/amplifier-core/src/grpc_server.rs index 007a999..a90f49b 100644 --- a/crates/amplifier-core/src/grpc_server.rs +++ b/crates/amplifier-core/src/grpc_server.rs @@ -6,11 +6,69 @@ use std::sync::Arc; +use 
tonic::service::Interceptor; use tonic::{Request, Response, Status}; use crate::coordinator::Coordinator; use crate::generated::amplifier_module; use crate::generated::amplifier_module::kernel_service_server::KernelService; +use crate::generated::conversions::{ + native_chat_response_to_proto, native_hook_result_to_proto, native_message_to_proto, + proto_chat_request_to_native, proto_message_to_native, +}; + +/// Shared-secret authentication interceptor for KernelService. +/// Validates the `x-amplifier-token` metadata header on every request. +#[derive(Clone)] +pub struct AuthInterceptor { + expected_token: String, +} + +impl AuthInterceptor { + pub fn new(token: String) -> Self { + Self { + expected_token: token, + } + } +} + +impl Interceptor for AuthInterceptor { + fn call(&mut self, request: tonic::Request<()>) -> Result, Status> { + let token = request + .metadata() + .get("x-amplifier-token") + .and_then(|v| v.to_str().ok()); + + match token { + Some(t) if t == self.expected_token => Ok(request), + Some(_) => Err(Status::unauthenticated("invalid token")), + None => Err(Status::unauthenticated("missing x-amplifier-token header")), + } + } +} + +/// Maximum allowed size for any JSON payload field received over gRPC. +/// +/// Payloads exceeding this limit are rejected with `Status::invalid_argument` +/// before any parsing or coordinator work is attempted. +const MAX_JSON_PAYLOAD_BYTES: usize = 64 * 1024; // 64 KB + +/// Validate that a JSON string field does not exceed [`MAX_JSON_PAYLOAD_BYTES`]. +/// +/// Returns `Err(Status::invalid_argument(...))` when the payload is too large, +/// so callers can use the `?` operator directly. +/// +/// `tonic::Status` is unavoidably large; suppressing `result_large_err` here is +/// consistent with every other gRPC method in this file. 
+#[allow(clippy::result_large_err)] +fn validate_json_size(json: &str, field_name: &str) -> Result<(), Status> { + if json.len() > MAX_JSON_PAYLOAD_BYTES { + return Err(Status::invalid_argument(format!( + "{field_name} exceeds maximum size of {MAX_JSON_PAYLOAD_BYTES} bytes" + ))); + } + Ok(()) +} /// Implementation of the KernelService gRPC server. /// @@ -25,17 +83,57 @@ impl KernelServiceImpl { pub fn new(coordinator: Arc) -> Self { Self { coordinator } } + + /// Create a new KernelServiceImpl with a randomly generated auth token. + /// Returns `(service, token)` — the token must be passed to connecting modules. + pub fn new_with_auth(coordinator: Arc) -> (Self, String) { + let token = uuid::Uuid::new_v4().to_string(); + (Self { coordinator }, token) + } } #[tonic::async_trait] impl KernelService for KernelServiceImpl { async fn complete_with_provider( &self, - _request: Request, + request: Request, ) -> Result, Status> { - Err(Status::unimplemented( - "CompleteWithProvider not yet implemented", - )) + let req = request.into_inner(); + let provider_name = &req.provider_name; + + // Look up the provider in the coordinator + let provider = self + .coordinator + .get_provider(provider_name) + .ok_or_else(|| { + log::debug!("Provider lookup failed: {provider_name}"); + Status::not_found("Provider not available") + })?; + + // Extract the proto ChatRequest (required field) + let proto_chat_request = req + .request + .ok_or_else(|| Status::invalid_argument("Missing required field: request"))?; + + // Enforce payload size limit on the request's metadata_json field + if !proto_chat_request.metadata_json.is_empty() { + validate_json_size(&proto_chat_request.metadata_json, "request.metadata_json")?; + } + + // Convert proto ChatRequest → native ChatRequest + let native_request = proto_chat_request_to_native(proto_chat_request); + + // Call the provider + match provider.complete(native_request).await { + Ok(native_response) => { + let proto_response = 
native_chat_response_to_proto(&native_response); + Ok(Response::new(proto_response)) + } + Err(e) => { + log::error!("Provider completion failed for {provider_name}: {e}"); + Err(Status::internal("Provider completion failed")) + } + } } type CompleteWithProviderStreamingStream = @@ -43,11 +141,55 @@ impl KernelService for KernelServiceImpl { async fn complete_with_provider_streaming( &self, - _request: Request, + request: Request, ) -> Result, Status> { - Err(Status::unimplemented( - "CompleteWithProviderStreaming not yet implemented", - )) + let req = request.into_inner(); + let provider_name = &req.provider_name; + + // Look up the provider in the coordinator + let provider = self + .coordinator + .get_provider(provider_name) + .ok_or_else(|| { + log::debug!("Provider lookup failed: {provider_name}"); + Status::not_found("Provider not available") + })?; + + // Extract the proto ChatRequest (required field) + let proto_chat_request = req + .request + .ok_or_else(|| Status::invalid_argument("Missing required field: request"))?; + + // Enforce payload size limit on the request's metadata_json field + if !proto_chat_request.metadata_json.is_empty() { + validate_json_size(&proto_chat_request.metadata_json, "request.metadata_json")?; + } + + // Convert proto ChatRequest → native ChatRequest + let native_request = proto_chat_request_to_native(proto_chat_request); + + // Call the provider + let native_response = provider.complete(native_request).await.map_err(|e| { + log::error!("Provider completion failed for {provider_name}: {e}"); + Status::internal("Provider completion failed") + })?; + + let proto_response = native_chat_response_to_proto(&native_response); + + // NOTE: This is a one-shot "streaming" endpoint — it awaits the full provider + // response, then sends it as a single stream element. True token-level streaming + // requires provider.complete_stream() → Stream, which is + // not yet implemented. 
This endpoint exists for proto/gRPC API compatibility + // so clients can use the streaming RPC shape ahead of the real implementation. + let (tx, rx) = tokio::sync::mpsc::channel(1); + if tx.send(Ok(proto_response)).await.is_err() { + log::debug!("Streaming client disconnected before response was sent"); + } + // `tx` is dropped here, closing the channel and ending the stream. + + Ok(Response::new(tokio_stream::wrappers::ReceiverStream::new( + rx, + ))) } async fn execute_tool( @@ -58,14 +200,19 @@ impl KernelService for KernelServiceImpl { let tool_name = &req.tool_name; // Look up the tool in the coordinator - let tool = self - .coordinator - .get_tool(tool_name) - .ok_or_else(|| Status::not_found(format!("Tool not found: {tool_name}")))?; + let tool = self.coordinator.get_tool(tool_name).ok_or_else(|| { + log::debug!("Tool lookup failed: {tool_name}"); + Status::not_found("Tool not available") + })?; + + // Enforce payload size limit before parsing + validate_json_size(&req.input_json, "input_json")?; // Parse input JSON - let input: serde_json::Value = serde_json::from_str(&req.input_json) - .map_err(|e| Status::invalid_argument(format!("Invalid input JSON: {e}")))?; + let input: serde_json::Value = serde_json::from_str(&req.input_json).map_err(|e| { + log::debug!("Tool input JSON parse error for {tool_name}: {e}"); + Status::invalid_argument("Invalid input JSON") + })?; // Execute the tool match tool.execute(input).await { @@ -94,63 +241,291 @@ impl KernelService for KernelServiceImpl { error_json, })) } - Err(e) => Err(Status::internal(format!("Tool execution failed: {e}"))), + Err(e) => { + log::error!("Tool execution failed for {tool_name}: {e}"); + Err(Status::internal("Tool execution failed")) + } } } async fn emit_hook( &self, - _request: Request, + request: Request, ) -> Result, Status> { - Err(Status::unimplemented("EmitHook not yet implemented")) + let req = request.into_inner(); + + // Enforce payload size limit before parsing + if 
!req.data_json.is_empty() { + validate_json_size(&req.data_json, "data_json")?; + } + + let data: serde_json::Value = if req.data_json.is_empty() { + serde_json::json!({}) + } else { + serde_json::from_str(&req.data_json).map_err(|e| { + log::debug!( + "emit_hook data_json parse error for event '{}': {e}", + req.event + ); + Status::invalid_argument("Invalid data_json") + })? + }; + + let result = self.coordinator.hooks().emit(&req.event, data).await; + let proto_result = native_hook_result_to_proto(&result); + Ok(Response::new(proto_result)) } async fn emit_hook_and_collect( &self, - _request: Request, + request: Request, ) -> Result, Status> { - Err(Status::unimplemented( - "EmitHookAndCollect not yet implemented", + let req = request.into_inner(); + + // Enforce payload size limit before parsing + if !req.data_json.is_empty() { + validate_json_size(&req.data_json, "data_json")?; + } + + let data: serde_json::Value = if req.data_json.is_empty() { + serde_json::json!({}) + } else { + serde_json::from_str(&req.data_json).map_err(|e| { + log::debug!( + "emit_hook_and_collect data_json parse error for event '{}': {e}", + req.event + ); + Status::invalid_argument("Invalid data_json") + })? + }; + + let timeout = if req.timeout_seconds > 0.0 { + std::time::Duration::from_secs_f64(req.timeout_seconds) + } else { + std::time::Duration::from_secs(30) + }; + + let results = self + .coordinator + .hooks() + .emit_and_collect(&req.event, data, timeout) + .await; + + let responses_json: Vec = results + .iter() + .map(|map| { + serde_json::to_string(map).unwrap_or_else(|e| { + log::warn!("Failed to serialize hook collect result to JSON: {e}"); + String::new() + }) + }) + .collect(); + + Ok(Response::new( + amplifier_module::EmitHookAndCollectResponse { responses_json }, )) } + /// Get all conversation messages from the context manager. + /// + /// ## Session routing + /// + /// Session routing is implicit — each `KernelServiceImpl` is scoped to one + /// `Coordinator`. 
The `session_id` field is logged but not validated. + /// Cross-session isolation requires deploying separate `KernelService` + /// instances per session. async fn get_messages( &self, - _request: Request, + request: Request, ) -> Result, Status> { - Err(Status::unimplemented("GetMessages not yet implemented")) + let req = request.into_inner(); + log::debug!( + "get_messages: session_id={:?} (routing is implicit — \ + each KernelServiceImpl is scoped to one Coordinator)", + req.session_id + ); + + let context = self + .coordinator + .context() + .ok_or_else(|| Status::failed_precondition("No context manager mounted"))?; + + let values = context.get_messages().await.map_err(|e| { + log::error!("Failed to get messages from context: {e}"); + Status::internal("Failed to get messages") + })?; + + let messages: Vec = values + .into_iter() + .filter_map(|v| { + serde_json::from_value::(v) + .map(native_message_to_proto) + .map_err(|e| { + log::warn!("Skipping message that failed to deserialize: {e}"); + e + }) + .ok() + }) + .collect(); + + Ok(Response::new(amplifier_module::GetMessagesResponse { + messages, + })) } + /// Add a message to the context manager. + /// + /// ## Session routing + /// + /// Session routing is implicit — each `KernelServiceImpl` is scoped to one + /// `Coordinator`. The `session_id` field is logged but not validated. + /// Cross-session isolation requires deploying separate `KernelService` + /// instances per session. 
async fn add_message( &self, - _request: Request, + request: Request, ) -> Result, Status> { - Err(Status::unimplemented("AddMessage not yet implemented")) + let req = request.into_inner(); + log::debug!( + "add_message: session_id={:?} (routing is implicit — \ + each KernelServiceImpl is scoped to one Coordinator)", + req.session_id + ); + + let proto_message = req + .message + .ok_or_else(|| Status::invalid_argument("Missing required field: message"))?; + + // Enforce payload size limit on the message's metadata_json field + if !proto_message.metadata_json.is_empty() { + validate_json_size(&proto_message.metadata_json, "message.metadata_json")?; + } + + let native_message = proto_message_to_native(proto_message).map_err(|e| { + log::debug!("Message conversion error: {e}"); + Status::invalid_argument("Invalid message") + })?; + + let value = serde_json::to_value(native_message).map_err(|e| { + log::error!("Failed to serialize message to JSON: {e}"); + Status::internal("Failed to serialize message") + })?; + + let context = self + .coordinator + .context() + .ok_or_else(|| Status::failed_precondition("No context manager mounted"))?; + + context.add_message(value).await.map_err(|e| { + log::error!("Failed to add message to context: {e}"); + Status::internal("Failed to add message") + })?; + + Ok(Response::new(amplifier_module::Empty {})) } async fn get_mounted_module( &self, - _request: Request, + request: Request, ) -> Result, Status> { - Err(Status::unimplemented( - "GetMountedModule not yet implemented", - )) + let req = request.into_inner(); + let module_name = &req.module_name; + let module_type = amplifier_module::ModuleType::try_from(req.module_type) + .unwrap_or(amplifier_module::ModuleType::Unspecified); + + let found_info: Option = match module_type { + amplifier_module::ModuleType::Tool => { + self.coordinator + .get_tool(module_name) + .map(|tool| amplifier_module::ModuleInfo { + name: tool.name().to_string(), + module_type: 
amplifier_module::ModuleType::Tool as i32, + ..Default::default() + }) + } + amplifier_module::ModuleType::Provider => self + .coordinator + .get_provider(module_name) + .map(|provider| amplifier_module::ModuleInfo { + name: provider.name().to_string(), + module_type: amplifier_module::ModuleType::Provider as i32, + ..Default::default() + }), + amplifier_module::ModuleType::Unspecified => { + // Search tools first, then providers + if let Some(tool) = self.coordinator.get_tool(module_name) { + Some(amplifier_module::ModuleInfo { + name: tool.name().to_string(), + module_type: amplifier_module::ModuleType::Tool as i32, + ..Default::default() + }) + } else { + self.coordinator.get_provider(module_name).map(|provider| { + amplifier_module::ModuleInfo { + name: provider.name().to_string(), + module_type: amplifier_module::ModuleType::Provider as i32, + ..Default::default() + } + }) + } + } + // Hook, Memory, Guardrail, Approval — not yet stored by name in Coordinator + _ => None, + }; + + match found_info { + Some(info) => Ok(Response::new(amplifier_module::GetMountedModuleResponse { + found: true, + info: Some(info), + })), + None => Ok(Response::new(amplifier_module::GetMountedModuleResponse { + found: false, + info: None, + })), + } } async fn register_capability( &self, - _request: Request, + request: Request, ) -> Result, Status> { - Err(Status::unimplemented( - "RegisterCapability not yet implemented", - )) + let req = request.into_inner(); + + // Enforce payload size limit before parsing + validate_json_size(&req.value_json, "value_json")?; + + let value: serde_json::Value = serde_json::from_str(&req.value_json).map_err(|e| { + log::debug!( + "register_capability value_json parse error for '{}': {e}", + req.name + ); + Status::invalid_argument("Invalid value_json") + })?; + self.coordinator.register_capability(&req.name, value); + Ok(Response::new(amplifier_module::Empty {})) } async fn get_capability( &self, - _request: Request, + request: Request, ) -> Result, 
Status> { - Err(Status::unimplemented("GetCapability not yet implemented")) + let req = request.into_inner(); + match self.coordinator.get_capability(&req.name) { + Some(value) => { + let value_json = serde_json::to_string(&value).map_err(|e| { + log::error!("Failed to serialize capability '{}': {e}", req.name); + Status::internal("Failed to serialize capability") + })?; + Ok(Response::new(amplifier_module::GetCapabilityResponse { + found: true, + value_json, + })) + } + None => Ok(Response::new(amplifier_module::GetCapabilityResponse { + found: false, + value_json: String::new(), + })), + } } } @@ -163,4 +538,1266 @@ mod tests { let coord = Arc::new(Coordinator::new(Default::default())); let _service = KernelServiceImpl::new(coord); } + + // ----------------------------------------------------------------------- + // EmitHook tests + // ----------------------------------------------------------------------- + + #[tokio::test] + async fn emit_hook_with_no_handlers_returns_continue() { + let coord = Arc::new(Coordinator::new(Default::default())); + let service = KernelServiceImpl::new(coord); + + let request = Request::new(amplifier_module::EmitHookRequest { + event: "test:event".to_string(), + data_json: r#"{"key": "value"}"#.to_string(), + }); + + let result = service.emit_hook(request).await; + assert!(result.is_ok(), "Expected Ok, got: {result:?}"); + let inner = result.unwrap().into_inner(); + assert_eq!(inner.action, amplifier_module::HookAction::Continue as i32); + } + + #[tokio::test] + async fn emit_hook_calls_registered_handler() { + use crate::testing::FakeHookHandler; + + let coord = Arc::new(Coordinator::new(Default::default())); + let handler = Arc::new(FakeHookHandler::new()); + coord + .hooks() + .register("test:event", handler.clone(), 0, Some("test-hook".into())); + let service = KernelServiceImpl::new(coord); + + let request = Request::new(amplifier_module::EmitHookRequest { + event: "test:event".to_string(), + data_json: r#"{"key": 
"value"}"#.to_string(), + }); + + let result = service.emit_hook(request).await; + assert!(result.is_ok(), "Expected Ok, got: {result:?}"); + + let events = handler.recorded_events(); + assert_eq!(events.len(), 1, "Handler should have been called once"); + assert_eq!(events[0].0, "test:event"); + } + + #[tokio::test] + async fn emit_hook_returns_handler_result() { + use crate::models::{HookAction, HookResult}; + use crate::testing::FakeHookHandler; + + let coord = Arc::new(Coordinator::new(Default::default())); + let deny_result = HookResult { + action: HookAction::Deny, + reason: Some("blocked by test".into()), + ..Default::default() + }; + let handler = Arc::new(FakeHookHandler::with_result(deny_result)); + coord + .hooks() + .register("test:event", handler, 0, Some("deny-hook".into())); + let service = KernelServiceImpl::new(coord); + + let request = Request::new(amplifier_module::EmitHookRequest { + event: "test:event".to_string(), + data_json: String::new(), + }); + + let result = service.emit_hook(request).await.unwrap(); + let inner = result.into_inner(); + assert_eq!( + inner.action, + amplifier_module::HookAction::Deny as i32, + "Expected Deny action from handler" + ); + assert_eq!(inner.reason, "blocked by test"); + } + + #[tokio::test] + async fn emit_hook_invalid_json_returns_invalid_argument() { + let coord = Arc::new(Coordinator::new(Default::default())); + let service = KernelServiceImpl::new(coord); + + let request = Request::new(amplifier_module::EmitHookRequest { + event: "test:event".to_string(), + data_json: "not-valid-json{{{".to_string(), + }); + + let result = service.emit_hook(request).await; + assert!(result.is_err()); + assert_eq!(result.unwrap_err().code(), tonic::Code::InvalidArgument); + } + + #[tokio::test] + async fn emit_hook_empty_data_json_uses_empty_object() { + let coord = Arc::new(Coordinator::new(Default::default())); + let service = KernelServiceImpl::new(coord); + + let request = Request::new(amplifier_module::EmitHookRequest 
{ + event: "test:event".to_string(), + data_json: String::new(), // empty → should default to {} + }); + + // With no handlers, should still succeed (Continue result) + let result = service.emit_hook(request).await; + assert!( + result.is_ok(), + "Empty data_json should succeed, got: {result:?}" + ); + } + + // ----------------------------------------------------------------------- + // EmitHookAndCollect tests + // ----------------------------------------------------------------------- + + #[tokio::test] + async fn emit_hook_and_collect_with_no_handlers_returns_empty() { + let coord = Arc::new(Coordinator::new(Default::default())); + let service = KernelServiceImpl::new(coord); + + let request = Request::new(amplifier_module::EmitHookAndCollectRequest { + event: "test:event".to_string(), + data_json: String::new(), + timeout_seconds: 5.0, + }); + + let result = service.emit_hook_and_collect(request).await; + assert!(result.is_ok(), "Expected Ok, got: {result:?}"); + let inner = result.unwrap().into_inner(); + assert!( + inner.responses_json.is_empty(), + "Expected empty responses with no handlers" + ); + } + + #[tokio::test] + async fn emit_hook_and_collect_returns_data_from_handlers() { + use crate::models::HookResult; + use crate::testing::FakeHookHandler; + use std::collections::HashMap; + + let coord = Arc::new(Coordinator::new(Default::default())); + + let mut data_map = HashMap::new(); + data_map.insert("result".to_string(), serde_json::json!("from-handler")); + let result_with_data = HookResult { + data: Some(data_map), + ..Default::default() + }; + let handler = Arc::new(FakeHookHandler::with_result(result_with_data)); + coord + .hooks() + .register("collect:event", handler, 0, Some("data-hook".into())); + let service = KernelServiceImpl::new(coord); + + let request = Request::new(amplifier_module::EmitHookAndCollectRequest { + event: "collect:event".to_string(), + data_json: r#"{"input": "test"}"#.to_string(), + timeout_seconds: 5.0, + }); + + let result 
= service.emit_hook_and_collect(request).await; + assert!(result.is_ok(), "Expected Ok, got: {result:?}"); + let inner = result.unwrap().into_inner(); + assert_eq!( + inner.responses_json.len(), + 1, + "Expected 1 response from handler" + ); + + let parsed: serde_json::Value = + serde_json::from_str(&inner.responses_json[0]).expect("response must be valid JSON"); + assert_eq!(parsed["result"], serde_json::json!("from-handler")); + } + + #[tokio::test] + async fn emit_hook_and_collect_multiple_handlers_returns_all_data() { + use crate::models::HookResult; + use crate::testing::FakeHookHandler; + use std::collections::HashMap; + + let coord = Arc::new(Coordinator::new(Default::default())); + + for i in 0..3u32 { + let mut data_map = HashMap::new(); + data_map.insert("handler_id".to_string(), serde_json::json!(i)); + let result_with_data = HookResult { + data: Some(data_map), + ..Default::default() + }; + let handler = Arc::new(FakeHookHandler::with_result(result_with_data)); + coord.hooks().register( + "multi:event", + handler, + i as i32, + Some(format!("handler-{i}")), + ); + } + + let service = KernelServiceImpl::new(coord); + let request = Request::new(amplifier_module::EmitHookAndCollectRequest { + event: "multi:event".to_string(), + data_json: String::new(), + timeout_seconds: 5.0, + }); + + let result = service.emit_hook_and_collect(request).await.unwrap(); + let inner = result.into_inner(); + assert_eq!( + inner.responses_json.len(), + 3, + "Expected 3 responses from 3 handlers" + ); + } + + #[tokio::test] + async fn emit_hook_and_collect_invalid_json_returns_invalid_argument() { + let coord = Arc::new(Coordinator::new(Default::default())); + let service = KernelServiceImpl::new(coord); + + let request = Request::new(amplifier_module::EmitHookAndCollectRequest { + event: "test:event".to_string(), + data_json: "bad-json{{".to_string(), + timeout_seconds: 5.0, + }); + + let result = service.emit_hook_and_collect(request).await; + assert!(result.is_err()); + 
assert_eq!(result.unwrap_err().code(), tonic::Code::InvalidArgument); + } + + // ----------------------------------------------------------------------- + // RegisterCapability tests + // ----------------------------------------------------------------------- + + #[tokio::test] + async fn register_capability_stores_value() { + let coord = Arc::new(Coordinator::new(Default::default())); + let service = KernelServiceImpl::new(coord.clone()); + + let request = Request::new(amplifier_module::RegisterCapabilityRequest { + name: "my-cap".to_string(), + value_json: r#"{"key":"value"}"#.to_string(), + }); + + let result = service.register_capability(request).await; + assert!(result.is_ok(), "Expected Ok, got: {result:?}"); + + // Verify the capability is actually stored + let stored = coord.get_capability("my-cap"); + assert_eq!(stored, Some(serde_json::json!({"key": "value"}))); + } + + #[tokio::test] + async fn register_capability_invalid_json_returns_invalid_argument() { + let coord = Arc::new(Coordinator::new(Default::default())); + let service = KernelServiceImpl::new(coord); + + let request = Request::new(amplifier_module::RegisterCapabilityRequest { + name: "my-cap".to_string(), + value_json: "not-valid-json{{{".to_string(), + }); + + let result = service.register_capability(request).await; + assert!(result.is_err()); + assert_eq!(result.unwrap_err().code(), tonic::Code::InvalidArgument); + } + + // ----------------------------------------------------------------------- + // GetCapability tests + // ----------------------------------------------------------------------- + + #[tokio::test] + async fn get_capability_returns_found_true_when_registered() { + let coord = Arc::new(Coordinator::new(Default::default())); + coord.register_capability("streaming", serde_json::json!(true)); + let service = KernelServiceImpl::new(coord); + + let request = Request::new(amplifier_module::GetCapabilityRequest { + name: "streaming".to_string(), + }); + + let result = 
service.get_capability(request).await.unwrap(); + let inner = result.into_inner(); + assert!(inner.found); + let parsed: serde_json::Value = serde_json::from_str(&inner.value_json).unwrap(); + assert_eq!(parsed, serde_json::json!(true)); + } + + #[tokio::test] + async fn get_capability_returns_found_false_when_missing() { + let coord = Arc::new(Coordinator::new(Default::default())); + let service = KernelServiceImpl::new(coord); + + let request = Request::new(amplifier_module::GetCapabilityRequest { + name: "nonexistent".to_string(), + }); + + let result = service.get_capability(request).await.unwrap(); + let inner = result.into_inner(); + assert!(!inner.found); + assert!(inner.value_json.is_empty()); + } + + #[tokio::test] + async fn register_then_get_capability_roundtrip() { + let coord = Arc::new(Coordinator::new(Default::default())); + let service = KernelServiceImpl::new(coord); + + // Register + let reg_request = Request::new(amplifier_module::RegisterCapabilityRequest { + name: "config".to_string(), + value_json: r#"{"model":"gpt-4","max_tokens":1000}"#.to_string(), + }); + service.register_capability(reg_request).await.unwrap(); + + // Get + let get_request = Request::new(amplifier_module::GetCapabilityRequest { + name: "config".to_string(), + }); + let result = service.get_capability(get_request).await.unwrap(); + let inner = result.into_inner(); + assert!(inner.found); + let parsed: serde_json::Value = serde_json::from_str(&inner.value_json).unwrap(); + assert_eq!(parsed["model"], serde_json::json!("gpt-4")); + assert_eq!(parsed["max_tokens"], serde_json::json!(1000)); + } + + // ----------------------------------------------------------------------- + // GetMountedModule tests + // ----------------------------------------------------------------------- + + #[tokio::test] + async fn get_mounted_module_finds_tool_by_name() { + use crate::testing::FakeTool; + let coord = Arc::new(Coordinator::new(Default::default())); + coord.mount_tool("my-tool", 
Arc::new(FakeTool::new("my-tool", "a test tool"))); + let service = KernelServiceImpl::new(coord); + + let request = Request::new(amplifier_module::GetMountedModuleRequest { + module_name: "my-tool".to_string(), + module_type: amplifier_module::ModuleType::Tool as i32, + }); + + let result = service.get_mounted_module(request).await.unwrap(); + let inner = result.into_inner(); + assert!(inner.found, "Expected found=true for mounted tool"); + let info = inner.info.expect("Expected ModuleInfo to be present"); + assert_eq!(info.name, "my-tool"); + assert_eq!(info.module_type, amplifier_module::ModuleType::Tool as i32); + } + + #[tokio::test] + async fn get_mounted_module_returns_not_found_for_missing_tool() { + let coord = Arc::new(Coordinator::new(Default::default())); + let service = KernelServiceImpl::new(coord); + + let request = Request::new(amplifier_module::GetMountedModuleRequest { + module_name: "nonexistent-tool".to_string(), + module_type: amplifier_module::ModuleType::Tool as i32, + }); + + let result = service.get_mounted_module(request).await.unwrap(); + let inner = result.into_inner(); + assert!(!inner.found, "Expected found=false for missing tool"); + assert!(inner.info.is_none()); + } + + #[tokio::test] + async fn get_mounted_module_finds_provider_by_name() { + use crate::testing::FakeProvider; + let coord = Arc::new(Coordinator::new(Default::default())); + coord.mount_provider("openai", Arc::new(FakeProvider::new("openai", "hello"))); + let service = KernelServiceImpl::new(coord); + + let request = Request::new(amplifier_module::GetMountedModuleRequest { + module_name: "openai".to_string(), + module_type: amplifier_module::ModuleType::Provider as i32, + }); + + let result = service.get_mounted_module(request).await.unwrap(); + let inner = result.into_inner(); + assert!(inner.found, "Expected found=true for mounted provider"); + let info = inner.info.expect("Expected ModuleInfo to be present"); + assert_eq!(info.name, "openai"); + assert_eq!( + 
info.module_type, + amplifier_module::ModuleType::Provider as i32 + ); + } + + #[tokio::test] + async fn get_mounted_module_unspecified_type_finds_tool() { + use crate::testing::FakeTool; + let coord = Arc::new(Coordinator::new(Default::default())); + coord.mount_tool("bash", Arc::new(FakeTool::new("bash", "runs bash"))); + let service = KernelServiceImpl::new(coord); + + let request = Request::new(amplifier_module::GetMountedModuleRequest { + module_name: "bash".to_string(), + module_type: amplifier_module::ModuleType::Unspecified as i32, + }); + + let result = service.get_mounted_module(request).await.unwrap(); + let inner = result.into_inner(); + assert!(inner.found, "UNSPECIFIED type should find a mounted tool"); + let info = inner.info.expect("Expected ModuleInfo to be present"); + assert_eq!(info.name, "bash"); + assert_eq!(info.module_type, amplifier_module::ModuleType::Tool as i32); + } + + #[tokio::test] + async fn get_mounted_module_unspecified_type_finds_provider() { + use crate::testing::FakeProvider; + let coord = Arc::new(Coordinator::new(Default::default())); + coord.mount_provider("anthropic", Arc::new(FakeProvider::new("anthropic", "hi"))); + let service = KernelServiceImpl::new(coord); + + let request = Request::new(amplifier_module::GetMountedModuleRequest { + module_name: "anthropic".to_string(), + module_type: amplifier_module::ModuleType::Unspecified as i32, + }); + + let result = service.get_mounted_module(request).await.unwrap(); + let inner = result.into_inner(); + assert!( + inner.found, + "UNSPECIFIED type should find a mounted provider" + ); + let info = inner.info.expect("Expected ModuleInfo to be present"); + assert_eq!(info.name, "anthropic"); + assert_eq!( + info.module_type, + amplifier_module::ModuleType::Provider as i32 + ); + } + + #[tokio::test] + async fn get_mounted_module_wrong_type_returns_not_found() { + use crate::testing::FakeTool; + let coord = Arc::new(Coordinator::new(Default::default())); + coord.mount_tool("my-tool", 
Arc::new(FakeTool::new("my-tool", "a test tool"))); + let service = KernelServiceImpl::new(coord); + + // Tool is mounted but we query as PROVIDER type — should not find it + let request = Request::new(amplifier_module::GetMountedModuleRequest { + module_name: "my-tool".to_string(), + module_type: amplifier_module::ModuleType::Provider as i32, + }); + + let result = service.get_mounted_module(request).await.unwrap(); + let inner = result.into_inner(); + assert!( + !inner.found, + "Querying a tool name as PROVIDER type should return not found" + ); + } + + // ----------------------------------------------------------------------- + // AddMessage tests + // ----------------------------------------------------------------------- + + #[tokio::test] + async fn add_message_stores_message_in_context() { + use crate::testing::FakeContextManager; + use crate::traits::ContextManager as _; + let coord = Arc::new(Coordinator::new(Default::default())); + let ctx = Arc::new(FakeContextManager::new()); + coord.set_context(ctx.clone()); + let service = KernelServiceImpl::new(coord); + + let request = Request::new(amplifier_module::KernelAddMessageRequest { + session_id: String::new(), + message: Some(amplifier_module::Message { + role: amplifier_module::Role::User as i32, + content: Some(amplifier_module::message::Content::TextContent( + "Hello from gRPC".to_string(), + )), + name: String::new(), + tool_call_id: String::new(), + metadata_json: String::new(), + }), + }); + + let result = service.add_message(request).await; + assert!(result.is_ok(), "Expected Ok, got: {result:?}"); + + // Verify message was stored in context + let messages = ctx.get_messages().await.unwrap(); + assert_eq!(messages.len(), 1); + assert_eq!(messages[0]["role"], "user"); + } + + #[tokio::test] + async fn add_message_no_context_returns_failed_precondition() { + let coord = Arc::new(Coordinator::new(Default::default())); + let service = KernelServiceImpl::new(coord); + + let request = 
Request::new(amplifier_module::KernelAddMessageRequest { + session_id: String::new(), + message: Some(amplifier_module::Message { + role: amplifier_module::Role::User as i32, + content: Some(amplifier_module::message::Content::TextContent( + "Hello".to_string(), + )), + name: String::new(), + tool_call_id: String::new(), + metadata_json: String::new(), + }), + }); + + let result = service.add_message(request).await; + assert!(result.is_err()); + assert_eq!( + result.unwrap_err().code(), + tonic::Code::FailedPrecondition, + "Should return FailedPrecondition when no context mounted" + ); + } + + #[tokio::test] + async fn add_message_missing_message_field_returns_invalid_argument() { + use crate::testing::FakeContextManager; + let coord = Arc::new(Coordinator::new(Default::default())); + coord.set_context(Arc::new(FakeContextManager::new())); + let service = KernelServiceImpl::new(coord); + + let request = Request::new(amplifier_module::KernelAddMessageRequest { + session_id: String::new(), + message: None, // no message + }); + + let result = service.add_message(request).await; + assert!(result.is_err()); + assert_eq!( + result.unwrap_err().code(), + tonic::Code::InvalidArgument, + "Should return InvalidArgument when message field is missing" + ); + } + + // ----------------------------------------------------------------------- + // GetMessages tests + // ----------------------------------------------------------------------- + + #[tokio::test] + async fn get_messages_returns_stored_messages() { + use crate::testing::FakeContextManager; + use crate::traits::ContextManager as _; + let coord = Arc::new(Coordinator::new(Default::default())); + let ctx = Arc::new(FakeContextManager::new()); + // Pre-populate context with two messages via Value + ctx.add_message(serde_json::json!({"role": "user", "content": "hi"})) + .await + .unwrap(); + ctx.add_message(serde_json::json!({"role": "assistant", "content": "hello"})) + .await + .unwrap(); + coord.set_context(ctx); + let 
service = KernelServiceImpl::new(coord); + + let request = Request::new(amplifier_module::GetMessagesRequest { + session_id: String::new(), + }); + + let result = service.get_messages(request).await.unwrap(); + let inner = result.into_inner(); + assert_eq!(inner.messages.len(), 2, "Expected 2 messages"); + } + + #[tokio::test] + async fn get_messages_empty_context_returns_empty_list() { + use crate::testing::FakeContextManager; + let coord = Arc::new(Coordinator::new(Default::default())); + coord.set_context(Arc::new(FakeContextManager::new())); + let service = KernelServiceImpl::new(coord); + + let request = Request::new(amplifier_module::GetMessagesRequest { + session_id: String::new(), + }); + + let result = service.get_messages(request).await.unwrap(); + let inner = result.into_inner(); + assert!(inner.messages.is_empty(), "Expected empty messages list"); + } + + // ----------------------------------------------------------------------- + // H-04: Session ID routing documentation + // ----------------------------------------------------------------------- + + /// Session ID is received and logged but does NOT affect routing. + /// Each KernelServiceImpl is scoped to one Coordinator — cross-session + /// isolation requires deploying separate KernelService instances per session. 
+ #[tokio::test] + async fn get_messages_session_id_received_but_does_not_affect_routing() { + use crate::testing::FakeContextManager; + let coord = Arc::new(Coordinator::new(Default::default())); + coord.set_context(Arc::new(FakeContextManager::new())); + let service = KernelServiceImpl::new(coord); + + // Even with an explicit session_id, routing is to the single scoped coordinator + let request = Request::new(amplifier_module::GetMessagesRequest { + session_id: "explicit-session-abc123".to_string(), + }); + + let result = service.get_messages(request).await; + assert!( + result.is_ok(), + "get_messages must succeed regardless of session_id value; got: {result:?}" + ); + } + + /// add_message: session_id is received and logged but does NOT affect routing. + #[tokio::test] + async fn add_message_session_id_received_but_does_not_affect_routing() { + use crate::testing::FakeContextManager; + let coord = Arc::new(Coordinator::new(Default::default())); + coord.set_context(Arc::new(FakeContextManager::new())); + let service = KernelServiceImpl::new(coord); + + let request = Request::new(amplifier_module::KernelAddMessageRequest { + session_id: "explicit-session-abc123".to_string(), + message: Some(amplifier_module::Message { + role: amplifier_module::Role::User as i32, + content: Some(amplifier_module::message::Content::TextContent( + "hello".to_string(), + )), + name: String::new(), + tool_call_id: String::new(), + metadata_json: String::new(), + }), + }); + + let result = service.add_message(request).await; + assert!( + result.is_ok(), + "add_message must succeed regardless of session_id value; got: {result:?}" + ); + } + + #[tokio::test] + async fn get_messages_no_context_returns_failed_precondition() { + let coord = Arc::new(Coordinator::new(Default::default())); + let service = KernelServiceImpl::new(coord); + + let request = Request::new(amplifier_module::GetMessagesRequest { + session_id: String::new(), + }); + + let result = service.get_messages(request).await; 
+ assert!(result.is_err()); + assert_eq!( + result.unwrap_err().code(), + tonic::Code::FailedPrecondition, + "Should return FailedPrecondition when no context mounted" + ); + } + + // ----------------------------------------------------------------------- + // CompleteWithProvider tests + // ----------------------------------------------------------------------- + + /// Build a minimal proto ChatRequest with a single user message. + fn make_chat_request(text: &str) -> amplifier_module::ChatRequest { + amplifier_module::ChatRequest { + messages: vec![amplifier_module::Message { + role: amplifier_module::Role::User as i32, + content: Some(amplifier_module::message::Content::TextContent( + text.to_string(), + )), + name: String::new(), + tool_call_id: String::new(), + metadata_json: String::new(), + }], + tools: vec![], + response_format: None, + temperature: 0.0, + top_p: 0.0, + max_output_tokens: 0, + conversation_id: String::new(), + stream: false, + metadata_json: String::new(), + model: String::new(), + tool_choice: String::new(), + stop: vec![], + reasoning_effort: String::new(), + timeout: 0.0, + } + } + + #[tokio::test] + async fn complete_with_provider_returns_response_from_mounted_provider() { + use crate::testing::FakeProvider; + + let coord = Arc::new(Coordinator::new(Default::default())); + coord.mount_provider( + "openai", + Arc::new(FakeProvider::new("openai", "hello from openai")), + ); + let service = KernelServiceImpl::new(coord); + + let request = Request::new(amplifier_module::CompleteWithProviderRequest { + provider_name: "openai".to_string(), + request: Some(make_chat_request("ping")), + }); + + let result = service.complete_with_provider(request).await; + assert!(result.is_ok(), "Expected Ok, got: {result:?}"); + let inner = result.unwrap().into_inner(); + // The content field contains JSON-serialized ContentBlocks + assert!(!inner.content.is_empty(), "Expected non-empty content"); + assert!( + inner.content.contains("hello from openai"), + 
"Expected response to contain provider text, got: {}", + inner.content + ); + } + + #[tokio::test] + async fn complete_with_provider_not_found_returns_not_found_status() { + let coord = Arc::new(Coordinator::new(Default::default())); + let service = KernelServiceImpl::new(coord); + + let request = Request::new(amplifier_module::CompleteWithProviderRequest { + provider_name: "nonexistent-provider".to_string(), + request: Some(make_chat_request("hello")), + }); + + let result = service.complete_with_provider(request).await; + assert!(result.is_err()); + assert_eq!( + result.unwrap_err().code(), + tonic::Code::NotFound, + "Should return NotFound when provider is not mounted" + ); + } + + #[tokio::test] + async fn complete_with_provider_missing_request_returns_invalid_argument() { + use crate::testing::FakeProvider; + + let coord = Arc::new(Coordinator::new(Default::default())); + coord.mount_provider("openai", Arc::new(FakeProvider::new("openai", "response"))); + let service = KernelServiceImpl::new(coord); + + let request = Request::new(amplifier_module::CompleteWithProviderRequest { + provider_name: "openai".to_string(), + request: None, // missing request + }); + + let result = service.complete_with_provider(request).await; + assert!(result.is_err()); + assert_eq!( + result.unwrap_err().code(), + tonic::Code::InvalidArgument, + "Should return InvalidArgument when request field is missing" + ); + } + + #[tokio::test] + async fn complete_with_provider_records_call_in_provider() { + use crate::testing::FakeProvider; + + let coord = Arc::new(Coordinator::new(Default::default())); + let fake_provider = Arc::new(FakeProvider::new("anthropic", "recorded response")); + coord.mount_provider("anthropic", fake_provider.clone()); + let service = KernelServiceImpl::new(coord); + + let request = Request::new(amplifier_module::CompleteWithProviderRequest { + provider_name: "anthropic".to_string(), + request: Some(make_chat_request("test message")), + }); + + let result = 
service.complete_with_provider(request).await; + assert!(result.is_ok(), "Expected Ok, got: {result:?}"); + + let calls = fake_provider.recorded_calls(); + assert_eq!(calls.len(), 1, "Provider should have been called once"); + assert_eq!(calls[0].messages.len(), 1); + } + + // ----------------------------------------------------------------------- + // CompleteWithProviderStreaming tests + // ----------------------------------------------------------------------- + + #[tokio::test] + async fn complete_with_provider_streaming_returns_single_response() { + use crate::testing::FakeProvider; + use tokio_stream::StreamExt as _; + + let coord = Arc::new(Coordinator::new(Default::default())); + coord.mount_provider( + "openai", + Arc::new(FakeProvider::new("openai", "streamed hello")), + ); + let service = KernelServiceImpl::new(coord); + + let request = Request::new(amplifier_module::CompleteWithProviderRequest { + provider_name: "openai".to_string(), + request: Some(make_chat_request("ping")), + }); + + let result = service.complete_with_provider_streaming(request).await; + assert!(result.is_ok(), "Expected Ok, got: {result:?}"); + + let mut stream = result.unwrap().into_inner(); + let mut chunks = Vec::new(); + while let Some(item) = stream.next().await { + chunks.push(item); + } + + assert_eq!(chunks.len(), 1, "Expected exactly one streamed chunk"); + let response = chunks.remove(0).expect("Expected Ok chunk"); + assert!( + response.content.contains("streamed hello"), + "Expected response to contain provider text, got: {}", + response.content + ); + } + + #[tokio::test] + async fn complete_with_provider_streaming_not_found_returns_error() { + let coord = Arc::new(Coordinator::new(Default::default())); + let service = KernelServiceImpl::new(coord); + + let request = Request::new(amplifier_module::CompleteWithProviderRequest { + provider_name: "nonexistent".to_string(), + request: Some(make_chat_request("ping")), + }); + + let result = 
service.complete_with_provider_streaming(request).await; + assert!(result.is_err()); + assert_eq!( + result.unwrap_err().code(), + tonic::Code::NotFound, + "Should return NotFound when provider is not mounted" + ); + } + + #[tokio::test] + async fn complete_with_provider_streaming_missing_request_returns_invalid_argument() { + use crate::testing::FakeProvider; + + let coord = Arc::new(Coordinator::new(Default::default())); + coord.mount_provider("openai", Arc::new(FakeProvider::new("openai", "response"))); + let service = KernelServiceImpl::new(coord); + + let request = Request::new(amplifier_module::CompleteWithProviderRequest { + provider_name: "openai".to_string(), + request: None, // missing request + }); + + let result = service.complete_with_provider_streaming(request).await; + assert!(result.is_err()); + assert_eq!( + result.unwrap_err().code(), + tonic::Code::InvalidArgument, + "Should return InvalidArgument when request field is missing" + ); + } + + // ----------------------------------------------------------------------- + // AuthInterceptor tests (C-01) + // ----------------------------------------------------------------------- + + // ----------------------------------------------------------------------- + // H-07: JSON payload size limits + // ----------------------------------------------------------------------- + + #[tokio::test] + async fn execute_tool_rejects_oversized_input_json() { + use crate::testing::FakeTool; + let coord = Arc::new(Coordinator::new(Default::default())); + coord.mount_tool("my-tool", Arc::new(FakeTool::new("my-tool", "a test tool"))); + let service = KernelServiceImpl::new(coord); + + // 128 KB of JSON — exceeds the 64 KB limit + let big_value = "x".repeat(128 * 1024); + let oversized_json = format!("\"{}\"", big_value); + + let request = Request::new(amplifier_module::ExecuteToolRequest { + tool_name: "my-tool".to_string(), + input_json: oversized_json, + }); + + let result = service.execute_tool(request).await; + 
assert!(result.is_err(), "Expected error for oversized input_json"); + assert_eq!( + result.unwrap_err().code(), + tonic::Code::InvalidArgument, + "Expected InvalidArgument for oversized payload" + ); + } + + #[tokio::test] + async fn emit_hook_rejects_oversized_data_json() { + let coord = Arc::new(Coordinator::new(Default::default())); + let service = KernelServiceImpl::new(coord); + + let big_value = "x".repeat(128 * 1024); + let oversized_json = format!("\"{}\"", big_value); + + let request = Request::new(amplifier_module::EmitHookRequest { + event: "test:event".to_string(), + data_json: oversized_json, + }); + + let result = service.emit_hook(request).await; + assert!(result.is_err(), "Expected error for oversized data_json"); + assert_eq!( + result.unwrap_err().code(), + tonic::Code::InvalidArgument, + "Expected InvalidArgument for oversized payload" + ); + } + + #[tokio::test] + async fn emit_hook_and_collect_rejects_oversized_data_json() { + let coord = Arc::new(Coordinator::new(Default::default())); + let service = KernelServiceImpl::new(coord); + + let big_value = "x".repeat(128 * 1024); + let oversized_json = format!("\"{}\"", big_value); + + let request = Request::new(amplifier_module::EmitHookAndCollectRequest { + event: "test:event".to_string(), + data_json: oversized_json, + timeout_seconds: 5.0, + }); + + let result = service.emit_hook_and_collect(request).await; + assert!(result.is_err(), "Expected error for oversized data_json"); + assert_eq!( + result.unwrap_err().code(), + tonic::Code::InvalidArgument, + "Expected InvalidArgument for oversized payload" + ); + } + + #[tokio::test] + async fn register_capability_rejects_oversized_value_json() { + let coord = Arc::new(Coordinator::new(Default::default())); + let service = KernelServiceImpl::new(coord); + + let big_value = "x".repeat(128 * 1024); + let oversized_json = format!("\"{}\"", big_value); + + let request = Request::new(amplifier_module::RegisterCapabilityRequest { + name: 
"my-cap".to_string(), + value_json: oversized_json, + }); + + let result = service.register_capability(request).await; + assert!(result.is_err(), "Expected error for oversized value_json"); + assert_eq!( + result.unwrap_err().code(), + tonic::Code::InvalidArgument, + "Expected InvalidArgument for oversized payload" + ); + } + + /// Payloads at or under 64 KB must still be accepted. + #[tokio::test] + async fn execute_tool_accepts_payload_at_size_limit() { + use crate::testing::FakeTool; + let coord = Arc::new(Coordinator::new(Default::default())); + coord.mount_tool("my-tool", Arc::new(FakeTool::new("my-tool", "a test tool"))); + let service = KernelServiceImpl::new(coord); + + // Exactly 64 KB of quoted string content ← should succeed + let at_limit = "x".repeat(64 * 1024 - 2); // subtract 2 for the surrounding quotes + let at_limit_json = format!("\"{}\"", at_limit); + assert_eq!(at_limit_json.len(), 64 * 1024); + + let request = Request::new(amplifier_module::ExecuteToolRequest { + tool_name: "my-tool".to_string(), + input_json: at_limit_json, + }); + + let result = service.execute_tool(request).await; + assert!( + result.is_ok(), + "Payload at exactly the size limit must be accepted; got: {result:?}" + ); + } + + #[test] + fn auth_interceptor_rejects_missing_token() { + use tonic::service::Interceptor as _; + + let mut interceptor = AuthInterceptor::new("secret-token".to_string()); + // Request with no metadata header + let request = tonic::Request::new(()); + let result = interceptor.call(request); + + assert!(result.is_err(), "Expected Err for missing token"); + let status = result.unwrap_err(); + assert_eq!( + status.code(), + tonic::Code::Unauthenticated, + "Expected Unauthenticated, got: {status:?}" + ); + assert!( + status.message().contains("missing"), + "Expected 'missing' in message, got: {}", + status.message() + ); + } + + #[test] + fn auth_interceptor_rejects_wrong_token() { + use tonic::service::Interceptor as _; + + let mut interceptor = 
AuthInterceptor::new("correct-token".to_string()); + let mut request = tonic::Request::new(()); + request + .metadata_mut() + .insert("x-amplifier-token", "wrong-token".parse().unwrap()); + let result = interceptor.call(request); + + assert!(result.is_err(), "Expected Err for wrong token"); + let status = result.unwrap_err(); + assert_eq!( + status.code(), + tonic::Code::Unauthenticated, + "Expected Unauthenticated, got: {status:?}" + ); + assert!( + status.message().contains("invalid"), + "Expected 'invalid' in message, got: {}", + status.message() + ); + } + + #[test] + fn auth_interceptor_accepts_correct_token() { + use tonic::service::Interceptor as _; + + let token = "my-shared-secret"; + let mut interceptor = AuthInterceptor::new(token.to_string()); + let mut request = tonic::Request::new(()); + request + .metadata_mut() + .insert("x-amplifier-token", token.parse().unwrap()); + let result = interceptor.call(request); + + assert!( + result.is_ok(), + "Expected Ok for correct token, got: {result:?}" + ); + } + + #[test] + fn new_with_auth_returns_service_and_nonempty_token() { + let coord = Arc::new(Coordinator::new(Default::default())); + let (svc, token) = KernelServiceImpl::new_with_auth(coord); + assert!(!token.is_empty(), "Token must not be empty"); + // Sanity-check: UUID v4 is 36 chars (8-4-4-4-12 with dashes) + assert_eq!( + token.len(), + 36, + "Expected UUID-format token (len 36), got: {token}" + ); + // Verify the service is usable + let _ = svc; + } + + #[test] + fn new_with_auth_tokens_are_unique() { + let coord1 = Arc::new(Coordinator::new(Default::default())); + let coord2 = Arc::new(Coordinator::new(Default::default())); + let (_, token1) = KernelServiceImpl::new_with_auth(coord1); + let (_, token2) = KernelServiceImpl::new_with_auth(coord2); + assert_ne!(token1, token2, "Each call must produce a unique token"); + } + + // ----------------------------------------------------------------------- + // H-02: Error message sanitization — no internal 
details leaked to caller + // ----------------------------------------------------------------------- + + #[tokio::test] + async fn complete_with_provider_not_found_message_is_generic() { + let coord = Arc::new(Coordinator::new(Default::default())); + let service = KernelServiceImpl::new(coord); + + let request = Request::new(amplifier_module::CompleteWithProviderRequest { + provider_name: "secret-internal-provider".to_string(), + request: Some(make_chat_request("ping")), + }); + + let status = service.complete_with_provider(request).await.unwrap_err(); + assert_eq!(status.code(), tonic::Code::NotFound); + // Provider name must NOT leak to the caller + assert!( + !status.message().contains("secret-internal-provider"), + "Provider name must not appear in error message, got: {}", + status.message() + ); + } + + #[tokio::test] + async fn complete_with_provider_streaming_not_found_message_is_generic() { + let coord = Arc::new(Coordinator::new(Default::default())); + let service = KernelServiceImpl::new(coord); + + let request = Request::new(amplifier_module::CompleteWithProviderRequest { + provider_name: "secret-internal-provider".to_string(), + request: Some(make_chat_request("ping")), + }); + + let status = service + .complete_with_provider_streaming(request) + .await + .unwrap_err(); + assert_eq!(status.code(), tonic::Code::NotFound); + assert!( + !status.message().contains("secret-internal-provider"), + "Provider name must not appear in error message, got: {}", + status.message() + ); + } + + #[tokio::test] + async fn execute_tool_not_found_message_is_generic() { + let coord = Arc::new(Coordinator::new(Default::default())); + let service = KernelServiceImpl::new(coord); + + let request = Request::new(amplifier_module::ExecuteToolRequest { + tool_name: "secret-internal-tool".to_string(), + input_json: "{}".to_string(), + }); + + let status = service.execute_tool(request).await.unwrap_err(); + assert_eq!(status.code(), tonic::Code::NotFound); + assert!( + 
!status.message().contains("secret-internal-tool"), + "Tool name must not appear in error message, got: {}", + status.message() + ); + } + + #[tokio::test] + async fn emit_hook_invalid_json_message_has_no_serde_details() { + let coord = Arc::new(Coordinator::new(Default::default())); + let service = KernelServiceImpl::new(coord); + + let request = Request::new(amplifier_module::EmitHookRequest { + event: "test:event".to_string(), + data_json: "not-valid-json{{{".to_string(), + }); + + let status = service.emit_hook(request).await.unwrap_err(); + assert_eq!(status.code(), tonic::Code::InvalidArgument); + // The message should be exactly "Invalid data_json" with no serde details + assert_eq!( + status.message(), + "Invalid data_json", + "Expected generic message, got: {}", + status.message() + ); + } + + #[tokio::test] + async fn emit_hook_and_collect_invalid_json_message_has_no_serde_details() { + let coord = Arc::new(Coordinator::new(Default::default())); + let service = KernelServiceImpl::new(coord); + + let request = Request::new(amplifier_module::EmitHookAndCollectRequest { + event: "test:event".to_string(), + data_json: "bad-json{{".to_string(), + timeout_seconds: 5.0, + }); + + let status = service.emit_hook_and_collect(request).await.unwrap_err(); + assert_eq!(status.code(), tonic::Code::InvalidArgument); + assert_eq!( + status.message(), + "Invalid data_json", + "Expected generic message, got: {}", + status.message() + ); + } + + #[tokio::test] + async fn execute_tool_invalid_input_json_message_has_no_serde_details() { + use crate::testing::FakeTool; + let coord = Arc::new(Coordinator::new(Default::default())); + coord.mount_tool("my-tool", Arc::new(FakeTool::new("my-tool", "a test tool"))); + let service = KernelServiceImpl::new(coord); + + let request = Request::new(amplifier_module::ExecuteToolRequest { + tool_name: "my-tool".to_string(), + input_json: "not-valid-json{{{".to_string(), + }); + + let status = 
service.execute_tool(request).await.unwrap_err(); + assert_eq!(status.code(), tonic::Code::InvalidArgument); + assert_eq!( + status.message(), + "Invalid input JSON", + "Expected generic message, got: {}", + status.message() + ); + } + + #[tokio::test] + async fn register_capability_invalid_json_message_has_no_serde_details() { + let coord = Arc::new(Coordinator::new(Default::default())); + let service = KernelServiceImpl::new(coord); + + let request = Request::new(amplifier_module::RegisterCapabilityRequest { + name: "my-cap".to_string(), + value_json: "not-valid-json{{{".to_string(), + }); + + let status = service.register_capability(request).await.unwrap_err(); + assert_eq!(status.code(), tonic::Code::InvalidArgument); + assert_eq!( + status.message(), + "Invalid value_json", + "Expected generic message, got: {}", + status.message() + ); + } + + #[tokio::test] + async fn add_then_get_messages_roundtrip() { + use crate::testing::FakeContextManager; + let coord = Arc::new(Coordinator::new(Default::default())); + coord.set_context(Arc::new(FakeContextManager::new())); + let service = KernelServiceImpl::new(coord); + + // Add a message + let add_request = Request::new(amplifier_module::KernelAddMessageRequest { + session_id: String::new(), + message: Some(amplifier_module::Message { + role: amplifier_module::Role::User as i32, + content: Some(amplifier_module::message::Content::TextContent( + "Test message content".to_string(), + )), + name: String::new(), + tool_call_id: String::new(), + metadata_json: String::new(), + }), + }); + service.add_message(add_request).await.unwrap(); + + // Get messages back + let get_request = Request::new(amplifier_module::GetMessagesRequest { + session_id: String::new(), + }); + let result = service.get_messages(get_request).await.unwrap(); + let inner = result.into_inner(); + assert_eq!(inner.messages.len(), 1); + assert_eq!(inner.messages[0].role, amplifier_module::Role::User as i32); + // Verify content is a text block + match 
&inner.messages[0].content { + Some(amplifier_module::message::Content::TextContent(text)) => { + assert_eq!(text, "Test message content"); + } + other => panic!("Expected TextContent, got: {other:?}"), + } + } } diff --git a/crates/amplifier-core/src/lib.rs b/crates/amplifier-core/src/lib.rs index ee9c554..3390987 100644 --- a/crates/amplifier-core/src/lib.rs +++ b/crates/amplifier-core/src/lib.rs @@ -28,11 +28,14 @@ pub mod grpc_server; pub mod hooks; pub mod messages; pub mod models; +pub mod module_resolver; pub mod retry; pub mod session; pub mod testing; pub mod traits; pub mod transport; +#[cfg(feature = "wasm")] +pub mod wasm_engine; // --------------------------------------------------------------------------- // Re-exports — consumers write `use amplifier_core::Tool`, not diff --git a/crates/amplifier-core/src/models.rs b/crates/amplifier-core/src/models.rs index baf7f4a..b18b576 100644 --- a/crates/amplifier-core/src/models.rs +++ b/crates/amplifier-core/src/models.rs @@ -86,6 +86,7 @@ pub enum ModuleType { Context, Hook, Resolver, + Approval, } /// Session state. @@ -803,6 +804,10 @@ mod tests { serde_json::to_value(ModuleType::Resolver).unwrap(), json!("resolver") ); + assert_eq!( + serde_json::to_value(ModuleType::Approval).unwrap(), + json!("approval") + ); } #[test] diff --git a/crates/amplifier-core/src/module_resolver.rs b/crates/amplifier-core/src/module_resolver.rs new file mode 100644 index 0000000..1581cde --- /dev/null +++ b/crates/amplifier-core/src/module_resolver.rs @@ -0,0 +1,1188 @@ +//! Cross-language module resolver. +//! +//! Given a filesystem path, inspects its contents and determines: +//! - What transport to use (Python, WASM, gRPC) +//! - What module type it is (Tool, Provider, Orchestrator, etc.) +//! - Where the loadable artifact is +//! +//! Detection order (first match wins): +//! 1. `amplifier.toml` (explicit override) +//! 2. `.wasm` files (auto-detect via Component Model metadata) +//! 3. 
Python package (`__init__.py` fallback) +//! 4. Error + +use std::path::{Path, PathBuf}; +#[cfg(feature = "wasm")] +use std::sync::Arc; + +use crate::models::ModuleType; +use crate::transport::Transport; + +/// Known WASM Component Model interface prefixes mapped to module types. +/// +/// Export names in a WASM component include a version suffix (e.g., `@1.0.0`), +/// so we match using `starts_with` against these prefixes. +#[cfg(feature = "wasm")] +const KNOWN_INTERFACES: &[(&str, ModuleType)] = &[ + ("amplifier:modules/tool", ModuleType::Tool), + ("amplifier:modules/hook-handler", ModuleType::Hook), + ("amplifier:modules/context-manager", ModuleType::Context), + ("amplifier:modules/approval-provider", ModuleType::Approval), + ("amplifier:modules/provider", ModuleType::Provider), + ("amplifier:modules/orchestrator", ModuleType::Orchestrator), +]; + +/// Detect the module type of a WASM component by inspecting its exports. +/// +/// Loads the component using `wasmtime::component::Component::new`, iterates +/// over its exports, and matches export names against [`KNOWN_INTERFACES`]. +/// +/// Returns `Ok(ModuleType)` if exactly one known interface is found. +/// Returns `UnknownWasmInterface` if zero matches, `AmbiguousWasmInterface` +/// if more than one match. 
+#[cfg(feature = "wasm")]
+pub fn detect_wasm_module_type(
+    wasm_bytes: &[u8],
+    engine: Arc<wasmtime::Engine>,
+    wasm_path: &Path,
+) -> Result<ModuleType, ModuleResolverError> {
+    let component = wasmtime::component::Component::new(&engine, wasm_bytes).map_err(|e| {
+        ModuleResolverError::WasmLoadError {
+            path: wasm_path.to_path_buf(),
+            reason: e.to_string(),
+        }
+    })?;
+
+    let component_type = component.component_type();
+    let mut matched: Vec<(&str, ModuleType)> = Vec::new();
+
+    for (export_name, _) in component_type.exports(&engine) {
+        for &(prefix, ref module_type) in KNOWN_INTERFACES {
+            if export_name.starts_with(prefix) {
+                matched.push((prefix, module_type.clone()));
+            }
+        }
+    }
+
+    match matched.len() {
+        0 => Err(ModuleResolverError::UnknownWasmInterface {
+            path: wasm_path.to_path_buf(),
+        }),
+        1 => Ok(matched.into_iter().next().unwrap().1),
+        _ => Err(ModuleResolverError::AmbiguousWasmInterface {
+            path: wasm_path.to_path_buf(),
+            found: matched
+                .into_iter()
+                .map(|(prefix, _)| prefix.to_string())
+                .collect(),
+        }),
+    }
+}
+
+/// Parse a module type string into a `ModuleType` variant.
+///
+/// Accepts lowercase strings: "orchestrator", "provider", "tool", "context",
+/// "hook", "resolver", "approval". Returns `None` for unrecognized strings.
+pub fn parse_module_type(s: &str) -> Option<ModuleType> {
+    match s {
+        "orchestrator" => Some(ModuleType::Orchestrator),
+        "provider" => Some(ModuleType::Provider),
+        "tool" => Some(ModuleType::Tool),
+        "context" => Some(ModuleType::Context),
+        "hook" => Some(ModuleType::Hook),
+        "resolver" => Some(ModuleType::Resolver),
+        "approval" => Some(ModuleType::Approval),
+        _ => None,
+    }
+}
+
+/// Parse an `amplifier.toml` file content into a `ModuleManifest`.
+///
+/// The TOML must have a `[module]` section with `transport` and `type` fields.
+/// For gRPC transport, a `[grpc]` section with `endpoint` is required.
+/// For WASM transport, optional `artifact` field specifies the wasm filename
+/// (defaults to `module.wasm`). For Python/Native transport, derive package
+/// name from directory name.
+pub fn parse_amplifier_toml(
+    content: &str,
+    module_path: &Path,
+) -> Result<ModuleManifest, ModuleResolverError> {
+    let doc: toml::Table =
+        toml::from_str(content).map_err(|e| ModuleResolverError::TomlParseError {
+            path: module_path.to_path_buf(),
+            reason: e.to_string(),
+        })?;
+
+    let module_section = doc
+        .get("module")
+        .and_then(|v| v.as_table())
+        .ok_or_else(|| ModuleResolverError::TomlParseError {
+            path: module_path.to_path_buf(),
+            reason: "missing [module] section".to_string(),
+        })?;
+
+    let transport_str = module_section
+        .get("transport")
+        .and_then(|v| v.as_str())
+        .unwrap_or("python");
+    let transport = Transport::from_str(transport_str);
+
+    let type_str = module_section
+        .get("type")
+        .and_then(|v| v.as_str())
+        .ok_or_else(|| ModuleResolverError::TomlParseError {
+            path: module_path.to_path_buf(),
+            reason: "missing 'type' field in [module] section".to_string(),
+        })?;
+
+    let module_type =
+        parse_module_type(type_str).ok_or_else(|| ModuleResolverError::TomlParseError {
+            path: module_path.to_path_buf(),
+            reason: format!("unknown module type: {type_str}"),
+        })?;
+
+    let artifact = match transport {
+        Transport::Grpc => {
+            let endpoint = doc
+                .get("grpc")
+                .and_then(|v| v.as_table())
+                .and_then(|t| t.get("endpoint"))
+                .and_then(|v| v.as_str())
+                .ok_or_else(|| ModuleResolverError::TomlParseError {
+                    path: module_path.to_path_buf(),
+                    reason: "gRPC transport requires [grpc] section with 'endpoint' field"
+                        .to_string(),
+                })?;
+            ModuleArtifact::GrpcEndpoint(endpoint.to_string())
+        }
+        Transport::Wasm => {
+            let wasm_filename = module_section
+                .get("artifact")
+                .and_then(|v| v.as_str())
+                .unwrap_or("module.wasm");
+
+            // H-03: Reject path separators — artifact must be a simple filename,
+            // not a relative or absolute path that could escape the module directory.
+ if wasm_filename.contains('/') + || wasm_filename.contains('\\') + || wasm_filename.starts_with('.') + { + return Err(ModuleResolverError::TomlParseError { + path: module_path.to_path_buf(), + reason: "artifact must be a simple filename, not a path".to_string(), + }); + } + + let wasm_path = module_path.join(wasm_filename); + + // H-03: If the artifact already exists on disk, canonicalize both + // paths and verify the resolved artifact stays inside module_path. + // This catches symlink-based escapes that slip past the name check. + if wasm_path.exists() { + let canonical = + wasm_path + .canonicalize() + .map_err(|e| ModuleResolverError::TomlParseError { + path: module_path.to_path_buf(), + reason: format!("could not canonicalize artifact path: {e}"), + })?; + let canonical_base = module_path.canonicalize().map_err(|e| { + ModuleResolverError::TomlParseError { + path: module_path.to_path_buf(), + reason: format!("could not canonicalize module path: {e}"), + } + })?; + if !canonical.starts_with(&canonical_base) { + return Err(ModuleResolverError::TomlParseError { + path: module_path.to_path_buf(), + reason: "artifact path escapes module directory".to_string(), + }); + } + } + + ModuleArtifact::WasmBytes { + bytes: Vec::new(), // bytes loaded later by the transport layer + path: wasm_path, + } + } + Transport::Python | Transport::Native => { + let dir_name = module_path + .file_name() + .map(|n| n.to_string_lossy().to_string()) + .unwrap_or_else(|| "unknown".to_string()); + ModuleArtifact::PythonModule(dir_name) + } + }; + + Ok(ModuleManifest { + transport, + module_type, + artifact, + }) +} + +/// Describes a resolved module: what transport, what type, and where the artifact is. +#[derive(Debug, Clone, PartialEq)] +pub struct ModuleManifest { + /// Transport to use for loading (Python, WASM, gRPC). + pub transport: Transport, + /// Module type (Tool, Provider, Orchestrator, etc.). + pub module_type: ModuleType, + /// Where the loadable artifact lives. 
+    pub artifact: ModuleArtifact,
+}
+
+/// The loadable artifact for a resolved module.
+#[derive(Debug, Clone, PartialEq)]
+pub enum ModuleArtifact {
+    /// Raw WASM component bytes, plus the path they were read from.
+    WasmBytes { bytes: Vec<u8>, path: PathBuf },
+    /// A gRPC endpoint URL (e.g., "http://localhost:50051").
+    GrpcEndpoint(String),
+    /// A Python package name (e.g., "amplifier_module_tool_bash").
+    PythonModule(String),
+}
+
+/// Detect a Python package at the given directory path.
+///
+/// Checks two locations (first match wins):
+/// 1. `dir/__init__.py` — the directory itself is a package; derive name from
+///    the directory's file name, replacing dashes with underscores.
+/// 2. `dir/<subdir>/__init__.py` — a nested package; iterate immediate
+///    subdirectories looking for `__init__.py` and return the subdirectory name.
+///
+/// Returns the Python package name if found, or `None`.
+pub fn detect_python_package(dir: &Path) -> Option<String> {
+    // Check 1: dir itself has __init__.py
+    if dir.join("__init__.py").is_file() {
+        let name = dir.file_name()?.to_string_lossy().replace('-', "_");
+        return Some(name);
+    }
+
+    // Check 2: a subdirectory has __init__.py
+    let entries = std::fs::read_dir(dir).ok()?;
+    for entry in entries {
+        let entry = match entry {
+            Ok(e) => e,
+            Err(_) => continue,
+        };
+        let path = entry.path();
+        if path.is_dir() && path.join("__init__.py").is_file() {
+            let name = path.file_name()?.to_string_lossy().to_string();
+            return Some(name);
+        }
+    }
+
+    None
+}
+
+/// Resolve a module from a filesystem path.
+///
+/// Inspects the directory at `path` and returns a `ModuleManifest`
+/// describing the transport, module type, and artifact location.
+///
+/// Detection order (first match wins):
+/// 1. `amplifier.toml` — explicit manifest
+/// 2. `.wasm` file — auto-detected via Component Model metadata
+/// 3. Python package (`__init__.py`) — fallback with `ModuleType::Tool`
+/// 4. Error (`NoArtifactFound`)
+pub fn resolve_module(path: &Path) -> Result<ModuleManifest, ModuleResolverError> {
+    // Step 1: path must exist
+    if !path.exists() {
+        return Err(ModuleResolverError::PathNotFound {
+            path: path.to_path_buf(),
+        });
+    }
+
+    // Step 2: amplifier.toml takes priority
+    let toml_path = path.join("amplifier.toml");
+    if toml_path.is_file() {
+        let content = std::fs::read_to_string(&toml_path).map_err(|e| ModuleResolverError::Io {
+            path: toml_path.clone(),
+            source: e,
+        })?;
+        return parse_amplifier_toml(&content, path);
+    }
+
+    // Step 3: .wasm file detection
+    if let Some(wasm_path) = scan_for_wasm_file(path) {
+        #[cfg(feature = "wasm")]
+        {
+            let bytes = std::fs::read(&wasm_path).map_err(|e| ModuleResolverError::Io {
+                path: wasm_path.clone(),
+                source: e,
+            })?;
+            let engine = crate::wasm_engine::WasmEngine::new()
+                .map_err(|e| ModuleResolverError::WasmLoadError {
+                    path: wasm_path.clone(),
+                    reason: e.to_string(),
+                })?
+                .inner();
+            let module_type = detect_wasm_module_type(&bytes, engine, &wasm_path)?;
+            return Ok(ModuleManifest {
+                transport: Transport::Wasm,
+                module_type,
+                artifact: ModuleArtifact::WasmBytes {
+                    bytes,
+                    path: wasm_path,
+                },
+            });
+        }
+
+        #[cfg(not(feature = "wasm"))]
+        return Err(ModuleResolverError::WasmLoadError {
+            path: wasm_path,
+            reason: "WASM support not enabled".to_string(),
+        });
+    }
+
+    // Step 4: Python package fallback
+    if let Some(pkg_name) = detect_python_package(path) {
+        return Ok(ModuleManifest {
+            transport: Transport::Python,
+            module_type: ModuleType::Tool,
+            artifact: ModuleArtifact::PythonModule(pkg_name),
+        });
+    }
+
+    // Step 5: nothing found
+    Err(ModuleResolverError::NoArtifactFound {
+        path: path.to_path_buf(),
+    })
+}
+
+/// Errors from module resolution.
+#[derive(Debug, thiserror::Error)]
+pub enum ModuleResolverError {
+    /// The path does not exist or is not a directory.
+    #[error("module path does not exist: {path}")]
+    PathNotFound { path: PathBuf },
+
+    /// No loadable artifact found at the path.
+ #[error("could not detect module transport at {path}. Expected: .wasm file, amplifier.toml, or Python package (__init__.py).")] + NoArtifactFound { path: PathBuf }, + + /// WASM component does not export any known Amplifier module interface. + #[error("WASM component at {path} does not export any known Amplifier module interface. Known interfaces: amplifier:modules/tool, amplifier:modules/hook-handler, amplifier:modules/context-manager, amplifier:modules/approval-provider, amplifier:modules/provider, amplifier:modules/orchestrator")] + UnknownWasmInterface { path: PathBuf }, + + /// WASM component exports multiple Amplifier interfaces (ambiguous). + #[error("WASM component at {path} exports multiple Amplifier module interfaces ({}). A component should implement exactly one module type.", found.join(", "))] + AmbiguousWasmInterface { path: PathBuf, found: Vec }, + + /// Failed to parse `amplifier.toml`. + #[error("failed to parse amplifier.toml at {path}: {reason}")] + TomlParseError { path: PathBuf, reason: String }, + + /// Failed to read or compile a WASM file. + #[error("failed to load WASM component at {path}: {reason}")] + WasmLoadError { path: PathBuf, reason: String }, + + /// I/O error reading files. + #[error("I/O error at {path}: {source}")] + Io { + path: PathBuf, + source: std::io::Error, + }, +} + +/// A fully-loaded module, ready for use. +/// +/// Returned by [`load_module`] after dispatch to the appropriate transport bridge. +/// The `PythonDelegated` variant is a signal to the Python host that it should +/// load the module itself via importlib. +#[cfg(feature = "wasm")] +pub enum LoadedModule { + /// A loaded tool module. + Tool(Arc), + /// A loaded hook handler module. + Hook(Arc), + /// A loaded context manager module. + Context(Arc), + /// A loaded approval provider module. + Approval(Arc), + /// A loaded provider module. + Provider(Arc), + /// A loaded orchestrator module. 
+ Orchestrator(Arc), + /// Python/Native module — the Python host should load this via importlib. + PythonDelegated { + /// The Python package name to import. + package_name: String, + }, +} + +#[cfg(feature = "wasm")] +impl LoadedModule { + /// Returns the variant name as a static string (for diagnostics). + pub fn variant_name(&self) -> &'static str { + match self { + LoadedModule::Tool(_) => "Tool", + LoadedModule::Hook(_) => "Hook", + LoadedModule::Context(_) => "Context", + LoadedModule::Approval(_) => "Approval", + LoadedModule::Provider(_) => "Provider", + LoadedModule::Orchestrator(_) => "Orchestrator", + LoadedModule::PythonDelegated { .. } => "PythonDelegated", + } + } +} + +/// Load a module artifact into a runtime type, dispatching on transport and module type. +/// +/// For `Transport::Wasm`, reads bytes from the manifest artifact, then dispatches to +/// the appropriate `load_wasm_*` function based on `module_type`. +/// +/// For `Transport::Python` or `Transport::Native`, returns +/// [`LoadedModule::PythonDelegated`] as a signal to the Python host to handle loading +/// itself via importlib. +/// +/// For `Transport::Grpc`, returns an error — gRPC loading is async and must be done +/// directly with [`crate::transport::load_grpc_tool`] or +/// [`crate::transport::load_grpc_orchestrator`]. +/// +/// `coordinator` is required only for `ModuleType::Orchestrator` WASM modules. 
+#[cfg(feature = "wasm")] +pub fn load_module( + manifest: &ModuleManifest, + engine: Arc, + coordinator: Option>, +) -> Result> { + use crate::models::ModuleType; + + // Resolver modules are metadata-only — they cannot be loaded as runtime modules + if manifest.module_type == ModuleType::Resolver { + return Err("Resolver modules are not loadable as runtime modules".into()); + } + + match &manifest.transport { + Transport::Python | Transport::Native => { + let package_name = match &manifest.artifact { + ModuleArtifact::PythonModule(name) => name.clone(), + other => { + return Err(format!( + "expected PythonModule artifact for Python/Native transport, got {:?}", + other + ) + .into()) + } + }; + Ok(LoadedModule::PythonDelegated { package_name }) + } + + Transport::Wasm => { + let bytes = match &manifest.artifact { + ModuleArtifact::WasmBytes { bytes, .. } => bytes, + other => { + return Err(format!( + "expected WasmBytes artifact for WASM transport, got {:?}", + other + ) + .into()) + } + }; + + match &manifest.module_type { + ModuleType::Tool => { + let tool = crate::transport::load_wasm_tool(bytes, engine)?; + Ok(LoadedModule::Tool(tool)) + } + ModuleType::Hook => { + let hook = crate::transport::load_wasm_hook(bytes, engine)?; + Ok(LoadedModule::Hook(hook)) + } + ModuleType::Context => { + let ctx = crate::transport::load_wasm_context(bytes, engine)?; + Ok(LoadedModule::Context(ctx)) + } + ModuleType::Approval => { + let approval = crate::transport::load_wasm_approval(bytes, engine)?; + Ok(LoadedModule::Approval(approval)) + } + ModuleType::Provider => { + let provider = crate::transport::load_wasm_provider(bytes, engine)?; + Ok(LoadedModule::Provider(provider)) + } + ModuleType::Orchestrator => { + let coord = coordinator.ok_or( + "Orchestrator WASM module requires a Coordinator but none was provided", + )?; + let orch = crate::transport::load_wasm_orchestrator(bytes, engine, coord)?; + Ok(LoadedModule::Orchestrator(orch)) + } + // Resolver is rejected by the 
early-return guard above; this arm is unreachable. + ModuleType::Resolver => unreachable!( + "Resolver modules are rejected before transport dispatch" + ), + } + } + + Transport::Grpc => Err( + "gRPC module loading requires async runtime. Use load_grpc_tool() / load_grpc_orchestrator() directly.".into(), + ), + } +} + +/// Scan a directory for the first `.wasm` file. +/// +/// Reads the directory entries at `dir`, returning the path to the first +/// file with a `.wasm` extension, or `None` if no such file exists. +pub fn scan_for_wasm_file(dir: &Path) -> Option { + let entries = std::fs::read_dir(dir).ok()?; + for entry in entries { + let entry = match entry { + Ok(e) => e, + Err(_) => continue, + }; + let path = entry.path(); + if path.is_file() { + if let Some(ext) = path.extension() { + if ext == "wasm" { + return Some(path); + } + } + } + } + None +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn module_manifest_can_be_constructed() { + let manifest = ModuleManifest { + transport: Transport::Wasm, + module_type: ModuleType::Tool, + artifact: ModuleArtifact::WasmBytes { + bytes: vec![0, 1, 2], + path: PathBuf::from("/tmp/echo-tool.wasm"), + }, + }; + assert_eq!( + manifest, + ModuleManifest { + transport: Transport::Wasm, + module_type: ModuleType::Tool, + artifact: ModuleArtifact::WasmBytes { + bytes: vec![0, 1, 2], + path: PathBuf::from("/tmp/echo-tool.wasm"), + }, + } + ); + } + + #[test] + fn module_artifact_grpc_variant() { + let artifact = ModuleArtifact::GrpcEndpoint("http://localhost:50051".into()); + match artifact { + ModuleArtifact::GrpcEndpoint(endpoint) => { + assert_eq!(endpoint, "http://localhost:50051"); + } + _ => panic!("expected GrpcEndpoint variant"), + } + } + + #[test] + fn module_artifact_python_variant() { + let artifact = ModuleArtifact::PythonModule("amplifier_module_tool_bash".into()); + match artifact { + ModuleArtifact::PythonModule(name) => { + assert_eq!(name, "amplifier_module_tool_bash"); + } + _ => 
panic!("expected PythonModule variant"), + } + } + + #[test] + fn module_resolver_error_ambiguous_displays_found_interfaces() { + let err = ModuleResolverError::AmbiguousWasmInterface { + path: PathBuf::from("/tmp/multi.wasm"), + found: vec![ + "amplifier:modules/tool".into(), + "amplifier:modules/hook-handler".into(), + ], + }; + let msg = format!("{err}"); + assert!(msg.contains("/tmp/multi.wasm")); + assert!(msg.contains("amplifier:modules/tool, amplifier:modules/hook-handler")); + } + + #[test] + fn module_manifest_supports_equality() { + let a = ModuleManifest { + transport: Transport::Wasm, + module_type: ModuleType::Tool, + artifact: ModuleArtifact::GrpcEndpoint("http://localhost:50051".into()), + }; + let b = ModuleManifest { + transport: Transport::Wasm, + module_type: ModuleType::Tool, + artifact: ModuleArtifact::GrpcEndpoint("http://localhost:50051".into()), + }; + assert_eq!(a, b); + } + + #[test] + fn module_resolver_error_displays_correctly() { + let err = ModuleResolverError::NoArtifactFound { + path: PathBuf::from("/tmp/empty"), + }; + let msg = format!("{err}"); + assert!(msg.contains("/tmp/empty")); + assert!(msg.contains(".wasm")); + assert!(msg.contains("amplifier.toml")); + assert!(msg.contains("__init__.py")); + } + + // --- parse_amplifier_toml tests --- + + #[test] + fn parse_toml_grpc_transport() { + let toml_content = r#" +[module] +transport = "grpc" +type = "tool" + +[grpc] +endpoint = "http://localhost:50051" +"#; + let path = Path::new("/modules/my-tool"); + let manifest = parse_amplifier_toml(toml_content, path).unwrap(); + assert_eq!(manifest.transport, Transport::Grpc); + assert_eq!(manifest.module_type, ModuleType::Tool); + assert_eq!( + manifest.artifact, + ModuleArtifact::GrpcEndpoint("http://localhost:50051".into()) + ); + } + + #[test] + fn parse_toml_wasm_transport() { + let toml_content = r#" +[module] +transport = "wasm" +type = "hook" +artifact = "my-hook.wasm" +"#; + let path = Path::new("/modules/my-hook"); + let manifest 
= parse_amplifier_toml(toml_content, path).unwrap(); + assert_eq!(manifest.transport, Transport::Wasm); + assert_eq!(manifest.module_type, ModuleType::Hook); + match &manifest.artifact { + ModuleArtifact::WasmBytes { + path: wasm_path, .. + } => { + assert_eq!(wasm_path, &PathBuf::from("/modules/my-hook/my-hook.wasm")); + } + other => panic!("expected WasmBytes, got {other:?}"), + } + } + + #[test] + fn parse_toml_python_transport() { + let toml_content = r#" +[module] +transport = "python" +type = "provider" +"#; + let path = Path::new("/modules/my-provider"); + let manifest = parse_amplifier_toml(toml_content, path).unwrap(); + assert_eq!(manifest.transport, Transport::Python); + assert_eq!(manifest.module_type, ModuleType::Provider); + assert_eq!( + manifest.artifact, + ModuleArtifact::PythonModule("my-provider".into()) + ); + } + + #[test] + fn parse_toml_grpc_missing_endpoint_errors() { + let toml_content = r#" +[module] +transport = "grpc" +type = "tool" +"#; + let path = Path::new("/modules/my-tool"); + let result = parse_amplifier_toml(toml_content, path); + assert!(result.is_err()); + let msg = format!("{}", result.unwrap_err()); + assert!(msg.contains("endpoint")); + } + + #[test] + fn parse_toml_missing_type_errors() { + let toml_content = r#" +[module] +transport = "grpc" +"#; + let path = Path::new("/modules/my-tool"); + let result = parse_amplifier_toml(toml_content, path); + assert!(result.is_err()); + let msg = format!("{}", result.unwrap_err()); + assert!(msg.contains("type")); + } + + #[test] + fn parse_toml_unknown_module_type_errors() { + let toml_content = r#" +[module] +transport = "grpc" +type = "foobar" + +[grpc] +endpoint = "http://localhost:50051" +"#; + let path = Path::new("/modules/my-tool"); + let result = parse_amplifier_toml(toml_content, path); + assert!(result.is_err()); + let msg = format!("{}", result.unwrap_err()); + assert!(msg.contains("unknown module type: foobar")); + } + + #[test] + fn 
parse_toml_missing_module_section_errors() { + let toml_content = r#" +[grpc] +endpoint = "http://localhost:50051" +"#; + let path = Path::new("/modules/my-tool"); + let result = parse_amplifier_toml(toml_content, path); + assert!(result.is_err()); + let msg = format!("{}", result.unwrap_err()); + assert!(msg.contains("module")); + } + + // --- scan_for_wasm_file tests --- + + #[test] + fn scan_wasm_finds_wasm_file() { + let dir = tempfile::tempdir().unwrap(); + let wasm_path = dir.path().join("echo-tool.wasm"); + std::fs::write(&wasm_path, b"fake wasm").unwrap(); + + let result = scan_for_wasm_file(dir.path()); + assert!(result.is_some(), "expected to find a .wasm file"); + assert_eq!(result.unwrap(), wasm_path); + } + + #[test] + fn scan_wasm_returns_none_for_empty_dir() { + let dir = tempfile::tempdir().unwrap(); + + let result = scan_for_wasm_file(dir.path()); + assert!(result.is_none(), "expected None for empty directory"); + } + + #[cfg(feature = "wasm")] + fn fixture_path(name: &str) -> std::path::PathBuf { + let manifest = std::path::Path::new(env!("CARGO_MANIFEST_DIR")); + manifest.join("../../tests/fixtures/wasm").join(name) + } + + #[cfg(feature = "wasm")] + fn fixture_bytes(name: &str) -> Vec { + let path = fixture_path(name); + std::fs::read(&path) + .unwrap_or_else(|e| panic!("fixture {name} not found at {}: {e}", path.display())) + } + + #[cfg(feature = "wasm")] + fn make_engine() -> std::sync::Arc { + crate::wasm_engine::WasmEngine::new().unwrap().inner() + } + + #[cfg(feature = "wasm")] + fn assert_detects(fixture: &str, expected: ModuleType) { + let bytes = fixture_bytes(fixture); + let path = fixture_path(fixture); + let engine = make_engine(); + let result = detect_wasm_module_type(&bytes, engine, &path).unwrap(); + assert_eq!(result, expected); + } + + #[cfg(feature = "wasm")] + #[test] + fn detect_wasm_module_type_tool() { + assert_detects("echo-tool.wasm", ModuleType::Tool); + } + + #[cfg(feature = "wasm")] + #[test] + fn 
detect_wasm_module_type_hook() { + assert_detects("deny-hook.wasm", ModuleType::Hook); + } + + #[cfg(feature = "wasm")] + #[test] + fn detect_wasm_module_type_context() { + assert_detects("memory-context.wasm", ModuleType::Context); + } + + #[cfg(feature = "wasm")] + #[test] + fn detect_wasm_module_type_approval() { + assert_detects("auto-approve.wasm", ModuleType::Approval); + } + + #[cfg(feature = "wasm")] + #[test] + fn detect_wasm_module_type_provider() { + assert_detects("echo-provider.wasm", ModuleType::Provider); + } + + #[cfg(feature = "wasm")] + #[test] + fn detect_wasm_module_type_orchestrator() { + assert_detects("passthrough-orchestrator.wasm", ModuleType::Orchestrator); + } + + #[test] + fn scan_wasm_ignores_non_wasm_files() { + let dir = tempfile::tempdir().unwrap(); + std::fs::write(dir.path().join("README.md"), b"# readme").unwrap(); + std::fs::write(dir.path().join("lib.py"), b"pass").unwrap(); + + let result = scan_for_wasm_file(dir.path()); + assert!( + result.is_none(), + "expected None when no .wasm files present" + ); + } + + // --- detect_python_package tests --- + + #[test] + fn detect_python_package_with_init_py() { + // Directory itself is a Python package (has __init__.py at top level). + // Name derived from directory name with dashes replaced by underscores. + let dir = tempfile::tempdir().unwrap(); + let pkg_dir = dir.path().join("amplifier-module-tool-bash"); + std::fs::create_dir_all(&pkg_dir).unwrap(); + std::fs::write(pkg_dir.join("__init__.py"), b"").unwrap(); + + let result = detect_python_package(&pkg_dir); + assert_eq!(result, Some("amplifier_module_tool_bash".to_string())); + } + + #[test] + fn detect_python_package_with_nested_package() { + // Directory contains a subdirectory that is a Python package. 
+ let dir = tempfile::tempdir().unwrap(); + let pkg_dir = dir.path().join("my-module"); + let nested = pkg_dir.join("amplifier_module_tool_bash"); + std::fs::create_dir_all(&nested).unwrap(); + std::fs::write(nested.join("__init__.py"), b"").unwrap(); + + let result = detect_python_package(&pkg_dir); + assert_eq!(result, Some("amplifier_module_tool_bash".to_string())); + } + + #[test] + fn detect_python_package_empty_dir() { + let dir = tempfile::tempdir().unwrap(); + + let result = detect_python_package(dir.path()); + assert_eq!(result, None); + } + + #[test] + fn detect_python_package_no_init_py() { + // Directory has files but no __init__.py anywhere. + let dir = tempfile::tempdir().unwrap(); + std::fs::write(dir.path().join("README.md"), b"# readme").unwrap(); + std::fs::write(dir.path().join("main.py"), b"print('hello')").unwrap(); + + let result = detect_python_package(dir.path()); + assert_eq!(result, None); + } + + // --- resolve_module tests --- + + #[test] + fn resolve_module_with_amplifier_toml() { + let dir = tempfile::tempdir().expect("create temp dir"); + let toml_content = r#" +[module] +transport = "grpc" +type = "tool" + +[grpc] +endpoint = "http://localhost:9999" +"#; + std::fs::write(dir.path().join("amplifier.toml"), toml_content).expect("write toml"); + // Also add a .wasm file to prove TOML takes priority + std::fs::write(dir.path().join("echo-tool.wasm"), b"fake").expect("write wasm"); + + let manifest = resolve_module(dir.path()).expect("should resolve"); + assert_eq!(manifest.transport, Transport::Grpc); + assert_eq!(manifest.module_type, ModuleType::Tool); + match manifest.artifact { + ModuleArtifact::GrpcEndpoint(ref ep) => assert_eq!(ep, "http://localhost:9999"), + _ => panic!("expected GrpcEndpoint"), + } + } + + #[test] + fn resolve_module_with_python_package() { + let dir = tempfile::tempdir().expect("create temp dir"); + std::fs::write(dir.path().join("__init__.py"), b"# package").expect("write"); + + let manifest = 
resolve_module(dir.path()).expect("should resolve"); + assert_eq!(manifest.transport, Transport::Python); + } + + #[test] + fn resolve_module_empty_dir_errors() { + let dir = tempfile::tempdir().expect("create temp dir"); + let result = resolve_module(dir.path()); + assert!(result.is_err()); + let err_msg = format!("{}", result.unwrap_err()); + assert!(err_msg.contains("could not detect")); + } + + #[test] + fn resolve_module_nonexistent_path_errors() { + let result = resolve_module(Path::new("/tmp/nonexistent-module-path-xyz")); + assert!(result.is_err()); + let err_msg = format!("{}", result.unwrap_err()); + assert!(err_msg.contains("does not exist")); + } + + #[cfg(feature = "wasm")] + #[test] + fn resolve_module_with_real_wasm_fixture() { + // Create a temp dir and copy a real fixture into it + let dir = tempfile::tempdir().expect("create temp dir"); + let wasm_bytes = fixture_bytes("echo-tool.wasm"); + std::fs::write(dir.path().join("echo-tool.wasm"), &wasm_bytes).expect("write wasm"); + + let manifest = resolve_module(dir.path()).expect("should resolve"); + assert_eq!(manifest.transport, Transport::Wasm); + assert_eq!(manifest.module_type, ModuleType::Tool); + match &manifest.artifact { + ModuleArtifact::WasmBytes { bytes, path } => { + assert!(!bytes.is_empty()); + assert!(path.to_string_lossy().contains("echo-tool.wasm")); + } + _ => panic!("expected WasmBytes"), + } + } + + #[cfg(feature = "wasm")] + #[tokio::test] + async fn load_module_wasm_tool() { + let dir = tempfile::tempdir().expect("create temp dir"); + let wasm_bytes = fixture_bytes("echo-tool.wasm"); + std::fs::write(dir.path().join("echo-tool.wasm"), &wasm_bytes).expect("write wasm"); + + let manifest = resolve_module(dir.path()).expect("should resolve"); + let engine = make_engine(); + let coordinator = std::sync::Arc::new(crate::coordinator::Coordinator::new_for_test()); + let result = load_module(&manifest, engine, Some(coordinator)); + assert!(result.is_ok()); + match result.unwrap() { + 
LoadedModule::Tool(tool) => assert_eq!(tool.name(), "echo-tool"), + other => panic!("expected Tool, got {:?}", other.variant_name()), + } + } + + #[cfg(feature = "wasm")] + #[test] + fn load_module_python_returns_signal() { + let dir = tempfile::tempdir().expect("create temp dir"); + std::fs::write(dir.path().join("__init__.py"), b"# package").expect("write"); + + let manifest = resolve_module(dir.path()).expect("should resolve"); + let engine = make_engine(); + let result = load_module(&manifest, engine, None); + assert!(result.is_ok()); + match result.unwrap() { + LoadedModule::PythonDelegated { package_name } => { + assert!(!package_name.is_empty()); + } + other => panic!("expected PythonDelegated, got {:?}", other.variant_name()), + } + } + + /// Helper: resolve a fixture source directory via its amplifier.toml manifest and + /// assert the expected transport and module type. Mirrors the `assert_detects` helper + /// used for the WASM auto-detection path. + fn assert_resolves_toml(fixture_dir: &str, expected: ModuleType) { + let manifest_dir = std::path::Path::new(env!("CARGO_MANIFEST_DIR")); + let fixture_src = manifest_dir.join(format!("../../tests/fixtures/wasm/src/{fixture_dir}")); + assert!( + fixture_src.exists(), + "fixture source dir should exist: {}", + fixture_src.display() + ); + + let manifest = resolve_module(&fixture_src).expect("should resolve via amplifier.toml"); + assert_eq!(manifest.transport, Transport::Wasm); + assert_eq!(manifest.module_type, expected); + } + + #[test] + fn resolve_fixture_via_amplifier_toml() { + assert_resolves_toml("echo-tool", ModuleType::Tool); + } + + #[test] + fn resolve_fixture_via_amplifier_toml_hook() { + assert_resolves_toml("deny-hook", ModuleType::Hook); + } + + #[test] + fn resolve_fixture_via_amplifier_toml_context() { + assert_resolves_toml("memory-context", ModuleType::Context); + } + + #[test] + fn resolve_fixture_via_amplifier_toml_approval() { + assert_resolves_toml("auto-approve", 
ModuleType::Approval); + } + + #[test] + fn resolve_fixture_via_amplifier_toml_provider() { + assert_resolves_toml("echo-provider", ModuleType::Provider); + } + + #[test] + fn resolve_fixture_via_amplifier_toml_orchestrator() { + assert_resolves_toml("passthrough-orchestrator", ModuleType::Orchestrator); + } + + // --- path traversal tests (H-03) --- + + #[test] + fn parse_toml_wasm_artifact_path_with_slashes_rejected() { + let toml_content = r#" +[module] +transport = "wasm" +type = "tool" +artifact = "../../etc/passwd" +"#; + let path = Path::new("/tmp/test-module"); + let result = parse_amplifier_toml(toml_content, path); + assert!( + result.is_err(), + "expected error for artifact with path separators" + ); + let msg = format!("{}", result.unwrap_err()); + assert!( + msg.contains("simple filename"), + "error should mention 'simple filename': {msg}" + ); + } + + #[test] + fn parse_toml_wasm_artifact_dotdot_relative_rejected() { + let toml_content = r#" +[module] +transport = "wasm" +type = "tool" +artifact = "../sibling/module.wasm" +"#; + let path = Path::new("/tmp/test-module"); + let result = parse_amplifier_toml(toml_content, path); + assert!( + result.is_err(), + "expected error for dotdot relative artifact path" + ); + let msg = format!("{}", result.unwrap_err()); + assert!( + msg.contains("simple filename"), + "error should mention 'simple filename': {msg}" + ); + } + + #[test] + fn parse_toml_wasm_artifact_hidden_dot_file_rejected() { + let toml_content = r#" +[module] +transport = "wasm" +type = "tool" +artifact = ".hidden.wasm" +"#; + let path = Path::new("/tmp/test-module"); + let result = parse_amplifier_toml(toml_content, path); + assert!( + result.is_err(), + "expected error for artifact starting with '.'" + ); + let msg = format!("{}", result.unwrap_err()); + assert!( + msg.contains("simple filename"), + "error should mention 'simple filename': {msg}" + ); + } + + #[test] + fn parse_toml_wasm_artifact_simple_filename_accepted() { + // A 
well-formed artifact = "module.wasm" must be accepted. + // Uses a non-existent path — canonicalization is skipped when the + // file is absent (path is resolved at load-time, not parse-time). + let toml_content = r#" +[module] +transport = "wasm" +type = "tool" +artifact = "module.wasm" +"#; + let path = Path::new("/tmp/test-module"); + let result = parse_amplifier_toml(toml_content, path); + assert!( + result.is_ok(), + "expected success for simple filename, got: {:?}", + result.err() + ); + } + + #[test] + fn parse_toml_wasm_artifact_symlink_escape_rejected() { + // An artifact that is a simple filename but resolves via symlink to a + // path outside the module directory must be rejected (confinement check). + use std::os::unix::fs::symlink; + + let base = tempfile::tempdir().unwrap(); + let module_dir = base.path().join("my-module"); + std::fs::create_dir(&module_dir).unwrap(); + + // Create a "sensitive" wasm file one level above the module dir. + let sensitive = base.path().join("sensitive.wasm"); + std::fs::write(&sensitive, b"sensitive data").unwrap(); + + // Symlink inside the module dir → points outside. 
+ symlink(&sensitive, module_dir.join("evil.wasm")).unwrap(); + + let toml_content = r#" +[module] +transport = "wasm" +type = "tool" +artifact = "evil.wasm" +"#; + let result = parse_amplifier_toml(toml_content, &module_dir); + assert!( + result.is_err(), + "expected error when artifact symlink escapes module directory" + ); + let msg = format!("{}", result.unwrap_err()); + assert!( + msg.contains("escapes module directory"), + "error should mention 'escapes module directory': {msg}" + ); + } + + #[cfg(feature = "wasm")] + #[test] + fn load_module_resolver_type_errors() { + let manifest = ModuleManifest { + transport: Transport::Python, + module_type: ModuleType::Resolver, + artifact: ModuleArtifact::PythonModule("some_resolver".into()), + }; + let engine = make_engine(); + let result = load_module(&manifest, engine, None); + assert!(result.is_err()); + } +} diff --git a/crates/amplifier-core/src/session.rs b/crates/amplifier-core/src/session.rs index 73ec9d6..2fa515c 100644 --- a/crates/amplifier-core/src/session.rs +++ b/crates/amplifier-core/src/session.rs @@ -19,6 +19,7 @@ use std::collections::HashMap; use std::sync::atomic::{AtomicBool, Ordering}; +use std::sync::Arc; use serde_json::Value; @@ -138,7 +139,7 @@ impl SessionConfig { pub struct Session { session_id: String, parent_id: Option, - coordinator: Coordinator, + coordinator: Arc, initialized: AtomicBool, status: SessionState, is_resumed: bool, @@ -158,7 +159,7 @@ impl Session { parent_id: Option, ) -> Self { let id = session_id.unwrap_or_else(|| uuid::Uuid::new_v4().to_string()); - let coordinator = Coordinator::new(config.config); + let coordinator = Arc::new(Coordinator::new(config.config)); // Set default fields for all hook events coordinator.hooks().set_default_fields(serde_json::json!({ @@ -222,9 +223,27 @@ impl Session { &self.coordinator } - /// Mutable reference to the coordinator (for mounting modules). + /// Mutable reference to the coordinator (for mounting modules during setup). 
+ /// + /// # Panics + /// + /// Panics if the coordinator `Arc` has already been shared via + /// [`coordinator_shared()`](Self::coordinator_shared). Call this only + /// during the setup phase, before sharing the coordinator with other + /// services (e.g. `KernelServiceImpl`). pub fn coordinator_mut(&mut self) -> &mut Coordinator { - &mut self.coordinator + Arc::get_mut(&mut self.coordinator) + .expect("coordinator_mut() called after Arc was shared — only call during setup") + } + + /// Clone of the shared `Arc`, for passing to services that + /// need long-lived access to the coordinator (e.g. `KernelServiceImpl`). + /// + /// After calling this method, [`coordinator_mut()`](Self::coordinator_mut) + /// will panic because the Arc is no longer uniquely owned. Mount all + /// modules via `coordinator_mut()` *before* calling this. + pub fn coordinator_shared(&self) -> Arc { + Arc::clone(&self.coordinator) } /// Mark the session as initialized. @@ -805,4 +824,58 @@ mod tests { let result = SessionConfig::from_json("not json"); assert!(result.is_err()); } + + // --------------------------------------------------------------- + // Task 9: Arc — coordinator_shared() + // --------------------------------------------------------------- + + #[test] + fn coordinator_shared_returns_arc_to_same_coordinator() { + let config = SessionConfig::minimal("loop-basic", "context-simple"); + let session = Session::new(config, None, None); + + // Two calls should return Arcs pointing to the same allocation + let arc1 = session.coordinator_shared(); + let arc2 = session.coordinator_shared(); + assert!( + Arc::ptr_eq(&arc1, &arc2), + "coordinator_shared() should return clones of the same Arc" + ); + + // The Arc should behave like the coordinator + assert!(arc1.tools().is_empty()); + } + + #[test] + fn coordinator_and_coordinator_mut_still_work() { + let config = SessionConfig::minimal("loop-basic", "context-simple"); + let mut session = Session::new(config, None, None); + + // 
coordinator_mut() should still work for mounting modules + session + .coordinator_mut() + .mount_tool("echo", Arc::new(FakeTool::new("echo", "echoes"))); + + // coordinator() should see the change + let tools = session.coordinator().tools(); + assert_eq!(tools.len(), 1); + assert!(tools.contains_key("echo")); + } + + #[test] + fn coordinator_shared_reflects_mounted_modules() { + let config = SessionConfig::minimal("loop-basic", "context-simple"); + let mut session = Session::new(config, None, None); + + // Mount a tool via coordinator_mut() + session + .coordinator_mut() + .mount_tool("search", Arc::new(FakeTool::new("search", "searches"))); + + // The shared Arc should see the same state + let shared = session.coordinator_shared(); + let tools = shared.tools(); + assert_eq!(tools.len(), 1); + assert!(tools.contains_key("search")); + } } diff --git a/crates/amplifier-core/src/transport.rs b/crates/amplifier-core/src/transport.rs index b233627..a8da86c 100644 --- a/crates/amplifier-core/src/transport.rs +++ b/crates/amplifier-core/src/transport.rs @@ -2,7 +2,7 @@ use std::sync::Arc; -use crate::traits::Tool; +use crate::traits::{Orchestrator, Tool}; /// Supported transport types. #[derive(Debug, Clone, PartialEq)] @@ -34,6 +34,24 @@ pub async fn load_grpc_tool( Ok(Arc::new(bridge)) } +/// Load an orchestrator module via gRPC transport. +/// +/// # Arguments +/// +/// * `endpoint` — gRPC endpoint URL (e.g. `"http://localhost:50051"`). +/// * `session_id` — Session identifier threaded through execute requests so +/// the remote orchestrator can route KernelService callbacks back to the +/// correct session. +pub async fn load_grpc_orchestrator( + endpoint: &str, + session_id: &str, +) -> Result, Box> { + let bridge = + crate::bridges::grpc_orchestrator::GrpcOrchestratorBridge::connect(endpoint, session_id) + .await?; + Ok(Arc::new(bridge)) +} + /// Load a native Rust tool module (zero-overhead, no bridge). 
pub fn load_native_tool(tool: impl Tool + 'static) -> Arc { Arc::new(tool) @@ -43,8 +61,67 @@ pub fn load_native_tool(tool: impl Tool + 'static) -> Arc { #[cfg(feature = "wasm")] pub fn load_wasm_tool( wasm_bytes: &[u8], + engine: Arc, ) -> Result, Box> { - let bridge = crate::bridges::wasm_tool::WasmToolBridge::from_bytes(wasm_bytes)?; + let bridge = crate::bridges::wasm_tool::WasmToolBridge::from_bytes(wasm_bytes, engine)?; + Ok(Arc::new(bridge)) +} + +/// Load a WASM hook handler from raw bytes (requires `wasm` feature). +#[cfg(feature = "wasm")] +pub fn load_wasm_hook( + wasm_bytes: &[u8], + engine: Arc, +) -> Result, Box> { + let bridge = crate::bridges::wasm_hook::WasmHookBridge::from_bytes(wasm_bytes, engine)?; + Ok(Arc::new(bridge)) +} + +/// Load a WASM context manager from raw bytes (requires `wasm` feature). +#[cfg(feature = "wasm")] +pub fn load_wasm_context( + wasm_bytes: &[u8], + engine: Arc, +) -> Result, Box> { + let bridge = crate::bridges::wasm_context::WasmContextBridge::from_bytes(wasm_bytes, engine)?; + Ok(Arc::new(bridge)) +} + +/// Load a WASM approval provider from raw bytes (requires `wasm` feature). +#[cfg(feature = "wasm")] +pub fn load_wasm_approval( + wasm_bytes: &[u8], + engine: Arc, +) -> Result, Box> { + let bridge = crate::bridges::wasm_approval::WasmApprovalBridge::from_bytes(wasm_bytes, engine)?; + Ok(Arc::new(bridge)) +} + +/// Load a WASM provider from raw bytes (requires `wasm` feature). +#[cfg(feature = "wasm")] +pub fn load_wasm_provider( + wasm_bytes: &[u8], + engine: Arc, +) -> Result, Box> { + let bridge = crate::bridges::wasm_provider::WasmProviderBridge::from_bytes(wasm_bytes, engine)?; + Ok(Arc::new(bridge)) +} + +/// Load a WASM orchestrator from raw bytes (requires `wasm` feature). +/// +/// The orchestrator bridge requires a [`Coordinator`](crate::coordinator::Coordinator) +/// for kernel-service host imports used during execution. 
+#[cfg(feature = "wasm")] +pub fn load_wasm_orchestrator( + wasm_bytes: &[u8], + engine: Arc, + coordinator: Arc, +) -> Result, Box> { + let bridge = crate::bridges::wasm_orchestrator::WasmOrchestratorBridge::from_bytes( + wasm_bytes, + engine, + coordinator, + )?; Ok(Arc::new(bridge)) } @@ -60,4 +137,69 @@ mod tests { assert_eq!(Transport::from_str("wasm"), Transport::Wasm); assert_eq!(Transport::from_str("unknown"), Transport::Python); } + + #[cfg(feature = "wasm")] + fn fixture(name: &str) -> Vec { + // CARGO_MANIFEST_DIR = …/crates/amplifier-core; fixtures live at workspace root. + let manifest = std::path::Path::new(env!("CARGO_MANIFEST_DIR")); + let path = manifest.join("../../tests/fixtures/wasm").join(name); + std::fs::read(&path) + .unwrap_or_else(|e| panic!("fixture {name} not found at {}: {e}", path.display())) + } + + #[cfg(feature = "wasm")] + #[test] + fn load_wasm_tool_returns_arc_dyn_tool() { + let wasm_bytes = fixture("echo-tool.wasm"); + let engine = crate::wasm_engine::WasmEngine::new().unwrap(); + let tool = super::load_wasm_tool(&wasm_bytes, engine.inner()); + assert!(tool.is_ok()); + assert_eq!(tool.unwrap().name(), "echo-tool"); + } + + #[cfg(feature = "wasm")] + #[test] + fn load_wasm_hook_returns_arc_dyn_hook_handler() { + let wasm_bytes = fixture("deny-hook.wasm"); + let engine = crate::wasm_engine::WasmEngine::new().unwrap(); + let hook = super::load_wasm_hook(&wasm_bytes, engine.inner()); + assert!(hook.is_ok()); + } + + #[cfg(feature = "wasm")] + #[test] + fn load_wasm_context_returns_arc_dyn_context_manager() { + let wasm_bytes = fixture("memory-context.wasm"); + let engine = crate::wasm_engine::WasmEngine::new().unwrap(); + let ctx = super::load_wasm_context(&wasm_bytes, engine.inner()); + assert!(ctx.is_ok()); + } + + #[cfg(feature = "wasm")] + #[test] + fn load_wasm_approval_returns_arc_dyn_approval_provider() { + let wasm_bytes = fixture("auto-approve.wasm"); + let engine = crate::wasm_engine::WasmEngine::new().unwrap(); + let 
approval = super::load_wasm_approval(&wasm_bytes, engine.inner()); + assert!(approval.is_ok()); + } + + #[cfg(feature = "wasm")] + #[test] + fn load_wasm_provider_returns_arc_dyn_provider() { + let wasm_bytes = fixture("echo-provider.wasm"); + let engine = crate::wasm_engine::WasmEngine::new().unwrap(); + let provider = super::load_wasm_provider(&wasm_bytes, engine.inner()); + assert!(provider.is_ok()); + } + + #[cfg(feature = "wasm")] + #[test] + fn load_wasm_orchestrator_returns_arc_dyn_orchestrator() { + let wasm_bytes = fixture("passthrough-orchestrator.wasm"); + let engine = crate::wasm_engine::WasmEngine::new().unwrap(); + let coordinator = std::sync::Arc::new(crate::coordinator::Coordinator::new_for_test()); + let orch = super::load_wasm_orchestrator(&wasm_bytes, engine.inner(), coordinator); + assert!(orch.is_ok()); + } } diff --git a/crates/amplifier-core/src/wasm_engine.rs b/crates/amplifier-core/src/wasm_engine.rs new file mode 100644 index 0000000..65b3069 --- /dev/null +++ b/crates/amplifier-core/src/wasm_engine.rs @@ -0,0 +1,137 @@ +//! Shared Wasmtime engine infrastructure. +//! +//! Provides a `WasmEngine` wrapper holding a shared `Arc` +//! with the component model and epoch interruption enabled. + +use std::sync::Arc; +use wasmtime::Engine; + +/// Shared Wasmtime engine wrapper. +/// +/// Holds an `Arc` so clones share the same underlying engine. +/// The engine is configured with: +/// - Component Model support (`wasm_component_model(true)`) +/// - Epoch-based interruption (`epoch_interruption(true)`) — C-02 +/// +/// A background ticker thread is spawned on construction that increments +/// the engine's epoch counter every 10 ms (~100 Hz). Bridge stores set a +/// deadline of 3 000 ticks (~30 s) so runaway WASM modules are terminated +/// automatically. 
+#[derive(Clone)] +pub struct WasmEngine { + engine: Arc, +} + +impl WasmEngine { + /// Create a new `WasmEngine` with the component model and epoch + /// interruption enabled, and spawn a background ticker thread. + /// + /// # Errors + /// + /// Returns an error if wasmtime fails to build the engine (e.g. unsupported + /// CPU features). This is extremely rare in practice. + pub fn new() -> Result> { + let mut config = wasmtime::Config::new(); + config.wasm_component_model(true); + // C-02: enable epoch interruption so stores can set a time-budget. + config.epoch_interruption(true); + let engine = Arc::new(Engine::new(&config)?); + + // Spawn a background thread that increments the epoch every 10 ms + // (~100 Hz). Stores set a deadline of 3 000 ticks (~30 s). + // The thread holds a weak reference to the engine so that when the + // last strong Arc is dropped the thread exits cleanly. + let engine_weak = Arc::downgrade(&engine); + std::thread::Builder::new() + .name("wasm-epoch-ticker".into()) + .spawn(move || loop { + std::thread::sleep(std::time::Duration::from_millis(10)); + match engine_weak.upgrade() { + Some(e) => e.increment_epoch(), + None => break, // engine dropped — stop the ticker + } + })?; + + Ok(Self { engine }) + } + + /// Return a clone of the inner `Arc`. 
+ pub fn inner(&self) -> Arc { + Arc::clone(&self.engine) + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn engine_creates_successfully() { + let result = WasmEngine::new(); + assert!(result.is_ok(), "WasmEngine::new() should succeed"); + } + + #[test] + fn engine_clone_shares_same_arc() { + let engine1 = WasmEngine::new().expect("engine creation should succeed"); + let engine2 = engine1.clone(); + assert!( + Arc::ptr_eq(&engine1.engine, &engine2.engine), + "Cloned WasmEngine should share the same Arc" + ); + } + + #[test] + fn engine_inner_returns_valid_arc() { + let engine = WasmEngine::new().expect("engine creation should succeed"); + let inner = engine.inner(); + assert!( + Arc::strong_count(&inner) >= 2, + "inner() should return an Arc with strong_count >= 2" + ); + } + + /// Verify that `WasmEngine` has epoch interruption enabled. + /// + /// When `epoch_interruption(true)` is set on the engine config, the compiler + /// inserts epoch check points at function entries and loop back-edges. Calling + /// a function on a store whose epoch deadline has already been exceeded will + /// immediately trap. Without epoch interruption the same function succeeds. + /// + /// RED: fails with current WasmEngine (no epoch_interruption) — nop() succeeds. + /// GREEN: passes after enabling epoch_interruption + ticker (nop() traps). + #[test] + fn epoch_interruption_is_enabled() { + let we = WasmEngine::new().expect("WasmEngine::new() should succeed"); + let engine = we.inner(); + + // A minimal WAT module: one exported function with no body. + // When epoch_interruption is enabled, wasmtime inserts an epoch check + // at every function entry, so calling "nop" with an exceeded deadline traps. 
+ let wat = r#"(module (func (export "nop")))"#; + + let module = wasmtime::Module::new(&engine, wat).expect("simple WAT module should compile"); + + let mut store = wasmtime::Store::new(&engine, ()); + // deadline = current_epoch (0) + 1 = 1 + store.set_epoch_deadline(1); + // Manually advance the epoch so it equals the deadline. + // With epoch_interruption enabled the next function call will trap. + engine.increment_epoch(); + + let instance = wasmtime::Instance::new(&mut store, &module, &[]) + .expect("module instantiation should succeed"); + let nop = instance + .get_typed_func::<(), ()>(&mut store, "nop") + .expect("nop export should exist"); + + // With epoch_interruption enabled: epoch >= deadline → immediate trap. + // With epoch_interruption disabled: epoch checks absent → Ok(()). + let result = nop.call(&mut store, ()); + assert!( + result.is_err(), + "nop() should trap due to exceeded epoch deadline — \ + epoch_interruption may not be enabled in WasmEngine::new()" + ); + } +} diff --git a/crates/amplifier-core/tests/mixed_transport_e2e.rs b/crates/amplifier-core/tests/mixed_transport_e2e.rs new file mode 100644 index 0000000..d6d6a92 --- /dev/null +++ b/crates/amplifier-core/tests/mixed_transport_e2e.rs @@ -0,0 +1,293 @@ +//! Mixed-transport E2E integration test. +//! +//! Proves that native Rust modules (simulating Python-loaded modules) and +//! WASM modules coexist in the same Coordinator without the coordinator being +//! able to tell them apart — all are just `Arc` at runtime. +//! +//! Test scenario: +//! - Native `FakeOrchestrator` → simulates a Python-loaded orchestrator +//! - Native `FakeProvider` → simulates a Python-loaded provider +//! - WASM `echo-tool.wasm` → loaded via `load_wasm_tool()` +//! - WASM `deny-hook.wasm` → loaded via `load_wasm_hook()` and registered +//! +//! Run with: +//! 
cargo test -p amplifier-core --features wasm --test mixed_transport_e2e -- --test-threads=1 + +#![cfg(feature = "wasm")] + +use std::collections::HashMap; +use std::sync::Arc; + +use serde_json::json; + +use amplifier_core::coordinator::Coordinator; +use amplifier_core::models::HookAction; +use amplifier_core::testing::{FakeContextManager, FakeOrchestrator, FakeProvider}; +use amplifier_core::transport::{load_wasm_hook, load_wasm_tool}; +use amplifier_core::wasm_engine::WasmEngine; + +// --------------------------------------------------------------------------- +// Helpers +// --------------------------------------------------------------------------- + +/// Load a pre-compiled .wasm fixture by name. +/// +/// CARGO_MANIFEST_DIR = `.../crates/amplifier-core`; fixtures live at the +/// workspace root under `tests/fixtures/wasm/`. +fn fixture(name: &str) -> Vec { + let manifest = std::path::Path::new(env!("CARGO_MANIFEST_DIR")); + let path = manifest.join("../../tests/fixtures/wasm").join(name); + std::fs::read(&path) + .unwrap_or_else(|e| panic!("fixture '{}' not found at {}: {}", name, path.display(), e)) +} + +/// Create a shared wasmtime Engine with Component Model enabled. +fn make_engine() -> Arc { + WasmEngine::new() + .expect("WasmEngine::new() should succeed") + .inner() +} + +// --------------------------------------------------------------------------- +// Test: mixed-transport session — native + WASM modules coexist +// --------------------------------------------------------------------------- + +/// Prove that native Rust modules (simulating Python-loaded) and WASM modules +/// coexist in the same Coordinator session. +/// +/// Steps: +/// 1. Create Coordinator +/// 2. Mount native FakeOrchestrator (simulates Python-loaded module) +/// 3. Mount native FakeProvider (simulates Python-loaded module) +/// 4. Mount WASM echo-tool (loaded via load_wasm_tool) +/// 5. Register WASM deny-hook (loaded via load_wasm_hook) +/// 6. 
Assert all four are present and queryable +/// 7. Execute WASM tool via coordinator → verify result +/// 8. Emit hook event → verify WASM deny-hook fires and returns Deny +/// 9. Call native provider via coordinator → verify response +/// 10. Call native orchestrator via coordinator → verify response +#[tokio::test] +async fn mixed_transport_session() { + let engine = make_engine(); + + // ── Step 1: Create Coordinator ────────────────────────────────────────── + let coordinator = Arc::new(Coordinator::new_for_test()); + + // ── Step 2: Mount native FakeOrchestrator ────────────────────────────── + let native_orch = Arc::new(FakeOrchestrator::new("native-orchestrator-response")); + coordinator.set_orchestrator(Arc::clone(&native_orch) as _); + + // ── Step 3: Mount native FakeProvider ────────────────────────────────── + let native_provider = Arc::new(FakeProvider::new( + "fake-provider", + "native-provider-response", + )); + coordinator.mount_provider("fake-provider", Arc::clone(&native_provider) as _); + + // ── Step 4: Mount WASM echo-tool ─────────────────────────────────────── + let echo_bytes = fixture("echo-tool.wasm"); + let wasm_tool = + load_wasm_tool(&echo_bytes, Arc::clone(&engine)).expect("load_wasm_tool should succeed"); + coordinator.mount_tool("echo-tool", Arc::clone(&wasm_tool)); + + // ── Step 5: Register WASM deny-hook ──────────────────────────────────── + let deny_bytes = fixture("deny-hook.wasm"); + let wasm_hook = + load_wasm_hook(&deny_bytes, Arc::clone(&engine)).expect("load_wasm_hook should succeed"); + // `register` returns an unregister closure; bind it so the must_use warning is satisfied. 
+ let _unregister_deny_hook = coordinator.hooks().register( + "tool:before_execute", + wasm_hook, + 0, + Some("wasm-deny-hook".to_string()), + ); + + // ── Step 6: Verify all four modules are present and queryable ─────────── + + // Orchestrator is set + assert!( + coordinator.has_orchestrator(), + "coordinator should have an orchestrator mounted" + ); + + // Provider is queryable by name + let retrieved_provider = coordinator + .get_provider("fake-provider") + .expect("fake-provider should be mounted"); + assert_eq!( + retrieved_provider.name(), + "fake-provider", + "provider name mismatch" + ); + + // WASM tool is queryable by name + let retrieved_tool = coordinator + .get_tool("echo-tool") + .expect("echo-tool should be mounted"); + assert_eq!( + retrieved_tool.name(), + "echo-tool", + "WASM tool name mismatch" + ); + + // Confirm tool/provider counts (coordinator sees 1 tool, 1 provider) + assert_eq!( + coordinator.tools().len(), + 1, + "coordinator should have exactly 1 tool mounted" + ); + assert_eq!( + coordinator.providers().len(), + 1, + "coordinator should have exactly 1 provider mounted" + ); + + // ── Step 7: Execute WASM tool via coordinator → verify result ─────────── + let tool = coordinator + .get_tool("echo-tool") + .expect("echo-tool must be mounted"); + let input = json!({"mixed": "transport", "source": "wasm"}); + let tool_result = tool + .execute(input.clone()) + .await + .expect("WASM echo-tool execute() should succeed"); + + assert!( + tool_result.success, + "WASM echo-tool should return success=true" + ); + assert_eq!( + tool_result.output, + Some(input), + "WASM echo-tool should echo the input back" + ); + + // ── Step 8: Emit hook event → verify WASM deny-hook fires (returns Deny) ─ + let hook_result = coordinator + .hooks() + .emit("tool:before_execute", json!({"tool": "bash"})) + .await; + + assert_eq!( + hook_result.action, + HookAction::Deny, + "WASM deny-hook should return Deny, got {:?}", + hook_result.action + ); + assert!( + 
hook_result.reason.is_some(), + "WASM deny-hook should provide a denial reason" + ); + + // ── Step 9: Call native provider via coordinator → verify response ────── + use amplifier_core::messages::{ChatRequest, Message, MessageContent, Role}; + + let provider = coordinator + .get_provider("fake-provider") + .expect("fake-provider must be mounted"); + let chat_request = ChatRequest { + messages: vec![Message { + role: Role::User, + content: MessageContent::Text("hello from mixed session".to_string()), + name: None, + tool_call_id: None, + metadata: None, + extensions: HashMap::new(), + }], + tools: None, + response_format: None, + temperature: None, + top_p: None, + max_output_tokens: None, + conversation_id: None, + stream: None, + metadata: None, + model: None, + tool_choice: None, + stop: None, + reasoning_effort: None, + timeout: None, + extensions: HashMap::new(), + }; + let provider_response = provider + .complete(chat_request) + .await + .expect("native FakeProvider.complete() should succeed"); + + assert!( + !provider_response.content.is_empty(), + "native FakeProvider should return non-empty content" + ); + + // ── Step 10: Call native orchestrator via coordinator → verify response ── + let orchestrator = coordinator + .orchestrator() + .expect("orchestrator must be mounted"); + let orch_response = orchestrator + .execute( + "mixed transport test prompt".to_string(), + Arc::new(FakeContextManager::new()), + Default::default(), + Default::default(), + json!({}), + json!({}), + ) + .await + .expect("native FakeOrchestrator.execute() should succeed"); + + assert_eq!( + orch_response, "native-orchestrator-response", + "native orchestrator should return the configured response" + ); +} + +// --------------------------------------------------------------------------- +// Test: coordinator to_dict reflects both native and WASM modules +// --------------------------------------------------------------------------- + +/// Verify that `Coordinator::to_dict()` 
correctly reports mixed-transport state. +/// +/// The coordinator's introspection API must be unaware of transport origin — +/// both native and WASM modules appear identically in the registry. +#[test] +fn coordinator_to_dict_reflects_mixed_modules() { + let engine = make_engine(); + + let coordinator = Coordinator::new_for_test(); + + // Mount native provider + let native_provider = Arc::new(FakeProvider::new("native-llm", "response")); + coordinator.mount_provider("native-llm", native_provider as _); + + // Mount WASM tool + let echo_bytes = fixture("echo-tool.wasm"); + let wasm_tool = load_wasm_tool(&echo_bytes, engine).expect("load_wasm_tool should succeed"); + coordinator.mount_tool("echo-tool", wasm_tool); + + // Mount native orchestrator + let orch = Arc::new(FakeOrchestrator::new("ok")); + coordinator.set_orchestrator(orch as _); + + // Inspect via to_dict + let dict = coordinator.to_dict(); + + let tools = dict["tools"].as_array().expect("tools must be array"); + assert!( + tools.contains(&json!("echo-tool")), + "to_dict should list WASM echo-tool, got: {tools:?}" + ); + + let providers = dict["providers"] + .as_array() + .expect("providers must be array"); + assert!( + providers.contains(&json!("native-llm")), + "to_dict should list native provider, got: {providers:?}" + ); + + assert_eq!( + dict["has_orchestrator"], + json!(true), + "to_dict should report orchestrator as mounted" + ); +} diff --git a/crates/amplifier-core/tests/module_resolver_e2e.rs b/crates/amplifier-core/tests/module_resolver_e2e.rs new file mode 100644 index 0000000..24d13fc --- /dev/null +++ b/crates/amplifier-core/tests/module_resolver_e2e.rs @@ -0,0 +1,298 @@ +//! E2E integration tests for the full module resolver pipeline. +//! +//! Tests the complete resolve → detect type → load → execute pipeline +//! for all supported module types. +//! +//! 
Run with: cargo test -p amplifier-core --features wasm --test module_resolver_e2e + +#![cfg(feature = "wasm")] + +use std::path::Path; +use std::sync::Arc; + +use amplifier_core::models::ModuleType; +use amplifier_core::module_resolver::{ + load_module, resolve_module, LoadedModule, ModuleArtifact, ModuleResolverError, +}; +use amplifier_core::transport::Transport; +use amplifier_core::wasm_engine::WasmEngine; + +// --------------------------------------------------------------------------- +// Helpers +// --------------------------------------------------------------------------- + +/// Load a pre-compiled .wasm fixture by name. +/// +/// CARGO_MANIFEST_DIR = `.../crates/amplifier-core`; fixtures live two +/// levels up at the workspace root under `tests/fixtures/wasm/`. +fn fixture(name: &str) -> Vec { + let manifest = std::path::Path::new(env!("CARGO_MANIFEST_DIR")); + let path = manifest.join("../../tests/fixtures/wasm").join(name); + std::fs::read(&path) + .unwrap_or_else(|e| panic!("fixture '{}' not found at {}: {}", name, path.display(), e)) +} + +/// Create a shared wasmtime Engine with Component Model enabled. +fn make_engine() -> Arc { + WasmEngine::new() + .expect("WasmEngine::new() should succeed") + .inner() +} + +/// Create a temp directory containing the named fixture file. +fn dir_with_wasm(fixture_name: &str) -> tempfile::TempDir { + let dir = tempfile::tempdir().expect("create temp dir"); + let bytes = fixture(fixture_name); + std::fs::write(dir.path().join(fixture_name), &bytes).expect("write fixture"); + dir +} + +// --------------------------------------------------------------------------- +// Resolve + detect type for each of the 6 WASM module types +// --------------------------------------------------------------------------- + +/// Resolve echo-tool.wasm and verify Transport::Wasm + ModuleType::Tool. 
+#[test] +fn resolve_wasm_tool() { + let dir = dir_with_wasm("echo-tool.wasm"); + let manifest = resolve_module(dir.path()).expect("should resolve"); + assert_eq!(manifest.transport, Transport::Wasm); + assert_eq!(manifest.module_type, ModuleType::Tool); +} + +/// Resolve deny-hook.wasm and verify Transport::Wasm + ModuleType::Hook. +#[test] +fn resolve_wasm_hook() { + let dir = dir_with_wasm("deny-hook.wasm"); + let manifest = resolve_module(dir.path()).expect("should resolve"); + assert_eq!(manifest.transport, Transport::Wasm); + assert_eq!(manifest.module_type, ModuleType::Hook); +} + +/// Resolve memory-context.wasm and verify Transport::Wasm + ModuleType::Context. +#[test] +fn resolve_wasm_context() { + let dir = dir_with_wasm("memory-context.wasm"); + let manifest = resolve_module(dir.path()).expect("should resolve"); + assert_eq!(manifest.transport, Transport::Wasm); + assert_eq!(manifest.module_type, ModuleType::Context); +} + +/// Resolve auto-approve.wasm and verify Transport::Wasm + ModuleType::Approval. +#[test] +fn resolve_wasm_approval() { + let dir = dir_with_wasm("auto-approve.wasm"); + let manifest = resolve_module(dir.path()).expect("should resolve"); + assert_eq!(manifest.transport, Transport::Wasm); + assert_eq!(manifest.module_type, ModuleType::Approval); +} + +/// Resolve echo-provider.wasm and verify Transport::Wasm + ModuleType::Provider. +#[test] +fn resolve_wasm_provider() { + let dir = dir_with_wasm("echo-provider.wasm"); + let manifest = resolve_module(dir.path()).expect("should resolve"); + assert_eq!(manifest.transport, Transport::Wasm); + assert_eq!(manifest.module_type, ModuleType::Provider); +} + +/// Resolve passthrough-orchestrator.wasm and verify Transport::Wasm + ModuleType::Orchestrator. 
+#[test] +fn resolve_wasm_orchestrator() { + let dir = dir_with_wasm("passthrough-orchestrator.wasm"); + let manifest = resolve_module(dir.path()).expect("should resolve"); + assert_eq!(manifest.transport, Transport::Wasm); + assert_eq!(manifest.module_type, ModuleType::Orchestrator); +} + +// --------------------------------------------------------------------------- +// Python package detection +// --------------------------------------------------------------------------- + +/// Resolve a directory containing __init__.py — expects Python transport with +/// ModuleType::Tool (default) and a PythonModule artifact. +#[test] +fn resolve_python_package() { + let dir = tempfile::tempdir().expect("create temp dir"); + std::fs::write(dir.path().join("__init__.py"), b"# package").expect("write __init__.py"); + + let manifest = resolve_module(dir.path()).expect("should resolve"); + assert_eq!(manifest.transport, Transport::Python); + assert_eq!(manifest.module_type, ModuleType::Tool); + match &manifest.artifact { + ModuleArtifact::PythonModule(name) => { + assert!(!name.is_empty(), "package name should be non-empty"); + } + other => panic!("expected PythonModule artifact, got {other:?}"), + } +} + +// --------------------------------------------------------------------------- +// amplifier.toml gRPC detection +// --------------------------------------------------------------------------- + +/// Resolve a directory with amplifier.toml (gRPC transport) — expects the +/// endpoint from the TOML to be captured in the manifest. 
+#[test] +fn resolve_amplifier_toml_grpc() { + let dir = tempfile::tempdir().expect("create temp dir"); + let toml_content = r#" +[module] +transport = "grpc" +type = "tool" + +[grpc] +endpoint = "http://localhost:50051" +"#; + std::fs::write(dir.path().join("amplifier.toml"), toml_content).expect("write amplifier.toml"); + + let manifest = resolve_module(dir.path()).expect("should resolve"); + assert_eq!(manifest.transport, Transport::Grpc); + assert_eq!(manifest.module_type, ModuleType::Tool); + match &manifest.artifact { + ModuleArtifact::GrpcEndpoint(endpoint) => { + assert_eq!(endpoint, "http://localhost:50051"); + } + other => panic!("expected GrpcEndpoint artifact, got {other:?}"), + } +} + +// --------------------------------------------------------------------------- +// Priority: amplifier.toml overrides WASM auto-detection +// --------------------------------------------------------------------------- + +/// When both echo-tool.wasm AND amplifier.toml are present, the TOML wins. +/// Transport must be Grpc (from the TOML), not Wasm (from the .wasm file). +#[test] +fn resolve_amplifier_toml_overrides_auto() { + let dir = tempfile::tempdir().expect("create temp dir"); + + // Write a real WASM component that would otherwise be auto-detected as Tool. + let wasm_bytes = fixture("echo-tool.wasm"); + std::fs::write(dir.path().join("echo-tool.wasm"), &wasm_bytes).expect("write wasm"); + + // Write an amplifier.toml pointing to gRPC — it should override the .wasm. 
+ let toml_content = r#" +[module] +transport = "grpc" +type = "tool" + +[grpc] +endpoint = "http://localhost:50051" +"#; + std::fs::write(dir.path().join("amplifier.toml"), toml_content).expect("write amplifier.toml"); + + let manifest = resolve_module(dir.path()).expect("should resolve"); + assert_eq!( + manifest.transport, + Transport::Grpc, + "amplifier.toml must override WASM auto-detection" + ); +} + +// --------------------------------------------------------------------------- +// Error cases +// --------------------------------------------------------------------------- + +/// An empty directory produces a resolution error mentioning "could not detect". +#[test] +fn resolve_empty_dir_errors() { + let dir = tempfile::tempdir().expect("create temp dir"); + + // Annotate with the error type so the ModuleResolverError import is used. + let result: Result<_, ModuleResolverError> = resolve_module(dir.path()); + + assert!(result.is_err(), "empty dir should produce an error"); + let err_msg = format!("{}", result.unwrap_err()); + assert!( + err_msg.contains("could not detect"), + "error should mention 'could not detect', got: {err_msg}" + ); +} + +/// A path that does not exist produces a resolution error mentioning "does not exist". +#[test] +fn resolve_nonexistent_path_errors() { + let result = resolve_module(Path::new("/nonexistent/path-xyz-resolver-e2e-999")); + + assert!(result.is_err(), "nonexistent path should produce an error"); + let err_msg = format!("{}", result.unwrap_err()); + assert!( + err_msg.contains("does not exist"), + "error should mention 'does not exist', got: {err_msg}" + ); +} + +// --------------------------------------------------------------------------- +// Full pipeline: resolve → load → execute +// --------------------------------------------------------------------------- + +/// Full pipeline for the echo-tool: +/// resolve echo-tool.wasm → load → execute JSON input → verify roundtrip. 
+/// +/// The echo-tool fixture echoes back its input verbatim. +#[tokio::test] +async fn load_module_wasm_tool_e2e() { + let dir = dir_with_wasm("echo-tool.wasm"); + let manifest = resolve_module(dir.path()).expect("should resolve"); + + let engine = make_engine(); + let coordinator = Arc::new(amplifier_core::Coordinator::new_for_test()); + let loaded = load_module(&manifest, engine, Some(coordinator)).expect("should load"); + + match loaded { + LoadedModule::Tool(tool) => { + assert_eq!(tool.name(), "echo-tool"); + let input = serde_json::json!({"message": "hello from resolver", "count": 7}); + let result = tool + .execute(input.clone()) + .await + .expect("execute should succeed"); + assert!(result.success); + assert_eq!(result.output, Some(input)); + } + other => panic!("expected Tool, got {}", other.variant_name()), + } +} + +/// Full pipeline for deny-hook: +/// resolve deny-hook.wasm → load → verify the variant is LoadedModule::Hook. +#[tokio::test] +async fn load_module_wasm_hook_e2e() { + let dir = dir_with_wasm("deny-hook.wasm"); + let manifest = resolve_module(dir.path()).expect("should resolve"); + + let engine = make_engine(); + let coordinator = Arc::new(amplifier_core::Coordinator::new_for_test()); + let loaded = load_module(&manifest, engine, Some(coordinator)).expect("should load"); + + match loaded { + LoadedModule::Hook(_) => { + // Verified: the resolver correctly identified and loaded a Hook module. + } + other => panic!("expected Hook, got {}", other.variant_name()), + } +} + +/// Full pipeline for a Python package: +/// resolve dir with __init__.py → load → verify LoadedModule::PythonDelegated +/// with a non-empty package_name (the Python host should load it via importlib). 
+#[test] +fn load_module_python_returns_delegated() { + let dir = tempfile::tempdir().expect("create temp dir"); + std::fs::write(dir.path().join("__init__.py"), b"# package").expect("write __init__.py"); + + let manifest = resolve_module(dir.path()).expect("should resolve"); + let engine = make_engine(); + let loaded = load_module(&manifest, engine, None).expect("should load"); + + match loaded { + LoadedModule::PythonDelegated { package_name } => { + assert!( + !package_name.is_empty(), + "package_name should be non-empty, got empty string" + ); + } + other => panic!("expected PythonDelegated, got {}", other.variant_name()), + } +} diff --git a/crates/amplifier-core/tests/wasm_e2e.rs b/crates/amplifier-core/tests/wasm_e2e.rs new file mode 100644 index 0000000..ed74e62 --- /dev/null +++ b/crates/amplifier-core/tests/wasm_e2e.rs @@ -0,0 +1,378 @@ +//! WASM E2E integration tests. +//! +//! Tests all 6 WASM module types end-to-end using pre-compiled .wasm fixtures. +//! Each test loads a fixture via `transport::load_wasm_*` and calls trait methods +//! directly — this is the public API surface, not the bridge internals. +//! +//! Run with: cargo test -p amplifier-core --features wasm --test wasm_e2e + +#![cfg(feature = "wasm")] + +use std::collections::HashMap; +use std::sync::Arc; + +use serde_json::json; + +use amplifier_core::messages::{ChatRequest, Message, MessageContent, Role}; +use amplifier_core::models::{ApprovalRequest, HookAction}; +use amplifier_core::transport::{ + load_wasm_approval, load_wasm_context, load_wasm_hook, load_wasm_orchestrator, + load_wasm_provider, load_wasm_tool, +}; +use amplifier_core::wasm_engine::WasmEngine; + +// --------------------------------------------------------------------------- +// Helpers +// --------------------------------------------------------------------------- + +/// Load a pre-compiled .wasm fixture by name. 
+/// +/// CARGO_MANIFEST_DIR = `.../crates/amplifier-core`; fixtures live two +/// levels up at the workspace root under `tests/fixtures/wasm/`. +fn fixture(name: &str) -> Vec { + let manifest = std::path::Path::new(env!("CARGO_MANIFEST_DIR")); + let path = manifest.join("../../tests/fixtures/wasm").join(name); + std::fs::read(&path) + .unwrap_or_else(|e| panic!("fixture '{}' not found at {}: {}", name, path.display(), e)) +} + +/// Create a shared wasmtime Engine with Component Model enabled. +fn make_engine() -> Arc { + WasmEngine::new() + .expect("WasmEngine::new() should succeed") + .inner() +} + +// --------------------------------------------------------------------------- +// Test 1: Tool — load from bytes +// --------------------------------------------------------------------------- + +/// Load `echo-tool.wasm` via `load_wasm_tool` and verify the public API surface: +/// - `name()` returns "echo-tool" +/// - `get_spec()` has the correct name and a description +#[test] +fn tool_load_from_bytes() { + let engine = make_engine(); + let bytes = fixture("echo-tool.wasm"); + + let tool = load_wasm_tool(&bytes, engine).expect("load_wasm_tool should succeed"); + + assert_eq!(tool.name(), "echo-tool", "name() mismatch"); + + let spec = tool.get_spec(); + assert_eq!(spec.name, "echo-tool", "spec.name mismatch"); + assert!( + spec.description.is_some(), + "spec.description should be set, got None" + ); + assert!( + !spec.description.as_deref().unwrap_or("").is_empty(), + "spec.description should be non-empty" + ); +} + +// --------------------------------------------------------------------------- +// Test 2: Tool — execute roundtrip +// --------------------------------------------------------------------------- + +/// Load echo-tool, execute with JSON input, verify the output echoes the input. 
+#[tokio::test] +async fn tool_execute_roundtrip() { + let engine = make_engine(); + let bytes = fixture("echo-tool.wasm"); + + let tool = load_wasm_tool(&bytes, engine).expect("load_wasm_tool should succeed"); + + let input = json!({"message": "hello", "count": 42}); + let result = tool + .execute(input.clone()) + .await + .expect("execute should succeed"); + + assert!(result.success, "ToolResult.success should be true"); + assert_eq!( + result.output, + Some(input), + "ToolResult.output should echo the input" + ); +} + +// --------------------------------------------------------------------------- +// Test 3: Hook — deny action +// --------------------------------------------------------------------------- + +/// Load `deny-hook.wasm`, handle an event, verify the hook returns Deny. +#[tokio::test] +async fn hook_handler_deny() { + let engine = make_engine(); + let bytes = fixture("deny-hook.wasm"); + + let hook = load_wasm_hook(&bytes, engine).expect("load_wasm_hook should succeed"); + + let result = hook + .handle("tool:before_execute", json!({"tool": "bash"})) + .await + .expect("handle should succeed"); + + assert_eq!( + result.action, + HookAction::Deny, + "expected action == Deny, got {:?}", + result.action + ); + assert!( + result.reason.is_some(), + "expected a denial reason, got None" + ); + let reason = result.reason.as_deref().unwrap_or(""); + assert!( + reason.contains("Denied") || reason.contains("denied"), + "expected reason to contain 'Denied', got: {reason:?}" + ); +} + +// --------------------------------------------------------------------------- +// Test 4: Context — stateful roundtrip +// --------------------------------------------------------------------------- + +/// Load `memory-context.wasm` and exercise the full stateful cycle: +/// get (empty) → add → add → get (2 messages) → clear → get (empty) +#[tokio::test] +async fn context_manager_roundtrip() { + let engine = make_engine(); + let bytes = fixture("memory-context.wasm"); + + let ctx 
= load_wasm_context(&bytes, engine).expect("load_wasm_context should succeed"); + + // Initially empty. + let initial = ctx + .get_messages() + .await + .expect("get_messages should succeed"); + assert!( + initial.is_empty(), + "expected empty context on fresh load, got {} messages", + initial.len() + ); + + // Add two messages. + let msg1 = json!({"role": "user", "content": "Hello"}); + let msg2 = json!({"role": "assistant", "content": "Hi there!"}); + ctx.add_message(msg1.clone()) + .await + .expect("add_message 1 should succeed"); + ctx.add_message(msg2.clone()) + .await + .expect("add_message 2 should succeed"); + + // Now there should be 2 messages. + let messages = ctx + .get_messages() + .await + .expect("get_messages should succeed"); + assert_eq!( + messages.len(), + 2, + "expected 2 messages after two add_message calls, got {}", + messages.len() + ); + + // Clear the context. + ctx.clear().await.expect("clear should succeed"); + + // Back to empty. + let after_clear = ctx + .get_messages() + .await + .expect("get_messages after clear should succeed"); + assert!( + after_clear.is_empty(), + "expected empty context after clear, got {} messages", + after_clear.len() + ); +} + +// --------------------------------------------------------------------------- +// Test 5: Approval — auto-approve +// --------------------------------------------------------------------------- + +/// Load `auto-approve.wasm` and verify that every request is auto-approved. 
+#[tokio::test] +async fn approval_auto_approve() { + let engine = make_engine(); + let bytes = fixture("auto-approve.wasm"); + + let approval = load_wasm_approval(&bytes, engine).expect("load_wasm_approval should succeed"); + + let request = ApprovalRequest { + tool_name: "bash".to_string(), + action: "Execute shell command".to_string(), + details: HashMap::new(), + risk_level: "medium".to_string(), + timeout: Some(30.0), + }; + + let response = approval + .request_approval(request) + .await + .expect("request_approval should succeed"); + + assert!( + response.approved, + "auto-approve fixture should always approve, got approved=false" + ); +} + +// --------------------------------------------------------------------------- +// Test 6: Provider — complete roundtrip +// --------------------------------------------------------------------------- + +/// Load `echo-provider.wasm`, verify name/info/models, and call complete(). +#[tokio::test] +async fn provider_complete() { + let engine = make_engine(); + let bytes = fixture("echo-provider.wasm"); + + let provider = load_wasm_provider(&bytes, engine).expect("load_wasm_provider should succeed"); + + // Verify name. + assert_eq!(provider.name(), "echo-provider", "provider.name() mismatch"); + + // Verify get_info(). + let info = provider.get_info(); + assert_eq!(info.id, "echo-provider", "info.id mismatch"); + assert!( + !info.display_name.is_empty(), + "info.display_name should be non-empty" + ); + + // Verify list_models() returns at least one model. + let models = provider + .list_models() + .await + .expect("list_models should succeed"); + assert!( + !models.is_empty(), + "list_models() should return at least one model" + ); + + // Call complete() with a minimal ChatRequest and verify non-empty content. 
+ let request = ChatRequest { + messages: vec![Message { + role: Role::User, + content: MessageContent::Text("Hello, echo provider!".to_string()), + name: None, + tool_call_id: None, + metadata: None, + extensions: HashMap::new(), + }], + tools: None, + response_format: None, + temperature: None, + top_p: None, + max_output_tokens: None, + conversation_id: None, + stream: None, + metadata: None, + model: None, + tool_choice: None, + stop: None, + reasoning_effort: None, + timeout: None, + extensions: HashMap::new(), + }; + + let response = provider + .complete(request) + .await + .expect("complete() should succeed"); + + assert!( + !response.content.is_empty(), + "provider.complete() should return non-empty content" + ); +} + +// --------------------------------------------------------------------------- +// Test 7: Orchestrator — calls echo-tool via kernel-service +// --------------------------------------------------------------------------- + +/// Load `passthrough-orchestrator.wasm` with a coordinator that has `echo-tool` +/// mounted (the real WASM echo-tool bridge), then call `execute()` and verify +/// a non-empty response is returned. +/// +/// Flow: +/// load_wasm_orchestrator → WASM execute() → kernel-service::execute-tool (host import) +/// → coordinator.get_tool("echo-tool") → WasmToolBridge → echo-tool WASM +/// → ToolResult back → orchestrator serialises it → non-empty String +#[tokio::test] +async fn orchestrator_calls_kernel() { + let engine = make_engine(); + let orch_bytes = fixture("passthrough-orchestrator.wasm"); + let echo_bytes = fixture("echo-tool.wasm"); + + // Build a coordinator and mount the WASM echo-tool bridge. + let coordinator = Arc::new(amplifier_core::coordinator::Coordinator::new_for_test()); + let echo_tool = + load_wasm_tool(&echo_bytes, Arc::clone(&engine)).expect("load echo-tool for coordinator"); + coordinator.mount_tool("echo-tool", echo_tool); + + // Load the orchestrator with the prepared coordinator. 
+ let orchestrator = + load_wasm_orchestrator(&orch_bytes, Arc::clone(&engine), Arc::clone(&coordinator)) + .expect("load_wasm_orchestrator should succeed"); + + // Execute the orchestrator with a simple prompt. + let result = orchestrator + .execute( + "test prompt".to_string(), + Arc::new(amplifier_core::testing::FakeContextManager::new()), + Default::default(), + Default::default(), + json!({}), + json!({}), + ) + .await; + + let response = result.expect("orchestrator.execute() should succeed"); + assert!( + !response.is_empty(), + "orchestrator should return a non-empty response, got empty string" + ); +} + +// --------------------------------------------------------------------------- +// Test 8: Calculator tool — loads and resolves from examples/wasm-modules/ +// --------------------------------------------------------------------------- + +/// Load `calculator-tool.wasm` from the examples directory via `load_wasm_tool` +/// and verify that spec.name == "calculator". +/// +/// This proves the developer authoring workflow: a fresh project using the +/// amplifier-guest SDK compiles, produces a valid .wasm component, and loads +/// correctly through the standard transport pipeline. 
+#[test] +fn calculator_tool_loads_and_resolves() { + let manifest = std::path::Path::new(env!("CARGO_MANIFEST_DIR")); + let path = manifest.join("../../examples/wasm-modules/calculator-tool.wasm"); + let bytes = std::fs::read(&path).unwrap_or_else(|e| { + panic!( + "calculator-tool.wasm not found at {}: {}", + path.display(), + e + ) + }); + + let engine = make_engine(); + let tool = load_wasm_tool(&bytes, engine).expect("load_wasm_tool should succeed"); + + assert_eq!( + tool.name(), + "calculator", + "spec.name should be 'calculator'" + ); + + let spec = tool.get_spec(); + assert_eq!(spec.name, "calculator", "spec.name mismatch"); + assert!(spec.description.is_some(), "spec.description should be set"); +} diff --git a/crates/amplifier-core/tests/wasm_resource_limits.rs b/crates/amplifier-core/tests/wasm_resource_limits.rs new file mode 100644 index 0000000..1b8a2f4 --- /dev/null +++ b/crates/amplifier-core/tests/wasm_resource_limits.rs @@ -0,0 +1,33 @@ +//! Verifies that WASM modules with infinite loops are terminated +//! by epoch interruption and do not hang indefinitely. + +#[cfg(feature = "wasm")] +#[tokio::test] +async fn infinite_loop_wasm_module_is_terminated() { + use std::time::{Duration, Instant}; + + let engine = amplifier_core::bridges::create_wasm_engine().unwrap(); + + // Locate the infinite-loop fixture relative to CARGO_MANIFEST_DIR. 
+ let manifest = std::path::Path::new(env!("CARGO_MANIFEST_DIR")); + let candidates = [ + manifest.join("../../../tests/fixtures/wasm/infinite-loop.wasm"), + manifest.join("../../tests/fixtures/wasm/infinite-loop.wasm"), + ]; + let bytes = candidates + .iter() + .find(|p| p.exists()) + .map(|p| std::fs::read(p).unwrap()) + .expect("infinite-loop.wasm not found"); + + let start = Instant::now(); + let result = amplifier_core::bridges::wasm_tool::WasmToolBridge::from_bytes(&bytes, engine); + let elapsed = start.elapsed(); + + assert!(result.is_err(), "Infinite loop should be trapped"); + assert!( + elapsed < Duration::from_secs(60), + "Should terminate within timeout, took {:?}", + elapsed + ); +} diff --git a/crates/amplifier-guest/Cargo.toml b/crates/amplifier-guest/Cargo.toml new file mode 100644 index 0000000..185ffd9 --- /dev/null +++ b/crates/amplifier-guest/Cargo.toml @@ -0,0 +1,14 @@ +[package] +name = "amplifier-guest" +version = "0.1.0" +edition = "2021" + +[features] +default = ["kernel-stub"] +kernel-stub = [] + +[dependencies] +prost = "0.13" +serde = { version = "1", features = ["derive"] } +serde_json = "1" +wit-bindgen = "0.41" diff --git a/crates/amplifier-guest/src/lib.rs b/crates/amplifier-guest/src/lib.rs new file mode 100644 index 0000000..a89ac95 --- /dev/null +++ b/crates/amplifier-guest/src/lib.rs @@ -0,0 +1,1730 @@ +pub mod types; +pub use types::*; + +/// Re-export serde_json::Value for convenience. +pub use serde_json::Value; + +/// Hidden re-exports used by the `export_*!` macros. +/// Not part of the public API — do not depend on these directly. +/// Add new re-exports here as future macros require them. +#[doc(hidden)] +pub mod __macro_support { + pub use serde_json; + pub use std::sync::OnceLock; +} + +/// Trait for WASM guest tool implementations. +/// +/// All methods are synchronous — WASM guests are single-threaded. +pub trait Tool { + /// Returns the tool's name. 
+ fn name(&self) -> &str; + + /// Returns the tool specification (name, parameters, description). + fn get_spec(&self) -> ToolSpec; + + /// Executes the tool with the given JSON input. + fn execute(&self, input: Value) -> Result<ToolResult, String>; +} + +/// Exports a [`Tool`] implementation as WASM guest entry points. +/// +/// Creates a singleton instance via `OnceLock` and generates `#[no_mangle] pub extern "C"` +/// functions that the host can call to discover and invoke the tool. +/// +/// # Usage +/// +/// ```ignore +/// #[derive(Default)] +/// struct MyTool; +/// +/// impl Tool for MyTool { /* ... */ } +/// +/// export_tool!(MyTool); +/// ``` +/// +/// # Generated items +/// +/// - `get_tool() -> &'static $tool_type` — returns the singleton instance +/// - `__amplifier_tool_get_spec_len() -> u32` — byte length of the JSON-serialized [`ToolSpec`] +/// - `__amplifier_tool_get_spec(ptr: *mut u8)` — writes serialized spec into caller-provided buffer +/// +/// **Note:** This is a simplified scaffold. Real Component Model exports use `wit-bindgen`'s +/// `generate!` macro. The macro internals will be adjusted when compiling the first fixture, +/// but the module-author interface (`impl Tool` + `export_tool!`) will not change. +/// Exports a [`Tool`] implementation as WASM guest entry points. +/// +/// On **native targets** (testing), generates `#[no_mangle] pub extern "C"` functions +/// for spec introspection (`__amplifier_tool_get_spec_len`, `__amplifier_tool_get_spec`). +/// +/// On **wasm32 targets** (Component Model), generates `wit-bindgen` Guest trait +/// implementation and component exports. Requires the calling crate to declare +/// `mod bindings;` (generated by `cargo component`) and depend on `wit-bindgen-rt`. +/// +/// # Usage +/// +/// ```ignore +/// #[derive(Default)] +/// struct MyTool; +/// +/// impl Tool for MyTool { /* ... 
*/ } +/// +/// export_tool!(MyTool); +/// ``` +/// +/// # Generated items +/// +/// - `get_tool() -> &'static $tool_type` — returns the singleton instance +/// - (native) `__amplifier_tool_get_spec_len() -> u32` — byte length of the JSON-serialized [`ToolSpec`] +/// - (native) `__amplifier_tool_get_spec(ptr: *mut u8)` — writes serialized spec into caller-provided buffer +/// - (wasm32) `impl bindings::exports::amplifier::modules::tool::Guest` — Component Model bridge +/// - (wasm32) `bindings::export!` invocation — wires up component exports +#[macro_export] +macro_rules! export_tool { + ($tool_type:ident) => { + // Tool singleton, lazily initialised via Default. + static __AMPLIFIER_TOOL: $crate::__macro_support::OnceLock<$tool_type> = + $crate::__macro_support::OnceLock::new(); + + /// Returns a reference to the tool singleton. + fn get_tool() -> &'static $tool_type { + __AMPLIFIER_TOOL + .get_or_init(|| <$tool_type as ::std::default::Default>::default()) + } + + // ----- Native target: C-ABI exports for testing ----- + + // Lazily cached JSON representation of the ToolSpec. + #[cfg(not(target_arch = "wasm32"))] + static __AMPLIFIER_SPEC_CACHE: $crate::__macro_support::OnceLock<::std::vec::Vec> = + $crate::__macro_support::OnceLock::new(); + + #[cfg(not(target_arch = "wasm32"))] + fn __amplifier_cached_spec() -> &'static [u8] { + __AMPLIFIER_SPEC_CACHE.get_or_init(|| { + let spec = <$tool_type as $crate::Tool>::get_spec(get_tool()); + $crate::__macro_support::serde_json::to_vec(&spec) + .expect("ToolSpec serialization must not fail") + }) + } + + /// Returns the byte length of the JSON-serialized [`ToolSpec`]. + #[cfg(not(target_arch = "wasm32"))] + #[no_mangle] + pub extern "C" fn __amplifier_tool_get_spec_len() -> u32 { + __amplifier_cached_spec().len().try_into() + .expect("serialized ToolSpec must fit in u32 (WASM linear memory is ≤ 4 GiB)") + } + + /// Copies the JSON-serialized [`ToolSpec`] into the buffer at `ptr`. 
+ /// + /// # Safety + /// + /// The caller must provide a non-null buffer of at least + /// `__amplifier_tool_get_spec_len()` bytes. + #[cfg(not(target_arch = "wasm32"))] + #[no_mangle] + pub unsafe extern "C" fn __amplifier_tool_get_spec(ptr: *mut u8) { + if ptr.is_null() { + return; + } + let json = __amplifier_cached_spec(); + ::std::ptr::copy_nonoverlapping(json.as_ptr(), ptr, json.len()); + } + + // ----- WASM target: Component Model exports ----- + + #[cfg(target_arch = "wasm32")] + impl bindings::exports::amplifier::modules::tool::Guest for $tool_type { + fn get_spec() -> ::std::vec::Vec<u8> { + let spec = <$tool_type as $crate::Tool>::get_spec(get_tool()); + $crate::__macro_support::serde_json::to_vec(&spec) + .expect("ToolSpec serialization must not fail") + } + + fn execute(input: ::std::vec::Vec<u8>) -> ::core::result::Result<::std::vec::Vec<u8>, ::std::string::String> { + let input_val: $crate::Value = + $crate::__macro_support::serde_json::from_slice(&input) + .map_err(|e| e.to_string())?; + let result = <$tool_type as $crate::Tool>::execute(get_tool(), input_val)?; + $crate::__macro_support::serde_json::to_vec(&result) + .map_err(|e| e.to_string()) + } + } + + #[cfg(target_arch = "wasm32")] + bindings::export!($tool_type with_types_in bindings); + }; +} + +// --------------------------------------------------------------------------- +// HookHandler trait +// --------------------------------------------------------------------------- + +/// Trait for WASM guest hook handler implementations. +/// +/// All methods are synchronous — WASM guests are single-threaded. +pub trait HookHandler { + /// Handles a lifecycle event, returning an action the host should take. + fn handle(&self, event: &str, data: Value) -> Result<HookResult, String>; +} + +/// Exports a [`HookHandler`] implementation as WASM guest entry points. +/// +/// Creates a singleton instance via `OnceLock` and generates accessor functions +/// that the host can call to dispatch hook events. 
+/// +/// # Usage +/// +/// ```ignore +/// #[derive(Default)] +/// struct MyHook; +/// +/// impl HookHandler for MyHook { /* ... */ } +/// +/// export_hook!(MyHook); +/// ``` +#[macro_export] +macro_rules! export_hook { + ($hook_type:ident) => { + static __AMPLIFIER_HOOK: $crate::__macro_support::OnceLock<$hook_type> = + $crate::__macro_support::OnceLock::new(); + + /// Returns a reference to the hook handler singleton. + fn get_hook() -> &'static $hook_type { + __AMPLIFIER_HOOK + .get_or_init(|| <$hook_type as ::std::default::Default>::default()) + } + + // ----- WASM target: Component Model exports ----- + + #[cfg(target_arch = "wasm32")] + impl bindings::exports::amplifier::modules::hook_handler::Guest for $hook_type { + fn handle(event: ::std::vec::Vec) -> ::core::result::Result<::std::vec::Vec, ::std::string::String> { + // Deserialize the event bytes as a JSON object with "event" and "data" fields. + let envelope: $crate::Value = + $crate::__macro_support::serde_json::from_slice(&event) + .map_err(|e| e.to_string())?; + let event_str = envelope.get("event") + .and_then(|v| v.as_str()) + .unwrap_or(""); + let data = envelope.get("data") + .cloned() + .unwrap_or($crate::Value::Null); + let result = <$hook_type as $crate::HookHandler>::handle(get_hook(), event_str, data)?; + $crate::__macro_support::serde_json::to_vec(&result) + .map_err(|e| e.to_string()) + } + } + + #[cfg(target_arch = "wasm32")] + bindings::export!($hook_type with_types_in bindings); + }; +} + +// --------------------------------------------------------------------------- +// ContextManager trait +// --------------------------------------------------------------------------- + +/// Trait for WASM guest context manager implementations. +/// +/// All methods are synchronous — WASM guests are single-threaded. +pub trait ContextManager { + /// Adds a message to the context window. + fn add_message(&self, message: Value) -> Result<(), String>; + + /// Returns all messages in the context window. 
+ fn get_messages(&self) -> Result<Vec<Value>, String>; + + /// Returns messages relevant to the given request. + fn get_messages_for_request(&self, request: Value) -> Result<Vec<Value>, String>; + + /// Replaces the context window with the given messages. + fn set_messages(&self, messages: Vec<Value>) -> Result<(), String>; + + /// Clears all messages from the context window. + fn clear(&self) -> Result<(), String>; +} + +/// Exports a [`ContextManager`] implementation as WASM guest entry points. +/// +/// Creates a singleton instance via `OnceLock` and generates accessor functions +/// that the host can call to manage context. +/// +/// # Usage +/// +/// ```ignore +/// #[derive(Default)] +/// struct MyContext; +/// +/// impl ContextManager for MyContext { /* ... */ } +/// +/// export_context!(MyContext); +/// ``` +#[macro_export] +macro_rules! export_context { + ($ctx_type:ident) => { + static __AMPLIFIER_CONTEXT: $crate::__macro_support::OnceLock<$ctx_type> = + $crate::__macro_support::OnceLock::new(); + + /// Returns a reference to the context manager singleton. 
+ fn get_context() -> &'static $ctx_type { + __AMPLIFIER_CONTEXT + .get_or_init(|| <$ctx_type as ::std::default::Default>::default()) + } + + // ----- WASM target: Component Model exports ----- + + #[cfg(target_arch = "wasm32")] + impl bindings::exports::amplifier::modules::context_manager::Guest for $ctx_type { + fn add_message(message: ::std::vec::Vec<u8>) -> ::core::result::Result<(), ::std::string::String> { + let msg: $crate::Value = + $crate::__macro_support::serde_json::from_slice(&message) + .map_err(|e| e.to_string())?; + <$ctx_type as $crate::ContextManager>::add_message(get_context(), msg) + } + + fn get_messages() -> ::core::result::Result<::std::vec::Vec<u8>, ::std::string::String> { + let messages = <$ctx_type as $crate::ContextManager>::get_messages(get_context())?; + $crate::__macro_support::serde_json::to_vec(&messages) + .map_err(|e| e.to_string()) + } + + fn get_messages_for_request(params: ::std::vec::Vec<u8>) -> ::core::result::Result<::std::vec::Vec<u8>, ::std::string::String> { + let request: $crate::Value = + $crate::__macro_support::serde_json::from_slice(&params) + .map_err(|e| e.to_string())?; + let messages = <$ctx_type as $crate::ContextManager>::get_messages_for_request(get_context(), request)?; + $crate::__macro_support::serde_json::to_vec(&messages) + .map_err(|e| e.to_string()) + } + + fn set_messages(messages: ::std::vec::Vec<u8>) -> ::core::result::Result<(), ::std::string::String> { + let msgs: ::std::vec::Vec<$crate::Value> = + $crate::__macro_support::serde_json::from_slice(&messages) + .map_err(|e| e.to_string())?; + <$ctx_type as $crate::ContextManager>::set_messages(get_context(), msgs) + } + + fn clear() -> ::core::result::Result<(), ::std::string::String> { + <$ctx_type as $crate::ContextManager>::clear(get_context()) + } + } + + #[cfg(target_arch = "wasm32")] + bindings::export!($ctx_type with_types_in bindings); + }; +} + +// --------------------------------------------------------------------------- +// ApprovalProvider trait +// 
--------------------------------------------------------------------------- + +/// Trait for WASM guest approval provider implementations. +/// +/// All methods are synchronous — WASM guests are single-threaded. +pub trait ApprovalProvider { + /// Requests human-in-the-loop approval for a given action. + fn request_approval(&self, request: ApprovalRequest) -> Result; +} + +/// Exports an [`ApprovalProvider`] implementation as WASM guest entry points. +/// +/// Creates a singleton instance via `OnceLock` and generates accessor functions +/// that the host can call to request approvals. +/// +/// # Usage +/// +/// ```ignore +/// #[derive(Default)] +/// struct MyApproval; +/// +/// impl ApprovalProvider for MyApproval { /* ... */ } +/// +/// export_approval!(MyApproval); +/// ``` +#[macro_export] +macro_rules! export_approval { + ($approval_type:ident) => { + static __AMPLIFIER_APPROVAL: $crate::__macro_support::OnceLock<$approval_type> = + $crate::__macro_support::OnceLock::new(); + + /// Returns a reference to the approval provider singleton. 
+ fn get_approval() -> &'static $approval_type { + __AMPLIFIER_APPROVAL + .get_or_init(|| <$approval_type as ::std::default::Default>::default()) + } + + // ----- WASM target: Component Model exports ----- + + #[cfg(target_arch = "wasm32")] + impl bindings::exports::amplifier::modules::approval_provider::Guest for $approval_type { + fn request_approval(request: ::std::vec::Vec) -> ::core::result::Result<::std::vec::Vec, ::std::string::String> { + let req: $crate::ApprovalRequest = + $crate::__macro_support::serde_json::from_slice(&request) + .map_err(|e| e.to_string())?; + let result = <$approval_type as $crate::ApprovalProvider>::request_approval(get_approval(), req)?; + $crate::__macro_support::serde_json::to_vec(&result) + .map_err(|e| e.to_string()) + } + } + + #[cfg(target_arch = "wasm32")] + bindings::export!($approval_type with_types_in bindings); + }; +} + +// --------------------------------------------------------------------------- +// Provider trait +// --------------------------------------------------------------------------- + +/// Trait for WASM guest LLM provider implementations. +/// +/// All methods are synchronous — WASM guests are single-threaded. +pub trait Provider { + /// Returns the provider's name. + fn name(&self) -> &str; + + /// Returns metadata about this provider. + fn get_info(&self) -> ProviderInfo; + + /// Lists the models available from this provider. + fn list_models(&self) -> Result, String>; + + /// Sends a chat completion request and returns the response. + fn complete(&self, request: Value) -> Result; + + /// Extracts tool calls from a chat response. + fn parse_tool_calls(&self, response: &ChatResponse) -> Vec; +} + +/// Exports a [`Provider`] implementation as WASM guest entry points. +/// +/// Creates a singleton instance via `OnceLock` and generates accessor functions +/// that the host can call to discover and invoke the provider. 
+/// +/// # Usage +/// +/// ```ignore +/// #[derive(Default)] +/// struct MyProvider; +/// +/// impl Provider for MyProvider { /* ... */ } +/// +/// export_provider!(MyProvider); +/// ``` +#[macro_export] +macro_rules! export_provider { + ($provider_type:ident) => { + static __AMPLIFIER_PROVIDER: $crate::__macro_support::OnceLock<$provider_type> = + $crate::__macro_support::OnceLock::new(); + + /// Returns a reference to the provider singleton. + fn get_provider() -> &'static $provider_type { + __AMPLIFIER_PROVIDER + .get_or_init(|| <$provider_type as ::std::default::Default>::default()) + } + + // ----- WASM target: Component Model exports ----- + + #[cfg(target_arch = "wasm32")] + impl bindings::exports::amplifier::modules::provider::Guest for $provider_type { + fn get_info() -> ::std::vec::Vec { + let info = <$provider_type as $crate::Provider>::get_info(get_provider()); + $crate::__macro_support::serde_json::to_vec(&info) + .expect("ProviderInfo serialization must not fail") + } + + fn list_models() -> ::core::result::Result<::std::vec::Vec, ::std::string::String> { + let models = <$provider_type as $crate::Provider>::list_models(get_provider())?; + $crate::__macro_support::serde_json::to_vec(&models) + .map_err(|e| e.to_string()) + } + + fn complete( + request: ::std::vec::Vec, + ) -> ::core::result::Result<::std::vec::Vec, ::std::string::String> { + let req: $crate::Value = + $crate::__macro_support::serde_json::from_slice(&request) + .map_err(|e| e.to_string())?; + let response = <$provider_type as $crate::Provider>::complete(get_provider(), req)?; + $crate::__macro_support::serde_json::to_vec(&response) + .map_err(|e| e.to_string()) + } + + fn parse_tool_calls( + response: ::std::vec::Vec, + ) -> ::core::result::Result<::std::vec::Vec, ::std::string::String> { + let resp: $crate::ChatResponse = + $crate::__macro_support::serde_json::from_slice(&response) + .map_err(|e| e.to_string())?; + let calls = + <$provider_type as 
$crate::Provider>::parse_tool_calls(get_provider(), &resp); + $crate::__macro_support::serde_json::to_vec(&calls) + .map_err(|e| e.to_string()) + } + } + + #[cfg(target_arch = "wasm32")] + bindings::export!($provider_type with_types_in bindings); + }; +} + +// --------------------------------------------------------------------------- +// Orchestrator trait +// --------------------------------------------------------------------------- + +/// Trait for WASM guest orchestrator implementations. +/// +/// All methods are synchronous — WASM guests are single-threaded. +pub trait Orchestrator { + /// Executes an orchestration loop for the given prompt. + fn execute(&self, prompt: String) -> Result; +} + +/// Exports an [`Orchestrator`] implementation as WASM guest entry points. +/// +/// Creates a singleton instance via `OnceLock` and generates accessor functions +/// that the host can call to run orchestration. +/// +/// On **wasm32 targets** (Component Model), generates `wit-bindgen` Guest trait +/// implementation and component exports. Requires the calling crate to declare +/// `mod bindings;` (generated by `cargo component`) and depend on `wit-bindgen-rt`. +/// +/// # Usage +/// +/// ```ignore +/// #[derive(Default)] +/// struct MyOrchestrator; +/// +/// impl Orchestrator for MyOrchestrator { /* ... */ } +/// +/// export_orchestrator!(MyOrchestrator); +/// ``` +#[macro_export] +macro_rules! export_orchestrator { + ($orch_type:ident) => { + static __AMPLIFIER_ORCHESTRATOR: $crate::__macro_support::OnceLock<$orch_type> = + $crate::__macro_support::OnceLock::new(); + + /// Returns a reference to the orchestrator singleton. 
+ fn get_orchestrator() -> &'static $orch_type { + __AMPLIFIER_ORCHESTRATOR + .get_or_init(|| <$orch_type as ::std::default::Default>::default()) + } + + // ----- WASM target: Component Model exports ----- + + #[cfg(target_arch = "wasm32")] + impl bindings::exports::amplifier::modules::orchestrator::Guest for $orch_type { + fn execute( + request: ::std::vec::Vec, + ) -> ::core::result::Result<::std::vec::Vec, ::std::string::String> { + // Deserialize the request bytes as JSON to extract the prompt. + let req: $crate::Value = + $crate::__macro_support::serde_json::from_slice(&request) + .map_err(|e| e.to_string())?; + let prompt = req + .get("prompt") + .and_then(|v| v.as_str()) + .unwrap_or("") + .to_string(); + let result = + <$orch_type as $crate::Orchestrator>::execute(get_orchestrator(), prompt)?; + $crate::__macro_support::serde_json::to_vec(&result) + .map_err(|e| e.to_string()) + } + } + + #[cfg(target_arch = "wasm32")] + bindings::export!($orch_type with_types_in bindings); + }; +} + +// --------------------------------------------------------------------------- +// Kernel-service import wrappers +// --------------------------------------------------------------------------- + +#[cfg(all(target_arch = "wasm32", not(feature = "kernel-stub")))] +compile_error!( + "kernel:: functions are not yet wired to WIT imports. \ + Set feature = 'kernel-stub' for testing only." +); + +/// Placeholder wrappers for kernel-service WIT imports. +/// +/// These functions will be wired to real WIT imports when the Component Model +/// bindings are generated. Until then, every function returns an `Err` with a +/// descriptive placeholder message. +pub mod kernel { + use crate::types::{HookResult, ToolResult}; + use serde_json::Value; + + /// Executes a tool by name through the kernel. 
+ pub fn execute_tool(_name: &str, _input: &Value) -> Result<ToolResult, String> { + Err("kernel::execute_tool: not yet wired to WIT imports".to_string()) + } + + /// Sends a completion request to a named provider through the kernel. + pub fn complete_with_provider(_name: &str, _request: &Value) -> Result<Value, String> { + Err("kernel::complete_with_provider: not yet wired to WIT imports".to_string()) + } + + /// Emits a lifecycle hook event through the kernel. + pub fn emit_hook(_event: &str, _data: &Value) -> Result<HookResult, String> { + Err("kernel::emit_hook: not yet wired to WIT imports".to_string()) + } + + /// Retrieves all messages from the kernel's context. + pub fn get_messages() -> Result<Vec<Value>, String> { + Err("kernel::get_messages: not yet wired to WIT imports".to_string()) + } + + /// Adds a message to the kernel's context. + pub fn add_message(_message: &Value) -> Result<(), String> { + Err("kernel::add_message: not yet wired to WIT imports".to_string()) + } + + /// Retrieves a capability value by name from the kernel. + pub fn get_capability(_name: &str) -> Result<Value, String> { + Err("kernel::get_capability: not yet wired to WIT imports".to_string()) + } + + /// Registers a capability value by name with the kernel. + pub fn register_capability(_name: &str, _value: &Value) -> Result<(), String> { + Err("kernel::register_capability: not yet wired to WIT imports".to_string()) + } +} + +#[cfg(test)] +mod tool_tests { + use super::*; + use serde_json::json; + use std::collections::HashMap; + + // A minimal test tool for verifying the trait contract. 
+ #[derive(Default)] + struct EchoTool; + + impl Tool for EchoTool { + fn name(&self) -> &str { + "echo" + } + + fn get_spec(&self) -> ToolSpec { + let mut params = HashMap::new(); + params.insert("message".to_string(), json!({"type": "string"})); + ToolSpec { + name: "echo".to_string(), + parameters: params, + description: Some("Echoes the input".to_string()), + } + } + + fn execute(&self, input: Value) -> Result { + Ok(ToolResult { + success: true, + output: Some(input), + error: None, + }) + } + } + + #[test] + fn test_tool_trait_name() { + let tool = EchoTool; + assert_eq!(tool.name(), "echo"); + } + + #[test] + fn test_tool_trait_get_spec() { + let tool = EchoTool; + let spec = tool.get_spec(); + assert_eq!(spec.name, "echo"); + assert!(spec.parameters.contains_key("message")); + assert_eq!(spec.description, Some("Echoes the input".to_string())); + } + + #[test] + fn test_tool_trait_execute_success() { + let tool = EchoTool; + let input = json!({"message": "hello"}); + let result = tool.execute(input.clone()); + assert!(result.is_ok()); + let result = result.unwrap(); + assert!(result.success); + assert_eq!(result.output, Some(input)); + } + + #[test] + fn test_tool_trait_get_spec_empty_parameters() { + let spec = ToolSpec { + name: "noop".to_string(), + parameters: HashMap::new(), + description: None, + }; + let json_str = serde_json::to_string(&spec).unwrap(); + let deserialized: ToolSpec = serde_json::from_str(&json_str).unwrap(); + assert!(deserialized.parameters.is_empty()); + // Verify the parameters field serializes as an empty object. 
+ let raw: serde_json::Value = serde_json::from_str(&json_str).unwrap(); + assert_eq!(raw["parameters"], json!({})); + } + + #[test] + fn test_tool_trait_execute_error() { + #[derive(Default)] + struct FailTool; + + impl Tool for FailTool { + fn name(&self) -> &str { + "fail" + } + fn get_spec(&self) -> ToolSpec { + ToolSpec { + name: "fail".to_string(), + parameters: HashMap::new(), + description: None, + } + } + fn execute(&self, _input: Value) -> Result { + Err("something went wrong".to_string()) + } + } + + let tool = FailTool; + let result = tool.execute(json!({})); + assert!(result.is_err()); + assert_eq!(result.unwrap_err(), "something went wrong"); + } +} + +#[cfg(test)] +mod macro_tests { + use super::*; + use serde_json::json; + use std::collections::HashMap; + + #[derive(Default)] + struct MacroTestTool; + + impl Tool for MacroTestTool { + fn name(&self) -> &str { + "macro_test" + } + fn get_spec(&self) -> ToolSpec { + let mut params = HashMap::new(); + params.insert("x".to_string(), json!({"type": "integer"})); + ToolSpec { + name: "macro_test".to_string(), + parameters: params, + description: Some("A test tool for macro validation".to_string()), + } + } + fn execute(&self, input: Value) -> Result { + Ok(ToolResult { + success: true, + output: Some(input), + error: None, + }) + } + } + + export_tool!(MacroTestTool); + + #[test] + fn test_macro_get_tool_returns_singleton() { + let tool = get_tool(); + assert_eq!(tool.name(), "macro_test"); + // Verify that two calls return the same &'static reference (singleton identity). + let tool2 = get_tool(); + assert!(std::ptr::eq(tool, tool2)); + } + + #[test] + fn test_macro_get_spec_null_pointer_is_safe() { + // A null pointer should not cause UB — the function should return early. 
+ unsafe { + __amplifier_tool_get_spec(std::ptr::null_mut()); + } + } + + #[test] + fn test_macro_get_spec_len_positive() { + let len = __amplifier_tool_get_spec_len(); + assert!(len > 0); + } + + #[test] + fn test_macro_get_spec_len_matches_serialized() { + let len = __amplifier_tool_get_spec_len(); + let spec = get_tool().get_spec(); + let expected_json = serde_json::to_vec(&spec).unwrap(); + assert_eq!(len as usize, expected_json.len()); + } + + #[test] + fn test_macro_get_spec_roundtrip() { + let len = __amplifier_tool_get_spec_len() as usize; + let mut buf = vec![0u8; len]; + // SAFETY: buf is exactly `len` bytes, matching __amplifier_tool_get_spec_len(). + unsafe { __amplifier_tool_get_spec(buf.as_mut_ptr()) }; + + let spec: ToolSpec = serde_json::from_slice(&buf).unwrap(); + assert_eq!(spec.name, "macro_test"); + assert!(spec.parameters.contains_key("x")); + assert_eq!( + spec.description, + Some("A test tool for macro validation".to_string()) + ); + } +} + +// =========================================================================== +// HookHandler trait tests +// =========================================================================== + +#[cfg(test)] +mod hook_handler_tests { + use super::*; + use serde_json::json; + + #[derive(Default)] + struct TestHook; + + impl HookHandler for TestHook { + fn handle(&self, event: &str, _data: Value) -> Result { + match event { + "before_tool" => Ok(HookResult { + action: HookAction::Continue, + reason: Some(format!("allowed: {}", event)), + ..HookResult::default() + }), + "blocked" => Err("denied by policy".to_string()), + _ => Ok(HookResult::default()), + } + } + } + + #[test] + fn test_hook_handler_handle_success() { + let hook = TestHook; + let result = hook.handle("before_tool", json!({"tool": "echo"})); + assert!(result.is_ok()); + let hr = result.unwrap(); + assert_eq!(hr.action, HookAction::Continue); + assert_eq!(hr.reason, Some("allowed: before_tool".to_string())); + } + + #[test] + fn 
test_hook_handler_handle_error() { + let hook = TestHook; + let result = hook.handle("blocked", json!({})); + assert!(result.is_err()); + assert_eq!(result.unwrap_err(), "denied by policy"); + } + + #[test] + fn test_hook_handler_handle_default_event() { + let hook = TestHook; + let result = hook.handle("unknown_event", json!(null)); + assert!(result.is_ok()); + let hr = result.unwrap(); + assert_eq!(hr.action, HookAction::Continue); + } +} + +#[cfg(test)] +mod hook_macro_tests { + use super::*; + use serde_json::json; + + #[derive(Default)] + struct MacroTestHook; + + impl HookHandler for MacroTestHook { + fn handle(&self, _event: &str, _data: Value) -> Result { + Ok(HookResult::default()) + } + } + + export_hook!(MacroTestHook); + + #[test] + fn test_export_hook_returns_singleton() { + let hook = get_hook(); + let hook2 = get_hook(); + assert!(std::ptr::eq(hook, hook2)); + } + + #[test] + fn test_export_hook_handle_delegates() { + let hook = get_hook(); + let result = hook.handle("test_event", json!({"key": "value"})); + assert!(result.is_ok()); + let hr = result.unwrap(); + assert_eq!(hr.action, HookAction::Continue); + } +} + +// =========================================================================== +// ContextManager trait tests +// =========================================================================== + +#[cfg(test)] +mod context_manager_tests { + use super::*; + use serde_json::json; + + #[derive(Default)] + struct TestContext { + // Use a RefCell to allow interior mutability for testing. + messages: std::cell::RefCell>, + } + + impl ContextManager for TestContext { + fn add_message(&self, message: Value) -> Result<(), String> { + self.messages.borrow_mut().push(message); + Ok(()) + } + + fn get_messages(&self) -> Result, String> { + Ok(self.messages.borrow().clone()) + } + + fn get_messages_for_request(&self, _request: Value) -> Result, String> { + // Return all messages for any request in this simple test impl. 
+ Ok(self.messages.borrow().clone()) + } + + fn set_messages(&self, messages: Vec) -> Result<(), String> { + *self.messages.borrow_mut() = messages; + Ok(()) + } + + fn clear(&self) -> Result<(), String> { + self.messages.borrow_mut().clear(); + Ok(()) + } + } + + #[test] + fn test_context_manager_add_message() { + let ctx = TestContext::default(); + let result = ctx.add_message(json!({"role": "user", "content": "hello"})); + assert!(result.is_ok()); + assert_eq!(ctx.messages.borrow().len(), 1); + } + + #[test] + fn test_context_manager_get_messages_empty() { + let ctx = TestContext::default(); + let msgs = ctx.get_messages().unwrap(); + assert!(msgs.is_empty()); + } + + #[test] + fn test_context_manager_get_messages_after_add() { + let ctx = TestContext::default(); + ctx.add_message(json!({"role": "user", "content": "hi"})) + .unwrap(); + ctx.add_message(json!({"role": "assistant", "content": "hey"})) + .unwrap(); + let msgs = ctx.get_messages().unwrap(); + assert_eq!(msgs.len(), 2); + assert_eq!(msgs[0]["role"], "user"); + assert_eq!(msgs[1]["role"], "assistant"); + } + + #[test] + fn test_context_manager_get_messages_for_request() { + let ctx = TestContext::default(); + ctx.add_message(json!({"role": "user", "content": "test"})) + .unwrap(); + let msgs = ctx + .get_messages_for_request(json!({"model": "gpt-4"})) + .unwrap(); + assert_eq!(msgs.len(), 1); + } + + #[test] + fn test_context_manager_set_messages() { + let ctx = TestContext::default(); + ctx.add_message(json!("old")).unwrap(); + let new_msgs = vec![json!("a"), json!("b")]; + ctx.set_messages(new_msgs).unwrap(); + let msgs = ctx.get_messages().unwrap(); + assert_eq!(msgs.len(), 2); + assert_eq!(msgs[0], json!("a")); + assert_eq!(msgs[1], json!("b")); + } + + #[test] + fn test_context_manager_clear() { + let ctx = TestContext::default(); + ctx.add_message(json!("msg1")).unwrap(); + ctx.add_message(json!("msg2")).unwrap(); + ctx.clear().unwrap(); + let msgs = ctx.get_messages().unwrap(); + 
assert!(msgs.is_empty()); + } + + // A ContextManager implementation that returns errors, exercising the Err path. + #[derive(Default)] + struct FailingContext; + + impl ContextManager for FailingContext { + fn add_message(&self, _message: Value) -> Result<(), String> { + Err("context full".to_string()) + } + fn get_messages(&self) -> Result, String> { + Err("storage unavailable".to_string()) + } + fn get_messages_for_request(&self, _request: Value) -> Result, String> { + Err("invalid request".to_string()) + } + fn set_messages(&self, _messages: Vec) -> Result<(), String> { + Err("read-only context".to_string()) + } + fn clear(&self) -> Result<(), String> { + Err("clear not permitted".to_string()) + } + } + + #[test] + fn test_context_manager_add_message_error() { + let ctx = FailingContext; + let result = ctx.add_message(json!({"role": "user"})); + assert!(result.is_err()); + assert_eq!(result.unwrap_err(), "context full"); + } + + #[test] + fn test_context_manager_get_messages_error() { + let ctx = FailingContext; + let result = ctx.get_messages(); + assert!(result.is_err()); + assert_eq!(result.unwrap_err(), "storage unavailable"); + } + + #[test] + fn test_context_manager_get_messages_for_request_error() { + let ctx = FailingContext; + let result = ctx.get_messages_for_request(json!({"model": "gpt-4"})); + assert!(result.is_err()); + assert_eq!(result.unwrap_err(), "invalid request"); + } + + #[test] + fn test_context_manager_set_messages_error() { + let ctx = FailingContext; + let result = ctx.set_messages(vec![json!("msg")]); + assert!(result.is_err()); + assert_eq!(result.unwrap_err(), "read-only context"); + } + + #[test] + fn test_context_manager_clear_error() { + let ctx = FailingContext; + let result = ctx.clear(); + assert!(result.is_err()); + assert_eq!(result.unwrap_err(), "clear not permitted"); + } +} + +#[cfg(test)] +mod context_macro_tests { + use super::*; + use serde_json::json; + + #[derive(Default)] + struct MacroTestContext; + + impl 
ContextManager for MacroTestContext { + fn add_message(&self, _message: Value) -> Result<(), String> { + Ok(()) + } + fn get_messages(&self) -> Result, String> { + Ok(vec![json!({"role": "system", "content": "default"})]) + } + fn get_messages_for_request(&self, _request: Value) -> Result, String> { + Ok(vec![]) + } + fn set_messages(&self, _messages: Vec) -> Result<(), String> { + Ok(()) + } + fn clear(&self) -> Result<(), String> { + Ok(()) + } + } + + export_context!(MacroTestContext); + + #[test] + fn test_export_context_returns_singleton() { + let ctx = get_context(); + let ctx2 = get_context(); + assert!(std::ptr::eq(ctx, ctx2)); + } + + #[test] + fn test_export_context_delegates_get_messages() { + let ctx = get_context(); + let msgs = ctx.get_messages().unwrap(); + assert_eq!(msgs.len(), 1); + assert_eq!(msgs[0]["role"], "system"); + } +} + +// =========================================================================== +// ApprovalProvider trait tests +// =========================================================================== + +#[cfg(test)] +mod approval_provider_tests { + use super::*; + use serde_json::json; + use std::collections::HashMap; + + #[derive(Default)] + struct TestApproval; + + impl ApprovalProvider for TestApproval { + fn request_approval(&self, request: ApprovalRequest) -> Result { + if request.risk_level == "critical" { + Ok(ApprovalResponse { + approved: false, + reason: Some("auto-denied: critical risk".to_string()), + remember: false, + }) + } else { + Ok(ApprovalResponse { + approved: true, + reason: None, + remember: true, + }) + } + } + } + + #[test] + fn test_approval_provider_approve() { + let provider = TestApproval; + let req = ApprovalRequest { + tool_name: "ls".to_string(), + action: "list".to_string(), + details: HashMap::new(), + risk_level: "low".to_string(), + timeout: None, + }; + let result = provider.request_approval(req); + assert!(result.is_ok()); + let resp = result.unwrap(); + assert!(resp.approved); + 
assert!(resp.remember); + assert!(resp.reason.is_none()); + } + + #[test] + fn test_approval_provider_deny() { + let provider = TestApproval; + let req = ApprovalRequest { + tool_name: "rm".to_string(), + action: "delete".to_string(), + details: { + let mut m = HashMap::new(); + m.insert("path".to_string(), json!("/")); + m + }, + risk_level: "critical".to_string(), + timeout: Some(30.0), + }; + let result = provider.request_approval(req); + assert!(result.is_ok()); + let resp = result.unwrap(); + assert!(!resp.approved); + assert_eq!(resp.reason, Some("auto-denied: critical risk".to_string())); + assert!(!resp.remember); + } +} + +#[cfg(test)] +mod approval_macro_tests { + use super::*; + use std::collections::HashMap; + + #[derive(Default)] + struct MacroTestApproval; + + impl ApprovalProvider for MacroTestApproval { + fn request_approval(&self, _request: ApprovalRequest) -> Result { + Ok(ApprovalResponse { + approved: true, + reason: None, + remember: false, + }) + } + } + + export_approval!(MacroTestApproval); + + #[test] + fn test_export_approval_returns_singleton() { + let ap = get_approval(); + let ap2 = get_approval(); + assert!(std::ptr::eq(ap, ap2)); + } + + #[test] + fn test_export_approval_delegates_request() { + let ap = get_approval(); + let req = ApprovalRequest { + tool_name: "test".to_string(), + action: "run".to_string(), + details: HashMap::new(), + risk_level: "low".to_string(), + timeout: None, + }; + let result = ap.request_approval(req); + assert!(result.is_ok()); + assert!(result.unwrap().approved); + } +} + +// =========================================================================== +// Provider trait tests +// =========================================================================== + +#[cfg(test)] +mod provider_tests { + use super::*; + use serde_json::json; + use std::collections::HashMap; + + #[derive(Default)] + struct TestProvider; + + impl Provider for TestProvider { + fn name(&self) -> &str { + "test-provider" + } + + fn 
get_info(&self) -> ProviderInfo { + ProviderInfo { + id: "test".to_string(), + display_name: "Test Provider".to_string(), + credential_env_vars: vec!["TEST_API_KEY".to_string()], + capabilities: vec!["chat".to_string()], + defaults: HashMap::new(), + } + } + + fn list_models(&self) -> Result, String> { + Ok(vec![ModelInfo { + id: "test-model".to_string(), + display_name: "Test Model".to_string(), + context_window: 4096, + max_output_tokens: 1024, + capabilities: vec!["chat".to_string()], + defaults: HashMap::new(), + }]) + } + + fn complete(&self, _request: Value) -> Result { + Ok(ChatResponse { + content: vec![json!({"type": "text", "text": "hello"})], + tool_calls: None, + finish_reason: Some("stop".to_string()), + extra: HashMap::new(), + }) + } + + fn parse_tool_calls(&self, response: &ChatResponse) -> Vec { + response.tool_calls.clone().unwrap_or_default() + } + } + + #[test] + fn test_provider_name() { + let p = TestProvider; + assert_eq!(p.name(), "test-provider"); + } + + #[test] + fn test_provider_get_info() { + let p = TestProvider; + let info = p.get_info(); + assert_eq!(info.id, "test"); + assert_eq!(info.display_name, "Test Provider"); + assert_eq!(info.credential_env_vars, vec!["TEST_API_KEY"]); + } + + #[test] + fn test_provider_list_models() { + let p = TestProvider; + let models = p.list_models().unwrap(); + assert_eq!(models.len(), 1); + assert_eq!(models[0].id, "test-model"); + assert_eq!(models[0].context_window, 4096); + } + + #[test] + fn test_provider_complete() { + let p = TestProvider; + let result = p.complete(json!({"messages": []})).unwrap(); + assert_eq!(result.content.len(), 1); + assert_eq!(result.finish_reason, Some("stop".to_string())); + } + + #[test] + fn test_provider_parse_tool_calls_empty() { + let p = TestProvider; + let resp = ChatResponse { + content: vec![], + tool_calls: None, + finish_reason: None, + extra: HashMap::new(), + }; + let calls = p.parse_tool_calls(&resp); + assert!(calls.is_empty()); + } + + #[test] + fn 
test_provider_parse_tool_calls_present() { + let p = TestProvider; + let resp = ChatResponse { + content: vec![], + tool_calls: Some(vec![json!({"name": "echo", "args": {}})]), + finish_reason: None, + extra: HashMap::new(), + }; + let calls = p.parse_tool_calls(&resp); + assert_eq!(calls.len(), 1); + assert_eq!(calls[0]["name"], "echo"); + } + + #[test] + fn test_provider_complete_error() { + #[derive(Default)] + struct FailProvider; + + impl Provider for FailProvider { + fn name(&self) -> &str { + "fail" + } + fn get_info(&self) -> ProviderInfo { + ProviderInfo { + id: "fail".to_string(), + display_name: "Fail".to_string(), + credential_env_vars: vec![], + capabilities: vec![], + defaults: HashMap::new(), + } + } + fn list_models(&self) -> Result, String> { + Err("not available".to_string()) + } + fn complete(&self, _request: Value) -> Result { + Err("completion failed".to_string()) + } + fn parse_tool_calls(&self, _response: &ChatResponse) -> Vec { + vec![] + } + } + + let p = FailProvider; + assert!(p.list_models().is_err()); + assert_eq!(p.list_models().unwrap_err(), "not available"); + assert!(p.complete(json!({})).is_err()); + assert_eq!(p.complete(json!({})).unwrap_err(), "completion failed"); + } +} + +#[cfg(test)] +mod provider_macro_tests { + use super::*; + use std::collections::HashMap; + + #[derive(Default)] + struct MacroTestProvider; + + impl Provider for MacroTestProvider { + fn name(&self) -> &str { + "macro-provider" + } + fn get_info(&self) -> ProviderInfo { + ProviderInfo { + id: "macro".to_string(), + display_name: "Macro Provider".to_string(), + credential_env_vars: vec![], + capabilities: vec![], + defaults: HashMap::new(), + } + } + fn list_models(&self) -> Result, String> { + Ok(vec![]) + } + fn complete(&self, _request: Value) -> Result { + Ok(ChatResponse { + content: vec![], + tool_calls: None, + finish_reason: None, + extra: HashMap::new(), + }) + } + fn parse_tool_calls(&self, _response: &ChatResponse) -> Vec { + vec![] + } + } + + 
export_provider!(MacroTestProvider); + + #[test] + fn test_export_provider_returns_singleton() { + let p = get_provider(); + let p2 = get_provider(); + assert!(std::ptr::eq(p, p2)); + } + + #[test] + fn test_export_provider_delegates_name() { + let p = get_provider(); + assert_eq!(p.name(), "macro-provider"); + } +} + +// =========================================================================== +// Orchestrator trait tests +// =========================================================================== + +#[cfg(test)] +mod orchestrator_tests { + use super::*; + + #[derive(Default)] + struct TestOrchestrator; + + impl Orchestrator for TestOrchestrator { + fn execute(&self, prompt: String) -> Result { + Ok(format!("executed: {}", prompt)) + } + } + + #[test] + fn test_orchestrator_execute_success() { + let o = TestOrchestrator; + let result = o.execute("hello".to_string()); + assert!(result.is_ok()); + assert_eq!(result.unwrap(), "executed: hello"); + } + + #[test] + fn test_orchestrator_execute_error() { + #[derive(Default)] + struct FailOrchestrator; + + impl Orchestrator for FailOrchestrator { + fn execute(&self, _prompt: String) -> Result { + Err("orchestration failed".to_string()) + } + } + + let o = FailOrchestrator; + let result = o.execute("test".to_string()); + assert!(result.is_err()); + assert_eq!(result.unwrap_err(), "orchestration failed"); + } +} + +#[cfg(test)] +mod orchestrator_macro_tests { + use super::*; + + #[derive(Default)] + struct MacroTestOrchestrator; + + impl Orchestrator for MacroTestOrchestrator { + fn execute(&self, prompt: String) -> Result { + Ok(format!("macro: {}", prompt)) + } + } + + export_orchestrator!(MacroTestOrchestrator); + + #[test] + fn test_export_orchestrator_returns_singleton() { + let o = get_orchestrator(); + let o2 = get_orchestrator(); + assert!(std::ptr::eq(o, o2)); + } + + #[test] + fn test_export_orchestrator_delegates_execute() { + let o = get_orchestrator(); + let result = 
o.execute("test".to_string()).unwrap(); + assert_eq!(result, "macro: test"); + } +} + +// =========================================================================== +// kernel module tests +// =========================================================================== + +#[cfg(test)] +mod kernel_tests { + use super::*; + use serde_json::json; + + #[test] + fn test_kernel_execute_tool_returns_placeholder_err() { + let result = kernel::execute_tool("echo", &json!({"msg": "hi"})); + assert!(result.is_err()); + // Must contain a meaningful placeholder message + let err = result.unwrap_err(); + assert!(!err.is_empty()); + } + + #[test] + fn test_kernel_complete_with_provider_returns_placeholder_err() { + let result = kernel::complete_with_provider("openai", &json!({"messages": []})); + assert!(result.is_err()); + let err = result.unwrap_err(); + assert!(!err.is_empty()); + } + + #[test] + fn test_kernel_emit_hook_returns_placeholder_err() { + let result = kernel::emit_hook("before_tool", &json!({})); + assert!(result.is_err()); + let err = result.unwrap_err(); + assert!(!err.is_empty()); + } + + #[test] + fn test_kernel_get_messages_returns_placeholder_err() { + let result = kernel::get_messages(); + assert!(result.is_err()); + let err = result.unwrap_err(); + assert!(!err.is_empty()); + } + + #[test] + fn test_kernel_add_message_returns_placeholder_err() { + let result = kernel::add_message(&json!({"role": "user", "content": "hello"})); + assert!(result.is_err()); + let err = result.unwrap_err(); + assert!(!err.is_empty()); + } + + #[test] + fn test_kernel_get_capability_returns_placeholder_err() { + let result = kernel::get_capability("tool_execution"); + assert!(result.is_err()); + let err = result.unwrap_err(); + assert!(!err.is_empty()); + } + + #[test] + fn test_kernel_register_capability_returns_placeholder_err() { + let result = kernel::register_capability("my_cap", &json!({"version": 1})); + assert!(result.is_err()); + let err = result.unwrap_err(); + 
assert!(!err.is_empty()); + } + + #[test] + fn test_kernel_stub_feature_is_enabled_by_default() { + // The kernel-stub feature must be enabled by default so that + // non-wasm builds (including tests) can use the stub functions. + // When targeting wasm32 WITHOUT this feature, a compile_error! + // should prevent compilation. + assert!( + cfg!(feature = "kernel-stub"), + "kernel-stub feature should be enabled by default" + ); + } +} + +// =========================================================================== +// WASM fixture acceptance tests +// =========================================================================== + +#[cfg(test)] +mod wasm_fixture_tests { + use std::path::Path; + + #[test] + fn test_deny_hook_wasm_fixture_exists_and_has_valid_size() { + // The deny-hook.wasm fixture must exist and be > 1000 bytes. + let fixture_path = + Path::new(env!("CARGO_MANIFEST_DIR")).join("../../tests/fixtures/wasm/deny-hook.wasm"); + assert!( + fixture_path.exists(), + "deny-hook.wasm fixture not found at {:?}", + fixture_path + ); + let metadata = std::fs::metadata(&fixture_path).expect("failed to read file metadata"); + assert!( + metadata.len() > 1000, + "deny-hook.wasm is too small: {} bytes (expected > 1000)", + metadata.len() + ); + } + + #[test] + fn test_deny_hook_wasm_fixture_has_wasm_magic_bytes() { + // Verify the file starts with the WASM magic number (\0asm). + let fixture_path = + Path::new(env!("CARGO_MANIFEST_DIR")).join("../../tests/fixtures/wasm/deny-hook.wasm"); + let bytes = std::fs::read(&fixture_path).expect("failed to read wasm file"); + assert!( + bytes.len() >= 4, + "deny-hook.wasm too small to contain magic bytes" + ); + assert_eq!( + &bytes[0..4], + b"\0asm", + "deny-hook.wasm does not start with WASM magic bytes" + ); + } + + #[test] + fn test_memory_context_wasm_fixture_exists_and_has_valid_size() { + // The memory-context.wasm fixture must exist and be > 1000 bytes. 
+ let fixture_path = Path::new(env!("CARGO_MANIFEST_DIR")) + .join("../../tests/fixtures/wasm/memory-context.wasm"); + assert!( + fixture_path.exists(), + "memory-context.wasm fixture not found at {:?}", + fixture_path + ); + let metadata = std::fs::metadata(&fixture_path).expect("failed to read file metadata"); + assert!( + metadata.len() > 1000, + "memory-context.wasm is too small: {} bytes (expected > 1000)", + metadata.len() + ); + } + + #[test] + fn test_memory_context_wasm_fixture_has_wasm_magic_bytes() { + // Verify the file starts with the WASM magic number (\0asm). + let fixture_path = Path::new(env!("CARGO_MANIFEST_DIR")) + .join("../../tests/fixtures/wasm/memory-context.wasm"); + let bytes = std::fs::read(&fixture_path).expect("failed to read wasm file"); + assert!( + bytes.len() >= 4, + "memory-context.wasm too small to contain magic bytes" + ); + assert_eq!( + &bytes[0..4], + b"\0asm", + "memory-context.wasm does not start with WASM magic bytes" + ); + } + + #[test] + fn test_auto_approve_wasm_fixture_exists_and_has_valid_size() { + // The auto-approve.wasm fixture must exist and be > 1000 bytes. + let fixture_path = Path::new(env!("CARGO_MANIFEST_DIR")) + .join("../../tests/fixtures/wasm/auto-approve.wasm"); + assert!( + fixture_path.exists(), + "auto-approve.wasm fixture not found at {:?}", + fixture_path + ); + let metadata = std::fs::metadata(&fixture_path).expect("failed to read file metadata"); + assert!( + metadata.len() > 1000, + "auto-approve.wasm is too small: {} bytes (expected > 1000)", + metadata.len() + ); + } + + #[test] + fn test_auto_approve_wasm_fixture_has_wasm_magic_bytes() { + // Verify the file starts with the WASM magic number (\0asm). 
+ let fixture_path = Path::new(env!("CARGO_MANIFEST_DIR")) + .join("../../tests/fixtures/wasm/auto-approve.wasm"); + let bytes = std::fs::read(&fixture_path).expect("failed to read wasm file"); + assert!( + bytes.len() >= 4, + "auto-approve.wasm too small to contain magic bytes" + ); + assert_eq!( + &bytes[0..4], + b"\0asm", + "auto-approve.wasm does not start with WASM magic bytes" + ); + } + + #[test] + fn test_echo_provider_wasm_fixture_exists_and_has_valid_size() { + // The echo-provider.wasm fixture must exist and be > 1000 bytes. + let fixture_path = Path::new(env!("CARGO_MANIFEST_DIR")) + .join("../../tests/fixtures/wasm/echo-provider.wasm"); + assert!( + fixture_path.exists(), + "echo-provider.wasm fixture not found at {:?}", + fixture_path + ); + let metadata = std::fs::metadata(&fixture_path).expect("failed to read file metadata"); + assert!( + metadata.len() > 1000, + "echo-provider.wasm is too small: {} bytes (expected > 1000)", + metadata.len() + ); + } + + #[test] + fn test_echo_provider_wasm_fixture_has_wasm_magic_bytes() { + // Verify the file starts with the WASM magic number (\0asm). + let fixture_path = Path::new(env!("CARGO_MANIFEST_DIR")) + .join("../../tests/fixtures/wasm/echo-provider.wasm"); + let bytes = std::fs::read(&fixture_path).expect("failed to read wasm file"); + assert!( + bytes.len() >= 4, + "echo-provider.wasm too small to contain magic bytes" + ); + assert_eq!( + &bytes[0..4], + b"\0asm", + "echo-provider.wasm does not start with WASM magic bytes" + ); + } + + #[test] + fn test_passthrough_orchestrator_wasm_fixture_exists_and_has_valid_size() { + // The passthrough-orchestrator.wasm fixture must exist and be > 1000 bytes. 
+ let fixture_path = Path::new(env!("CARGO_MANIFEST_DIR")) + .join("../../tests/fixtures/wasm/passthrough-orchestrator.wasm"); + assert!( + fixture_path.exists(), + "passthrough-orchestrator.wasm fixture not found at {:?}", + fixture_path + ); + let metadata = std::fs::metadata(&fixture_path).expect("failed to read file metadata"); + assert!( + metadata.len() > 1000, + "passthrough-orchestrator.wasm is too small: {} bytes (expected > 1000)", + metadata.len() + ); + } + + #[test] + fn test_passthrough_orchestrator_wasm_fixture_has_wasm_magic_bytes() { + // Verify the file starts with the WASM magic number (\0asm). + let fixture_path = Path::new(env!("CARGO_MANIFEST_DIR")) + .join("../../tests/fixtures/wasm/passthrough-orchestrator.wasm"); + let bytes = std::fs::read(&fixture_path).expect("failed to read wasm file"); + assert!( + bytes.len() >= 4, + "passthrough-orchestrator.wasm too small to contain magic bytes" + ); + assert_eq!( + &bytes[0..4], + b"\0asm", + "passthrough-orchestrator.wasm does not start with WASM magic bytes" + ); + } +} diff --git a/crates/amplifier-guest/src/types.rs b/crates/amplifier-guest/src/types.rs new file mode 100644 index 0000000..1f98560 --- /dev/null +++ b/crates/amplifier-guest/src/types.rs @@ -0,0 +1,646 @@ +use serde::{Deserialize, Serialize}; +use serde_json::Value; +use std::collections::HashMap; + +/// Specification for a tool exposed by a WASM module. +#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] +pub struct ToolSpec { + pub name: String, + pub parameters: HashMap, + #[serde(skip_serializing_if = "Option::is_none")] + pub description: Option, +} + +/// Result returned from a tool execution. 
+#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] +pub struct ToolResult { + #[serde(default = "default_true")] + pub success: bool, + #[serde(skip_serializing_if = "Option::is_none")] + pub output: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub error: Option>, +} + +fn default_true() -> bool { + true +} + +impl Default for ToolResult { + fn default() -> Self { + Self { + success: true, + output: None, + error: None, + } + } +} + +/// Action a hook handler can take in response to a lifecycle event. +#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] +#[serde(rename_all = "snake_case")] +pub enum HookAction { + Continue, + Deny, + Modify, + InjectContext, + AskUser, +} + +/// Role for injected context messages. +/// Serializes with default PascalCase (e.g. "System", "User") per spec. +#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] +pub enum ContextInjectionRole { + System, + User, + Assistant, +} + +/// Default behavior when approval times out. +/// Serializes with default PascalCase (e.g. "Allow", "Deny") per spec. +#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] +pub enum ApprovalDefault { + Allow, + Deny, +} + +/// Severity level for user-facing messages. +/// Serializes with default PascalCase (e.g. "Info", "Warning") per spec. +#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] +pub enum UserMessageLevel { + Info, + Warning, + Error, +} + +/// Full result returned by a hook handler. 
+#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] +pub struct HookResult { + pub action: HookAction, + #[serde(skip_serializing_if = "Option::is_none")] + pub data: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub reason: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub context_injection: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub context_injection_role: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub ephemeral: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub approval_prompt: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub approval_options: Option>, + #[serde(default = "default_approval_timeout")] + pub approval_timeout: f64, + #[serde(skip_serializing_if = "Option::is_none")] + pub approval_default: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub suppress_output: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub user_message: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub user_message_level: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub user_message_source: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub append_to_last_tool_result: Option, +} + +fn default_approval_timeout() -> f64 { + 300.0 +} + +impl Default for HookResult { + fn default() -> Self { + Self { + action: HookAction::Continue, + data: None, + reason: None, + context_injection: None, + context_injection_role: None, + ephemeral: None, + approval_prompt: None, + approval_options: None, + approval_timeout: default_approval_timeout(), + approval_default: None, + suppress_output: None, + user_message: None, + user_message_level: None, + user_message_source: None, + append_to_last_tool_result: None, + } + } +} + +/// Request for human-in-the-loop approval. 
+#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] +pub struct ApprovalRequest { + pub tool_name: String, + pub action: String, + pub details: HashMap, + pub risk_level: String, + #[serde(skip_serializing_if = "Option::is_none")] + pub timeout: Option, +} + +/// Response from the approval provider. +#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] +pub struct ApprovalResponse { + pub approved: bool, + #[serde(skip_serializing_if = "Option::is_none")] + pub reason: Option, + pub remember: bool, +} + +/// Metadata about an LLM provider. +#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] +pub struct ProviderInfo { + pub id: String, + pub display_name: String, + pub credential_env_vars: Vec, + pub capabilities: Vec, + pub defaults: HashMap, +} + +/// Metadata about a specific model. +#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] +pub struct ModelInfo { + pub id: String, + pub display_name: String, + pub context_window: i64, + pub max_output_tokens: i64, + pub capabilities: Vec, + pub defaults: HashMap, +} + +/// Request for an LLM chat completion. +#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] +pub struct ChatRequest { + pub messages: Vec, + #[serde(skip_serializing_if = "Option::is_none")] + pub model: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub temperature: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub max_output_tokens: Option, + #[serde(flatten)] + pub extra: HashMap, +} + +/// Response from an LLM chat completion. 
+#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] +pub struct ChatResponse { + pub content: Vec, + #[serde(skip_serializing_if = "Option::is_none")] + pub tool_calls: Option>, + #[serde(skip_serializing_if = "Option::is_none")] + pub finish_reason: Option, + #[serde(flatten)] + pub extra: HashMap, +} + +#[cfg(test)] +mod tests { + use super::*; + use serde_json::{json, Value}; + use std::collections::HashMap; + + // --- ToolSpec tests --- + + #[test] + fn test_tool_spec_creation() { + let mut params = HashMap::new(); + params.insert("arg1".to_string(), json!("string")); + let spec = ToolSpec { + name: "my_tool".to_string(), + parameters: params, + description: Some("A test tool".to_string()), + }; + assert_eq!(spec.name, "my_tool"); + assert!(spec.description.is_some()); + } + + #[test] + fn test_tool_spec_serde_roundtrip() { + let mut params = HashMap::new(); + params.insert("x".to_string(), json!(42)); + let spec = ToolSpec { + name: "calc".to_string(), + parameters: params, + description: None, + }; + let json_str = serde_json::to_string(&spec).unwrap(); + let deserialized: ToolSpec = serde_json::from_str(&json_str).unwrap(); + assert_eq!(deserialized.name, "calc"); + assert_eq!(deserialized.parameters.get("x"), Some(&json!(42))); + assert!(deserialized.description.is_none()); + } + + // --- ToolResult tests --- + + #[test] + fn test_tool_result_defaults() { + let result = ToolResult::default(); + assert!(result.success); + assert!(result.output.is_none()); + assert!(result.error.is_none()); + } + + #[test] + fn test_tool_result_serde_roundtrip() { + let result = ToolResult { + success: false, + output: Some(json!("hello")), + error: Some({ + let mut m = HashMap::new(); + m.insert("code".to_string(), json!(404)); + m + }), + }; + let json_str = serde_json::to_string(&result).unwrap(); + let deserialized: ToolResult = serde_json::from_str(&json_str).unwrap(); + assert!(!deserialized.success); + assert_eq!(deserialized.output, Some(json!("hello"))); + 
let err = deserialized.error.as_ref().unwrap(); + assert_eq!(err.get("code"), Some(&json!(404))); + } + + // --- HookAction tests --- + + #[test] + fn test_hook_action_serde_snake_case() { + let action = HookAction::InjectContext; + let json_str = serde_json::to_string(&action).unwrap(); + assert_eq!(json_str, "\"inject_context\""); + + let action = HookAction::AskUser; + let json_str = serde_json::to_string(&action).unwrap(); + assert_eq!(json_str, "\"ask_user\""); + } + + #[test] + fn test_hook_action_all_variants() { + let variants = vec![ + HookAction::Continue, + HookAction::Deny, + HookAction::Modify, + HookAction::InjectContext, + HookAction::AskUser, + ]; + for v in variants { + let s = serde_json::to_string(&v).unwrap(); + let back: HookAction = serde_json::from_str(&s).unwrap(); + assert_eq!(format!("{:?}", v), format!("{:?}", back)); + } + } + + // --- ContextInjectionRole tests --- + + #[test] + fn test_context_injection_role_variants() { + let roles = vec![ + ContextInjectionRole::System, + ContextInjectionRole::User, + ContextInjectionRole::Assistant, + ]; + for r in roles { + let s = serde_json::to_string(&r).unwrap(); + let back: ContextInjectionRole = serde_json::from_str(&s).unwrap(); + assert_eq!(format!("{:?}", r), format!("{:?}", back)); + } + } + + // --- ApprovalDefault tests --- + + #[test] + fn test_approval_default_variants() { + let vals = vec![ApprovalDefault::Allow, ApprovalDefault::Deny]; + for v in vals { + let s = serde_json::to_string(&v).unwrap(); + let back: ApprovalDefault = serde_json::from_str(&s).unwrap(); + assert_eq!(format!("{:?}", v), format!("{:?}", back)); + } + } + + // --- UserMessageLevel tests --- + + #[test] + fn test_user_message_level_variants() { + let vals = vec![ + UserMessageLevel::Info, + UserMessageLevel::Warning, + UserMessageLevel::Error, + ]; + for v in vals { + let s = serde_json::to_string(&v).unwrap(); + let back: UserMessageLevel = serde_json::from_str(&s).unwrap(); + assert_eq!(format!("{:?}", v), 
format!("{:?}", back)); + } + } + + // --- HookResult tests --- + + #[test] + fn test_hook_result_defaults() { + let hr = HookResult::default(); + assert_eq!(hr.approval_timeout, 300.0); + } + + #[test] + fn test_hook_result_serde_roundtrip() { + let hr = HookResult { + action: HookAction::Continue, + data: None, + reason: Some("test reason".to_string()), + context_injection: None, + context_injection_role: None, + ephemeral: None, + approval_prompt: None, + approval_options: None, + approval_timeout: 300.0, + approval_default: None, + suppress_output: None, + user_message: None, + user_message_level: None, + user_message_source: None, + append_to_last_tool_result: None, + }; + let json_str = serde_json::to_string(&hr).unwrap(); + let deserialized: HookResult = serde_json::from_str(&json_str).unwrap(); + assert_eq!(deserialized.action, HookAction::Continue); + assert!(deserialized.data.is_none()); + assert_eq!(deserialized.reason, Some("test reason".to_string())); + assert!(deserialized.context_injection.is_none()); + assert!(deserialized.context_injection_role.is_none()); + assert!(deserialized.ephemeral.is_none()); + assert!(deserialized.approval_prompt.is_none()); + assert!(deserialized.approval_options.is_none()); + assert_eq!(deserialized.approval_timeout, 300.0); + assert!(deserialized.approval_default.is_none()); + assert!(deserialized.suppress_output.is_none()); + assert!(deserialized.user_message.is_none()); + assert!(deserialized.user_message_level.is_none()); + assert!(deserialized.user_message_source.is_none()); + assert!(deserialized.append_to_last_tool_result.is_none()); + } + + // --- ApprovalRequest tests --- + + #[test] + fn test_approval_request_creation() { + let req = ApprovalRequest { + tool_name: "rm".to_string(), + action: "delete".to_string(), + details: { + let mut m = HashMap::new(); + m.insert("path".to_string(), json!("/tmp/test")); + m + }, + risk_level: "high".to_string(), + timeout: Some(60.0), + }; + assert_eq!(req.tool_name, "rm"); 
+ assert_eq!(req.risk_level, "high"); + } + + #[test] + fn test_approval_request_serde_roundtrip() { + let req = ApprovalRequest { + tool_name: "tool".to_string(), + action: "exec".to_string(), + details: HashMap::new(), + risk_level: "low".to_string(), + timeout: None, + }; + let json_str = serde_json::to_string(&req).unwrap(); + let deserialized: ApprovalRequest = serde_json::from_str(&json_str).unwrap(); + assert_eq!(deserialized.tool_name, "tool"); + assert_eq!(deserialized.action, "exec"); + assert!(deserialized.details.is_empty()); + assert_eq!(deserialized.risk_level, "low"); + assert!(deserialized.timeout.is_none()); + } + + // --- ApprovalResponse tests --- + + #[test] + fn test_approval_response_creation() { + let resp = ApprovalResponse { + approved: true, + reason: Some("looks safe".to_string()), + remember: false, + }; + assert!(resp.approved); + assert!(!resp.remember); + } + + // --- ProviderInfo tests --- + + #[test] + fn test_provider_info_creation() { + let info = ProviderInfo { + id: "openai".to_string(), + display_name: "OpenAI".to_string(), + credential_env_vars: vec!["OPENAI_API_KEY".to_string()], + capabilities: vec!["chat".to_string()], + defaults: { + let mut m = HashMap::new(); + m.insert("model".to_string(), json!("gpt-4")); + m + }, + }; + assert_eq!(info.id, "openai"); + assert_eq!(info.credential_env_vars.len(), 1); + } + + // --- ModelInfo tests --- + + #[test] + fn test_model_info_creation() { + let info = ModelInfo { + id: "gpt-4".to_string(), + display_name: "GPT-4".to_string(), + context_window: 128000, + max_output_tokens: 4096, + capabilities: vec!["chat".to_string(), "tools".to_string()], + defaults: HashMap::new(), + }; + assert_eq!(info.context_window, 128000); + assert_eq!(info.max_output_tokens, 4096); + } + + // --- ChatRequest tests --- + + #[test] + fn test_chat_request_serde_roundtrip() { + let req = ChatRequest { + messages: vec![json!({"role": "user", "content": "hello"})], + model: Some("gpt-4".to_string()), + 
temperature: Some(0.7), + max_output_tokens: Some(1024), + extra: { + let mut m = HashMap::new(); + m.insert("stream".to_string(), json!(false)); + m + }, + }; + let json_str = serde_json::to_string(&req).unwrap(); + // #[serde(flatten)] causes extra fields to appear at the top level in JSON. + // On deserialization, any top-level key not matching a named field is absorbed + // into the `extra` HashMap, providing extensible wire-format support. + let v: Value = serde_json::from_str(&json_str).unwrap(); + assert_eq!(v["model"], json!("gpt-4")); + assert_eq!(v["stream"], json!(false)); + + let deserialized: ChatRequest = serde_json::from_str(&json_str).unwrap(); + assert_eq!(deserialized.model, Some("gpt-4".to_string())); + assert_eq!(deserialized.extra.get("stream"), Some(&json!(false))); + } + + // --- ChatResponse tests --- + + #[test] + fn test_chat_response_serde_roundtrip() { + let resp = ChatResponse { + content: vec![json!({"type": "text", "text": "Hello!"})], + tool_calls: None, + finish_reason: Some("stop".to_string()), + extra: HashMap::new(), + }; + let json_str = serde_json::to_string(&resp).unwrap(); + let deserialized: ChatResponse = serde_json::from_str(&json_str).unwrap(); + assert_eq!(deserialized.content.len(), 1); + assert_eq!(deserialized.content[0]["text"], json!("Hello!")); + assert!(deserialized.tool_calls.is_none()); + assert_eq!(deserialized.finish_reason, Some("stop".to_string())); + assert!(deserialized.extra.is_empty()); + } + + // --- PartialEq roundtrip tests --- + + #[test] + fn test_tool_spec_partial_eq_roundtrip() { + let mut params = HashMap::new(); + params.insert("x".to_string(), json!(42)); + let original = ToolSpec { + name: "calc".to_string(), + parameters: params, + description: Some("calculator".to_string()), + }; + let json_str = serde_json::to_string(&original).unwrap(); + let deserialized: ToolSpec = serde_json::from_str(&json_str).unwrap(); + assert_eq!(deserialized, original); + } + + #[test] + fn 
test_tool_result_partial_eq_roundtrip() { + let original = ToolResult { + success: false, + output: Some(json!("hello")), + error: Some({ + let mut m = HashMap::new(); + m.insert("code".to_string(), json!(404)); + m + }), + }; + let json_str = serde_json::to_string(&original).unwrap(); + let deserialized: ToolResult = serde_json::from_str(&json_str).unwrap(); + assert_eq!(deserialized, original); + } + + #[test] + fn test_hook_result_partial_eq_roundtrip() { + let original = HookResult { + action: HookAction::InjectContext, + data: Some(json!({"key": "value"})), + reason: Some("test reason".to_string()), + context_injection: Some("injected".to_string()), + context_injection_role: Some(ContextInjectionRole::System), + ephemeral: Some(true), + approval_prompt: Some("approve?".to_string()), + approval_options: Some(vec!["yes".to_string(), "no".to_string()]), + approval_timeout: 300.0, + approval_default: Some(ApprovalDefault::Deny), + suppress_output: Some(false), + user_message: Some("msg".to_string()), + user_message_level: Some(UserMessageLevel::Warning), + user_message_source: Some("hook".to_string()), + append_to_last_tool_result: Some(json!("extra")), + }; + let json_str = serde_json::to_string(&original).unwrap(); + let deserialized: HookResult = serde_json::from_str(&json_str).unwrap(); + assert_eq!(deserialized, original); + } + + #[test] + fn test_approval_request_partial_eq_roundtrip() { + let original = ApprovalRequest { + tool_name: "rm".to_string(), + action: "delete".to_string(), + details: { + let mut m = HashMap::new(); + m.insert("path".to_string(), json!("/tmp/test")); + m + }, + risk_level: "high".to_string(), + timeout: Some(60.0), + }; + let json_str = serde_json::to_string(&original).unwrap(); + let deserialized: ApprovalRequest = serde_json::from_str(&json_str).unwrap(); + assert_eq!(deserialized, original); + } + + #[test] + fn test_approval_response_partial_eq_roundtrip() { + let original = ApprovalResponse { + approved: true, + reason: 
Some("looks safe".to_string()), + remember: false, + }; + let json_str = serde_json::to_string(&original).unwrap(); + let deserialized: ApprovalResponse = serde_json::from_str(&json_str).unwrap(); + assert_eq!(deserialized, original); + } + + #[test] + fn test_provider_info_partial_eq_roundtrip() { + let original = ProviderInfo { + id: "openai".to_string(), + display_name: "OpenAI".to_string(), + credential_env_vars: vec!["OPENAI_API_KEY".to_string()], + capabilities: vec!["chat".to_string()], + defaults: { + let mut m = HashMap::new(); + m.insert("model".to_string(), json!("gpt-4")); + m + }, + }; + let json_str = serde_json::to_string(&original).unwrap(); + let deserialized: ProviderInfo = serde_json::from_str(&json_str).unwrap(); + assert_eq!(deserialized, original); + } + + #[test] + fn test_model_info_partial_eq_roundtrip() { + let original = ModelInfo { + id: "gpt-4".to_string(), + display_name: "GPT-4".to_string(), + context_window: 128000, + max_output_tokens: 4096, + capabilities: vec!["chat".to_string(), "tools".to_string()], + defaults: HashMap::new(), + }; + let json_str = serde_json::to_string(&original).unwrap(); + let deserialized: ModelInfo = serde_json::from_str(&json_str).unwrap(); + assert_eq!(deserialized, original); + } + + // --- Re-export test --- + + #[test] + fn test_value_reexport() { + // Verify serde_json::Value is re-exported from the crate root + let _v: serde_json::Value = json!(42); + } +} diff --git a/crates/amplifier-guest/wit/amplifier-modules.wit b/crates/amplifier-guest/wit/amplifier-modules.wit new file mode 100644 index 0000000..ff6bdbd --- /dev/null +++ b/crates/amplifier-guest/wit/amplifier-modules.wit @@ -0,0 +1,155 @@ +// WIT interface definitions for Amplifier WASM modules. +// +// Defines the contract between host (kernel) and guest (WASM modules). +// All complex types are serialized as protobuf bytes (list) to avoid +// duplicating the full proto schema in WIT. 
The canonical proto definitions +// live in proto/amplifier_module.proto. + +package amplifier:modules@1.0.0; + +// --------------------------------------------------------------------------- +// Tier 1: Pure-compute interfaces (no host imports required) +// --------------------------------------------------------------------------- + +/// Tool module interface — exposes a single tool to the kernel. +interface tool { + /// Return the tool specification (ToolSpec proto, serialized). + get-spec: func() -> list; + + /// Execute the tool with proto-serialized input (ToolExecuteRequest). + /// Returns proto-serialized ToolExecuteResponse on success. + execute: func(input: list) -> result, string>; +} + +/// Hook handler interface — responds to lifecycle events. +interface hook-handler { + /// Handle a lifecycle event (HookHandleRequest proto, serialized). + /// Returns proto-serialized HookResult on success. + handle: func(event: list) -> result, string>; +} + +/// Context manager interface — owns conversation memory policy. +interface context-manager { + /// Append a message to the context (Message proto, serialized). + add-message: func(message: list) -> result<_, string>; + + /// Get all messages (raw, uncompacted). Returns GetMessagesResponse proto. + get-messages: func() -> result, string>; + + /// Get messages for an LLM request (compacted). Accepts + /// GetMessagesForRequestParams proto, returns GetMessagesResponse proto. + get-messages-for-request: func(params: list) -> result, string>; + + /// Replace the entire message list (SetMessagesRequest proto). + set-messages: func(messages: list) -> result<_, string>; + + /// Clear all messages from context. + clear: func() -> result<_, string>; +} + +/// Approval provider interface — human-in-the-loop approval gate. +interface approval-provider { + /// Request approval from the user (ApprovalRequest proto, serialized). + /// Returns proto-serialized ApprovalResponse on success. 
+ request-approval: func(request: list) -> result, string>; +} + +// --------------------------------------------------------------------------- +// Tier 2: Interfaces that may need host imports or network access +// --------------------------------------------------------------------------- + +/// Provider interface — LLM completions in any language. +interface provider { + /// Return provider metadata (ProviderInfo proto, serialized). + get-info: func() -> list; + + /// List available models. Returns ListModelsResponse proto. + list-models: func() -> result, string>; + + /// Generate a completion (ChatRequest proto → ChatResponse proto). + complete: func(request: list) -> result, string>; + + /// Extract tool calls from a response (ChatResponse proto → + /// ParseToolCallsResponse proto). + parse-tool-calls: func(response: list) -> result, string>; +} + +/// Orchestrator interface — high-level agent-loop execution. +interface orchestrator { + /// Run the agent loop (OrchestratorExecuteRequest proto → + /// OrchestratorExecuteResponse proto). + execute: func(request: list) -> result, string>; +} + +// --------------------------------------------------------------------------- +// Host interface: kernel callbacks available to guest modules +// --------------------------------------------------------------------------- + +/// Kernel service interface — host-provided callbacks for guest modules. +/// Orchestrator and provider modules import this to call back into the kernel. +interface kernel-service { + /// Execute a tool by name (ExecuteToolRequest proto → ToolResult proto). + execute-tool: func(request: list) -> result, string>; + + /// Complete with a named provider (CompleteWithProviderRequest proto → + /// ChatResponse proto). + complete-with-provider: func(request: list) -> result, string>; + + /// Emit a hook event (EmitHookRequest proto → HookResult proto). 
+ emit-hook: func(request: list) -> result, string>; + + /// Get conversation messages (GetMessagesRequest proto → + /// GetMessagesResponse proto). + get-messages: func(request: list) -> result, string>; + + /// Add a message to conversation (KernelAddMessageRequest proto). + add-message: func(request: list) -> result<_, string>; + + /// Look up a registered capability (GetCapabilityRequest proto → + /// GetCapabilityResponse proto). + get-capability: func(request: list) -> result, string>; + + /// Register a capability (RegisterCapabilityRequest proto). + register-capability: func(request: list) -> result<_, string>; +} + +// --------------------------------------------------------------------------- +// World definitions — one per module type +// --------------------------------------------------------------------------- + +/// Tier 1: Pure-compute tool module. +world tool-module { + export tool; +} + +/// Tier 1: Pure-compute hook handler module. +world hook-module { + export hook-handler; +} + +/// Tier 1: Pure-compute context manager module. +world context-module { + export context-manager; +} + +/// Tier 1: Pure-compute approval provider module. +world approval-module { + export approval-provider; +} + +/// Tier 2: Provider module — pure-compute for now; HTTP deferred. +/// +/// Note: `wasi:http/outgoing-handler` was previously listed here but removed +/// because `wasmtime-wasi-http` is not a dependency and the runtime linker +/// does not provide it. Provider WASM modules compiled against the HTTP +/// import would fail at instantiation. +world provider-module { + // HTTP imports deferred — requires wasmtime-wasi-http dependency (future work) + export provider; +} + +/// Tier 2: Orchestrator module — needs kernel callbacks for the agent loop. 
+world orchestrator-module { + import kernel-service; + export orchestrator; +} \ No newline at end of file diff --git a/docs/plans/2026-03-03-audit-fix-design.md b/docs/plans/2026-03-03-audit-fix-design.md index 5bdc6ca..b563cce 100644 --- a/docs/plans/2026-03-03-audit-fix-design.md +++ b/docs/plans/2026-03-03-audit-fix-design.md @@ -180,12 +180,16 @@ All `log::warn!()` — data integrity issues at wire boundaries. No behavioral c These are acknowledged incomplete gRPC protocol implementations, not bugs with silent fallbacks. They require proto schema changes and are Phase 2/4 work items per the cross-language SDK roadmap. **Document, don't fix.** -| Finding | File | Gap | Action | -|---------|------|-----|--------| -| S-1 | `grpc_context.rs` | Message fields (role, name, tool_call_id, metadata) zeroed | `log::debug!()` + `// TODO(grpc-v2):` comment | -| S-2 | `grpc_context.rs` | BlockContent variants → Null | `log::debug!()` when non-TextContent encountered + `// TODO(grpc-v2):` comment | -| S-3 | `grpc_context.rs` | `provider_name` not transmitted | `log::debug!()` + `// TODO(grpc-v2):` comment | -| S-4 | `grpc_orchestrator.rs` | 5 orchestrator parameters discarded | `log::debug!()` + `// TODO(grpc-v2):` comment | +> **✅ RESOLVED (2026-03-05):** All S-1 through S-4 structural gaps were fully fixed in the gRPC Phase 2 debt fix work +> (`docs/plans/2026-03-04-grpc-v2-debt-fix-design.md`). All `TODO(grpc-v2)` markers have been removed from source code. +> The table below reflects the original action taken; actual fixes are described in the debt fix design and implementation docs. 
+ +| Finding | File | Gap | Resolution | +|---------|------|-----|------------| +| S-1 | `grpc_context.rs` | Message fields (role, name, tool_call_id, metadata) zeroed | Fixed: full bidirectional conversion implemented | +| S-2 | `grpc_context.rs` | BlockContent variants → Null | Fixed: all BlockContent variants converted | +| S-3 | `grpc_context.rs` | `provider_name` not transmitted | Fixed: provider_name transmitted via proto field | +| S-4 | `grpc_orchestrator.rs` | 5 orchestrator parameters discarded | Fixed: remote orchestrators access these via KernelService RPCs using session_id | **Log level: `debug`**, not `warn`. These are known limitations, not unexpected failures. An operator running at `debug` level sees them; normal operation stays quiet. diff --git a/docs/plans/2026-03-04-grpc-v2-debt-fix-design.md b/docs/plans/2026-03-04-grpc-v2-debt-fix-design.md new file mode 100644 index 0000000..963bec3 --- /dev/null +++ b/docs/plans/2026-03-04-grpc-v2-debt-fix-design.md @@ -0,0 +1,239 @@ +# gRPC Phase 2 Debt Fix Design + +## Goal + +Fix all gRPC Phase 2 debt in amplifier-core — 15 code `TODO(grpc-v2)` markers across 4 bridge files, implement 8 stubbed KernelService RPCs, and make remote cross-language orchestrators fully functional. + +## Background + +The gRPC bridge layer was built during the Rust kernel migration with known data loss documented via `TODO(grpc-v2)` markers and `log::debug!()` calls. The audit design doc (`docs/plans/2026-03-03-audit-fix-design.md`) prescribed "document, don't fix" as the initial strategy. This design addresses the actual fixes. + +15 code TODOs across 4 files: + +- `conversions.rs`: 3 (Usage optional fields) +- `grpc_context.rs`: 8 (message fields + content + provider_name) +- `grpc_approval.rs`: 2 (optional timeout) +- `grpc_orchestrator.rs`: 2 (session_id + discarded params) + +Plus 8 of 9 KernelService RPCs stubbed as `Status::unimplemented` in `grpc_server.rs`. Only `ExecuteTool` is implemented. 
+ +## Approach + +Single PR, 4 layered commits working bottom-up through the dependency chain: + +1. Proto schema fixes +2. Bidirectional conversions (bulk of the work) +3. Session routing and bridge fixes +4. KernelService RPC implementation + +Changes are tightly coupled — proto schema changes flow into bridge fixes which flow into KernelService. Splitting across PRs would mean intermediate states where the proto is updated but bridges aren't. Layered commits within one PR give clean git history while shipping atomically. + +## Architecture + +The fix touches four layers of the gRPC subsystem, each building on the one below: + +``` +┌─────────────────────────────────────────────────┐ +│ Layer 4: KernelService RPCs (grpc_server.rs) │ ← Remote modules call back +├─────────────────────────────────────────────────┤ +│ Layer 3: Bridge Fixes (orchestrator, context, │ ← Session routing, params +│ approval, provider) │ +├─────────────────────────────────────────────────┤ +│ Layer 2: Conversions (Message, ChatRequest, │ ← ~60% of total effort +│ ChatResponse, HookResult) │ +├─────────────────────────────────────────────────┤ +│ Layer 1: Proto Schema (amplifier_module.proto) │ ← optional fields +└─────────────────────────────────────────────────┘ +``` + +## Components + +### Layer 1: Proto Schema Fixes + +Add `optional` keyword to 5 fields in `proto/amplifier_module.proto`: + +```protobuf +// Usage message — 3 token count fields +optional int32 reasoning_tokens = 4; +optional int32 cache_read_tokens = 5; +optional int32 cache_creation_tokens = 6; + +// ApprovalRequest — 1 timeout field +optional double timeout = 5; + +// HookResult — 1 timeout field (same None/0.0 ambiguity) +optional double approval_timeout = 9; +``` + +**Why:** Proto3 bare scalars default to `0`/`0.0` on the wire, making `None` (not reported / wait forever) and `Some(0)` (zero tokens / expire immediately) indistinguishable. The `optional` keyword generates `Option` in Rust. 
+ +**Wire compatibility:** Adding `optional` to an existing proto3 field is backward-compatible — old readers treat the field the same way, new readers get `Option`. + +**After proto change:** Regenerate Rust code via `cargo build` (with protoc installed — `build.rs` auto-regenerates `src/generated/amplifier.module.rs`). Commit both the proto change AND the regenerated Rust code together. Update `conversions.rs` to map `None ↔ None` instead of `unwrap_or(0)`, and update `grpc_approval.rs` to send `None` instead of `0.0`. + +### Layer 2: Bidirectional Conversions + +This is the foundation for everything else and the bulk of the work (~60% of total effort). Build complete bidirectional conversions between native Rust types and proto types. + +**New conversions to write:** + +1. **`Message ↔ proto::Message`** (with ContentBlock, Role mapping): + - `value_to_proto_message()`: Use `serde_json::from_value::(value)` for type-safe parsing (not hand-parsing JSON keys). Map `Role` enum to proto `Role`, extract `name`, `tool_call_id`, `metadata` (serialize to JSON string), handle both `MessageContent::Text` (→ TextContent) and `MessageContent::Blocks` (→ BlockContent with all 7 ContentBlock variants: text, thinking, redacted_thinking, tool_call, tool_result, image, reasoning) + - `proto_message_to_value()`: Full fidelity reverse — map proto Role back to string, populate name/tool_call_id/metadata, handle BlockContent by iterating proto ContentBlock entries + +2. **`ChatRequest ↔ proto::ChatRequest`** (with ToolSpec, ResponseFormat): + - Native `ChatRequest` (messages.rs) includes messages, model, system prompt, tools, response_format, temperature, max_tokens, etc. + - Requires Message conversion from above, plus ToolSpec and ResponseFormat mapping + +3. 
**`ChatResponse ↔ proto::ChatResponse`** (with ToolCall, Usage, Degradation): + - Native `ChatResponse` includes content, tool_calls, usage, degradation, model, stop_reason + - Requires ToolCall, Usage (updated for `optional` fields from Layer 1), and Degradation mapping + +4. **`HookResult native → proto`** (reverse of existing `grpc_hook.rs` conversion): + - `grpc_hook.rs` already has `proto_to_native_hook_result()`. Need the reverse: `native_to_proto_hook_result()` + - Needed for KernelService `EmitHook` and `EmitHookAndCollect` RPCs + +5. **Update existing `Usage` conversion** for `optional` fields from Layer 1 + +**Fix `GrpcContextBridge`** message conversion — now uses the proper Message ↔ proto conversion. + +**Fix `GrpcProviderBridge::complete()`** — currently a stub returning `Err(ProviderError::Other)`. Now possible with ChatRequest/ChatResponse conversions. + +### Layer 3: Session Routing & Bridge Fixes + +**Critical fix — `session_id` routing:** + +Store `session_id` on `GrpcOrchestratorBridge` struct at construction time. Cannot modify `Orchestrator` trait signature — that would be a breaking change affecting all orchestrator implementations. + +```rust +pub struct GrpcOrchestratorBridge { + client: tokio::sync::Mutex>, + session_id: String, // Set at construction +} +``` + +Populate `session_id` in `OrchestratorExecuteRequest`. This enables KernelService to route callbacks to the correct session's Coordinator. + +**5 discarded orchestrator parameters — by design:** + +The `Orchestrator::execute()` trait passes `context`, `providers`, `tools`, `hooks`, `coordinator` — but these can't be serialized over gRPC. Remote orchestrators access these via KernelService callbacks instead (which Layer 4 implements). Remove `TODO(grpc-v2)` markers, replace with clear doc comment: "Remote orchestrators access these via KernelService RPCs using session_id." The `log::debug!()` calls remain as operational telemetry. 
+ +**Approval timeout fix:** + +After proto Layer 1 lands (`optional double timeout`), update `map_approval_timeout()`: + +- `None` → proto `None` (not `0.0`) +- `Some(0.0)` → proto `Some(0.0)` (expire immediately) +- `Some(30.0)` → proto `Some(30.0)` (30 second timeout) + +**Provider name fix:** + +In `get_messages_for_request()`, call `provider.name()` on the passed `Arc` and populate the `provider_name` field. + +### Layer 4: KernelService Implementation + +**Architecture:** Each `KernelServiceImpl` is scoped to one session's `Arc`. NOT a session registry HashMap. The kernel provides the mechanism (one service instance per coordinator); the app layer manages session multiplexing. + +**Prerequisite — Session Coordinator sharing:** + +- Change `Session` internal storage from `coordinator: Coordinator` to `coordinator: Arc` +- Add `coordinator_shared() -> Arc` method +- Keep existing `coordinator() -> &Coordinator` and `coordinator_mut() -> &mut Coordinator` working via Arc derefs / `Arc::get_mut()` (safe during setup when there's one ref) +- Document lifecycle constraint: `coordinator_mut()` only callable before `Arc` is shared + +**8 RPCs to implement, in priority order:** + +| Priority | RPC | Depends on | Effort | +|----------|-----|-----------|--------| +| 1 | `GetCapability` | Just coordinator access | Small | +| 1 | `RegisterCapability` | Just coordinator access | Small | +| 2 | `GetMountedModule` | Just coordinator access | Small | +| 3 | `AddMessage` | Layer 2 Message conversion | Medium | +| 3 | `GetMessages` | Layer 2 Message conversion | Medium | +| 4 | `EmitHook` | Layer 2 native→proto HookResult | Medium | +| 4 | `EmitHookAndCollect` | Same + timeout + collect semantics | Medium | +| 5 | `CompleteWithProvider` | Full ChatRequest/ChatResponse conversion | Large | +| 6 | `CompleteWithProviderStreaming` | Wrap single complete() as one-shot stream | Large | + +**Streaming approach:** `CompleteWithProviderStreaming` wraps a single `provider.complete()` 
call into one streamed chunk for now. True streaming requires a Provider trait change (`complete_stream()`) — tracked as separate future work. + +**Each RPC follows the same pattern:** + +1. Extract `session_id` from request +2. Use internal `Arc` (already scoped to this session) +3. Call the appropriate method on Coordinator/subsystem +4. Serialize response using Layer 2 conversions +5. Return `Result` + +## Data Flow + +**Outbound (kernel → remote orchestrator):** + +``` +Session.execute() + → GrpcOrchestratorBridge.execute(session_id, messages) + → Message → proto::Message conversion (Layer 2) + → OrchestratorExecuteRequest { session_id, messages, provider_name } + → gRPC call to remote orchestrator +``` + +**Inbound (remote orchestrator → kernel via KernelService):** + +``` +Remote orchestrator calls KernelService RPC (e.g., CompleteWithProvider) + → KernelServiceImpl receives request + → Uses scoped Arc (no session lookup needed) + → proto::ChatRequest → native ChatRequest (Layer 2) + → coordinator.providers.get(name).complete(request) + → native ChatResponse → proto::ChatResponse (Layer 2) + → gRPC response back to remote orchestrator +``` + +## Error Handling + +- **Session not found:** Not applicable — `KernelServiceImpl` is per-session, not a registry. If the session is gone, the gRPC connection is closed. +- **Provider not found:** `CompleteWithProvider` returns `Status::not_found` with the requested provider name. +- **Conversion failures:** `Status::internal` with descriptive message (e.g., "failed to deserialize Message from proto: missing role field"). +- **Coordinator method errors:** Map native error types to appropriate gRPC status codes (`InvalidArgument`, `NotFound`, `Internal`). +- **Timeout on `EmitHookAndCollect`:** Respect the timeout field from the request; return partial results if timeout expires. 
+ +## Testing Strategy + +- **Proto schema:** Existing `proto-check.yml` CI workflow validates proto changes +- **Conversions:** Unit tests for each new bidirectional conversion (Message, ChatRequest, ChatResponse, HookResult) — roundtrip tests proving `native → proto → native` is lossless +- **Bridge fixes:** Update existing bridge tests that assert lossy behavior to assert full fidelity instead +- **Remove TODO-presence tests:** Tests in `grpc_orchestrator.rs:134` and `grpc_context.rs:290` that assert `TODO(grpc-v2)` markers exist — replace with fidelity tests +- **KernelService RPCs:** Integration tests per RPC — construct `KernelServiceImpl` with a test Coordinator, call RPC, verify response +- **End-to-end:** At least one test that exercises: create session → start KernelService → remote orchestrator calls back via KernelService → verify roundtrip + +## Scope & Boundaries + +**In scope:** + +- 5 proto `optional` field additions + regeneration +- All bidirectional conversions (Message, ChatRequest, ChatResponse, HookResult) +- Fix all 15 code `TODO(grpc-v2)` markers +- Fix `GrpcProviderBridge::complete()` stub +- Session coordinator sharing (`Arc`) +- All 8 KernelService RPC implementations +- Update/remove doc references to `TODO(grpc-v2)` where fixes land + +**Not in scope:** + +- Provider trait streaming extension (separate future PR) +- Multi-session multiplexing over single gRPC port (app-layer concern) +- `process_hook_result()` porting to Rust (tracked as Future TODO #2 from Phase 2) + +## Key Design Decisions + +1. **Single PR, layered commits** — changes are tightly coupled; intermediate states would be broken +2. **KernelServiceImpl stays per-session** — not a session registry; kernel provides mechanism, app provides policy +3. **Session stores `Arc`** — minimal change to enable sharing; existing API preserved +4. **`session_id` stored on bridge at construction** — not passed through Orchestrator trait (would be breaking change) +5. 
**Streaming RPC wraps single `complete()`** — true streaming deferred to Provider trait extension +6. **Type-safe Message parsing** — use `serde_json::from_value::<Message>()`, not hand-parsing JSON keys +7. **5 discarded orchestrator params remain discarded** — by design, remote orchestrators use KernelService callbacks + +## Open Questions + +None — all design points validated during brainstorm with core expert review. \ No newline at end of file diff --git a/docs/plans/2026-03-04-grpc-v2-debt-fix-implementation.md b/docs/plans/2026-03-04-grpc-v2-debt-fix-implementation.md new file mode 100644 index 0000000..729ba9b --- /dev/null +++ b/docs/plans/2026-03-04-grpc-v2-debt-fix-implementation.md @@ -0,0 +1,2992 @@ +# gRPC Phase 2 Debt Fix — Implementation Plan + +> **Execution:** Use the subagent-driven-development workflow to implement this plan. + +**Goal:** Fix all 15 `TODO(grpc-v2)` markers, implement all 8 stubbed KernelService RPCs, and make remote cross-language orchestrators fully functional. + +**Architecture:** Four layered changes working bottom-up: proto schema fixes → bidirectional type conversions → session routing & bridge fixes → KernelService RPC implementation. Each layer depends on the one below. Single PR, layered commits. + +**Tech Stack:** Rust, tonic (gRPC), prost (protobuf), serde_json, tokio + +**Design document:** `docs/plans/2026-03-04-grpc-v2-debt-fix-design.md` + +--- + +## Glossary (read this first) + +| Term | What it means | +|------|---------------| +| **proto** | The file `proto/amplifier_module.proto` — the source of truth for all gRPC types | +| **generated code** | `crates/amplifier-core/src/generated/amplifier.module.rs` — Rust structs auto-generated from proto by `tonic-build`. Committed to the repo.
| +| **native types** | Hand-written Rust types in `messages.rs` and `models.rs` (e.g., `Message`, `ChatRequest`, `HookResult`) | +| **proto types** | The generated Rust types (e.g., `amplifier_module::Message`, `amplifier_module::ChatResponse`) | +| **bridge** | Code in `src/bridges/` that wraps a gRPC client behind a native Rust trait (e.g., `GrpcProviderBridge` implements `Provider`) | +| **conversion** | A `From for ProtoType` impl (or reverse) that maps between native and proto types | +| **KernelService** | A gRPC server the Rust kernel hosts — remote modules call back to it for provider access, tool execution, etc. | +| **Coordinator** | The central hub (`coordinator.rs`) that holds all mounted modules (providers, tools, hooks, context) | + +## File Map + +These are ALL the files you will touch. Read them before starting. + +| File | Role | +|------|------| +| `proto/amplifier_module.proto` | Proto schema — add `optional` keyword to 5 fields | +| `crates/amplifier-core/build.rs` | Proto code generation — no changes needed, just understand how it works | +| `crates/amplifier-core/src/generated/amplifier.module.rs` | Generated code — regenerated by `cargo build` when protoc is installed | +| `crates/amplifier-core/src/generated/conversions.rs` | Existing conversions (ToolResult, ModelInfo, Usage) — modify Usage, add new conversions | +| `crates/amplifier-core/src/generated/mod.rs` | Module declarations for generated code — no changes needed | +| `crates/amplifier-core/src/messages.rs` | Native Message, ChatRequest, ChatResponse, ContentBlock, Role types — read only | +| `crates/amplifier-core/src/models.rs` | Native HookResult, HookAction, ToolResult, ToolSpec types — read only | +| `crates/amplifier-core/src/traits.rs` | 6 module traits — read only, do NOT modify | +| `crates/amplifier-core/src/errors.rs` | Error types — read only | +| `crates/amplifier-core/src/bridges/grpc_context.rs` | Context bridge — rewrite `value_to_proto_message` / 
`proto_message_to_value` | +| `crates/amplifier-core/src/bridges/grpc_approval.rs` | Approval bridge — fix `map_approval_timeout` | +| `crates/amplifier-core/src/bridges/grpc_orchestrator.rs` | Orchestrator bridge — add `session_id`, document discarded params | +| `crates/amplifier-core/src/bridges/grpc_provider.rs` | Provider bridge — implement `complete()` stub | +| `crates/amplifier-core/src/bridges/grpc_hook.rs` | Hook bridge — add reverse `native_to_proto_hook_result()` | +| `crates/amplifier-core/src/grpc_server.rs` | KernelService — implement 8 stubbed RPCs | +| `crates/amplifier-core/src/session.rs` | Session — change to `Arc`, add `coordinator_shared()` | +| `crates/amplifier-core/src/coordinator.rs` | Coordinator — read only, understand its API | +| `crates/amplifier-core/src/hooks.rs` | HookRegistry — read only, understand `emit()` and `emit_and_collect()` signatures | + +--- + +## Task 0: Proto Schema — Add `optional` to 5 Fields + +**Files:** +- Modify: `proto/amplifier_module.proto` (lines 301-308, 432, 400) +- Modify: `crates/amplifier-core/src/generated/amplifier.module.rs` (auto-regenerated) +- Modify: `crates/amplifier-core/src/generated/conversions.rs` (lines 146-151, 157-182) +- Modify: `crates/amplifier-core/src/bridges/grpc_approval.rs` (lines 32-49, 83-88) +- Modify: `crates/amplifier-core/src/bridges/grpc_hook.rs` (lines 180-181, 237-254) +- Test: inline `#[cfg(test)]` in `conversions.rs` and `grpc_approval.rs` + +### Step 1: Edit proto — add `optional` to 5 fields + +Open `proto/amplifier_module.proto`. 
Make these exact changes: + +In the `Usage` message (around line 301): +```protobuf +message Usage { +  int32 prompt_tokens = 1; +  int32 completion_tokens = 2; +  int32 total_tokens = 3; +  optional int32 reasoning_tokens = 4; +  optional int32 cache_read_tokens = 5; +  optional int32 cache_creation_tokens = 6; +} +``` + +In the `ApprovalRequest` message (around line 427): +```protobuf +message ApprovalRequest { +  string tool_name = 1; +  string action = 2; +  string details_json = 3; +  string risk_level = 4; +  optional double timeout = 5; +} +``` + +In the `HookResult` message (around line 390): +```protobuf +  // Change line 400 from: +  // double approval_timeout = 9; +  // to: +  optional double approval_timeout = 9; +``` + +### Step 2: Regenerate Rust code + +Run: +```bash +cd crates/amplifier-core && cargo build 2>&1 | head -40 +``` + +Expected: Build succeeds if protoc is installed. The file `src/generated/amplifier.module.rs` will be updated. The 5 fields will now be `Option<i32>` / `Option<f64>` in the generated Rust code instead of bare `i32` / `f64`. + +**If protoc is NOT installed:** You'll see the warning `protoc not found — using pre-committed generated stubs`. In that case, you must manually edit `src/generated/amplifier.module.rs` to change the 5 field types. Search for the struct definitions and change: +- `pub reasoning_tokens: i32` → `pub reasoning_tokens: Option<i32>` +- `pub cache_read_tokens: i32` → `pub cache_read_tokens: Option<i32>` +- `pub cache_creation_tokens: i32` → `pub cache_creation_tokens: Option<i32>` +- In `ApprovalRequest`: `pub timeout: f64` → `pub timeout: Option<f64>` +- In `HookResult`: `pub approval_timeout: f64` → `pub approval_timeout: Option<f64>` + +### Step 3: Fix compile errors in conversions.rs + +After regeneration, `cargo build` will fail because the generated types changed from bare scalars to `Option<>`.
Fix `crates/amplifier-core/src/generated/conversions.rs`: + +**Native → proto direction** (the `From for super::amplifier_module::Usage` impl, around line 119): + +Replace: +```rust + // TODO(grpc-v2): proto uses bare int32 — Some(0) and None are indistinguishable + reasoning_tokens: native.reasoning_tokens.unwrap_or(0) as i32, + // TODO(grpc-v2): proto uses bare int32 — Some(0) and None are indistinguishable + cache_read_tokens: native.cache_read_tokens.unwrap_or(0) as i32, + // TODO(grpc-v2): proto uses bare int32 — Some(0) and None are indistinguishable + cache_creation_tokens: native.cache_write_tokens.unwrap_or(0) as i32, +``` + +With: +```rust + reasoning_tokens: native.reasoning_tokens.map(|v| v as i32), + cache_read_tokens: native.cache_read_tokens.map(|v| v as i32), + cache_creation_tokens: native.cache_write_tokens.map(|v| v as i32), +``` + +**Proto → native direction** (the `From for crate::messages::Usage` impl, around line 156): + +Replace: +```rust + reasoning_tokens: if proto.reasoning_tokens == 0 { + None + } else { + Some(i64::from(proto.reasoning_tokens)) + }, + cache_read_tokens: if proto.cache_read_tokens == 0 { + None + } else { + Some(i64::from(proto.cache_read_tokens)) + }, + cache_write_tokens: if proto.cache_creation_tokens == 0 { + None + } else { + Some(i64::from(proto.cache_creation_tokens)) + }, +``` + +With: +```rust + reasoning_tokens: proto.reasoning_tokens.map(i64::from), + cache_read_tokens: proto.cache_read_tokens.map(i64::from), + cache_write_tokens: proto.cache_creation_tokens.map(i64::from), +``` + +### Step 4: Fix compile errors in grpc_approval.rs + +The `ApprovalRequest` proto struct's `timeout` field is now `Option`. Fix `crates/amplifier-core/src/bridges/grpc_approval.rs`: + +Replace the entire `map_approval_timeout` function and the TODO comment above it (lines 32-49): +```rust +// Approval timeout is now properly represented as optional double in proto. 
+// None = no timeout (wait indefinitely), Some(0.0) = expire immediately. +``` + +Then in the `request_approval` method (around line 88), change: +```rust + timeout: map_approval_timeout(request.timeout), +``` +to: +```rust + timeout: request.timeout, +``` + +### Step 5: Fix compile errors in grpc_hook.rs + +The `HookResult` proto struct's `approval_timeout` field is now `Option`. Fix `crates/amplifier-core/src/bridges/grpc_hook.rs`: + +In `proto_to_native_hook_result` (around line 181), change: +```rust + approval_timeout: proto.approval_timeout, +``` +to: +```rust + approval_timeout: proto.approval_timeout.unwrap_or(300.0), +``` + +Also fix the `default_proto_hook_result()` test helper (around line 247). Change: +```rust + approval_timeout: 0.0, +``` +to: +```rust + approval_timeout: None, +``` + +### Step 6: Build and verify all compile errors are fixed + +Run: +```bash +cd crates/amplifier-core && cargo build 2>&1 +``` +Expected: Build succeeds with no errors. + +### Step 7: Update tests in conversions.rs + +The existing `usage_roundtrip` test (line 231) asserts `cache_write_tokens` roundtrips as `None → 0 → None`. With `optional`, it now roundtrips as `None → None → None` directly. The test should still pass without changes because the assertion is `assert_eq!(restored.cache_write_tokens, None)` which is correct either way. + +Run the existing tests: +```bash +cd crates/amplifier-core && cargo test -p amplifier-core -- conversions::tests --nocapture 2>&1 +``` +Expected: All 9 conversions tests pass. 
+ +### Step 8: Add a test for Usage optional fields with Some(0) + +Add this test to `crates/amplifier-core/src/generated/conversions.rs` inside the `mod tests` block, after the existing `usage_with_all_optional_tokens` test: + +```rust + #[test] + fn usage_some_zero_roundtrips_correctly() { + // With optional proto fields, Some(0) is now distinguishable from None + let original = crate::messages::Usage { + input_tokens: 100, + output_tokens: 50, + total_tokens: 150, + reasoning_tokens: Some(0), + cache_read_tokens: None, + cache_write_tokens: Some(0), + extensions: HashMap::new(), + }; + let proto: super::super::amplifier_module::Usage = original.clone().into(); + let restored: crate::messages::Usage = proto.into(); + assert_eq!(restored.reasoning_tokens, Some(0), "Some(0) must survive roundtrip"); + assert_eq!(restored.cache_read_tokens, None, "None must survive roundtrip"); + assert_eq!(restored.cache_write_tokens, Some(0), "Some(0) must survive roundtrip"); + } +``` + +### Step 9: Update tests in grpc_approval.rs + +Replace the two timeout tests (around line 133) with tests that reflect the new optional semantics: + +Replace: +```rust + #[test] + fn none_timeout_defaults_to_zero() { + // When timeout is None, the wire value should be 0.0. 
+        let timeout: Option<f64> = None; +        let result = map_approval_timeout(timeout); +        assert!((result - 0.0).abs() < f64::EPSILON); +    } + +    #[test] +    fn some_timeout_is_preserved() { +        let timeout: Option<f64> = Some(30.0); +        let result = map_approval_timeout(timeout); +        assert!((result - 30.0).abs() < f64::EPSILON); +    } +``` + +With: +```rust +    #[test] +    fn approval_timeout_none_maps_to_proto_none() { +        // With optional proto field, None timeout is properly represented +        let native_timeout: Option<f64> = None; +        // Proto field is also Option<f64>, so None maps directly +        assert!(native_timeout.is_none()); +    } + +    #[test] +    fn approval_timeout_some_maps_to_proto_some() { +        let native_timeout: Option<f64> = Some(30.0); +        assert_eq!(native_timeout, Some(30.0)); +    } + +    #[test] +    fn approval_timeout_some_zero_is_distinguishable_from_none() { +        let none_timeout: Option<f64> = None; +        let zero_timeout: Option<f64> = Some(0.0); +        assert_ne!(none_timeout, zero_timeout, "None (wait forever) != Some(0.0) (expire immediately)"); +    } +``` + +### Step 10: Run all tests and verify + +Run: +```bash +cd crates/amplifier-core && cargo test -p amplifier-core --verbose 2>&1 | tail -40 +``` +Expected: All tests pass. + +### Step 11: Run clippy + +Run: +```bash +cd crates/amplifier-core && cargo clippy -p amplifier-core -- -D warnings 2>&1 +``` +Expected: No warnings or errors.
+ +### Step 12: Commit + +```bash +cd /home/bkrabach/dev/rust-devrust-core/amplifier-core && git add proto/amplifier_module.proto crates/amplifier-core/src/generated/ crates/amplifier-core/src/bridges/grpc_approval.rs crates/amplifier-core/src/bridges/grpc_hook.rs && git commit -m "fix(grpc): add optional keyword to 5 proto fields for None/zero disambiguation + +- Usage: reasoning_tokens, cache_read_tokens, cache_creation_tokens +- ApprovalRequest: timeout +- HookResult: approval_timeout +- Update conversions.rs to use Option mapping instead of unwrap_or(0) +- Remove map_approval_timeout workaround in grpc_approval.rs +- Fix grpc_hook.rs to default to 300.0 when approval_timeout is None" +``` + +--- + +## Task 1: Role Enum Conversion Helpers + +**Files:** +- Modify: `crates/amplifier-core/src/generated/conversions.rs` +- Test: inline `#[cfg(test)]` in same file + +### Step 1: Write failing tests + +Add these tests inside the `mod tests` block in `crates/amplifier-core/src/generated/conversions.rs`: + +```rust + // -- Role conversions -- + + #[test] + fn native_role_to_proto_role_all_variants() { + use crate::messages::Role; + let cases = vec![ + (Role::System, super::super::amplifier_module::Role::System as i32), + (Role::User, super::super::amplifier_module::Role::User as i32), + (Role::Assistant, super::super::amplifier_module::Role::Assistant as i32), + (Role::Tool, super::super::amplifier_module::Role::Tool as i32), + (Role::Function, super::super::amplifier_module::Role::Function as i32), + (Role::Developer, super::super::amplifier_module::Role::Developer as i32), + ]; + for (native, expected_i32) in cases { + let proto_i32: i32 = super::native_role_to_proto(native.clone()); + assert_eq!(proto_i32, expected_i32, "failed for {:?}", native); + } + } + + #[test] + fn proto_role_to_native_role_all_variants() { + use crate::messages::Role; + let cases = vec![ + (super::super::amplifier_module::Role::System as i32, Role::System), + 
(super::super::amplifier_module::Role::User as i32, Role::User), + (super::super::amplifier_module::Role::Assistant as i32, Role::Assistant), + (super::super::amplifier_module::Role::Tool as i32, Role::Tool), + (super::super::amplifier_module::Role::Function as i32, Role::Function), + (super::super::amplifier_module::Role::Developer as i32, Role::Developer), + ]; + for (proto_i32, expected) in cases { + let native = super::proto_role_to_native(proto_i32); + assert_eq!(native, expected, "failed for proto i32 {}", proto_i32); + } + } + + #[test] + fn proto_role_unspecified_defaults_to_user() { + use crate::messages::Role; + assert_eq!(super::proto_role_to_native(0), Role::User); + } + + #[test] + fn proto_role_unknown_defaults_to_user() { + use crate::messages::Role; + assert_eq!(super::proto_role_to_native(99), Role::User); + } +``` + +### Step 2: Run tests to verify they fail + +Run: +```bash +cd crates/amplifier-core && cargo test -p amplifier-core -- conversions::tests::native_role 2>&1 +``` +Expected: FAIL — functions `native_role_to_proto` and `proto_role_to_native` don't exist yet. + +### Step 3: Implement the role conversion helpers + +Add these two public functions to `crates/amplifier-core/src/generated/conversions.rs`, above the `#[cfg(test)]` block: + +```rust +// --------------------------------------------------------------------------- +// Role conversions +// --------------------------------------------------------------------------- + +/// Convert a native [`Role`] to a proto `Role` enum i32 value. 
+pub fn native_role_to_proto(role: crate::messages::Role) -> i32 { + match role { + crate::messages::Role::System => super::amplifier_module::Role::System as i32, + crate::messages::Role::User => super::amplifier_module::Role::User as i32, + crate::messages::Role::Assistant => super::amplifier_module::Role::Assistant as i32, + crate::messages::Role::Tool => super::amplifier_module::Role::Tool as i32, + crate::messages::Role::Function => super::amplifier_module::Role::Function as i32, + crate::messages::Role::Developer => super::amplifier_module::Role::Developer as i32, + } +} + +/// Convert a proto `Role` i32 value to a native [`Role`]. +/// +/// Unknown or unspecified values default to `Role::User`. +pub fn proto_role_to_native(proto_role: i32) -> crate::messages::Role { + match super::amplifier_module::Role::try_from(proto_role) { + Ok(super::amplifier_module::Role::System) => crate::messages::Role::System, + Ok(super::amplifier_module::Role::User) => crate::messages::Role::User, + Ok(super::amplifier_module::Role::Assistant) => crate::messages::Role::Assistant, + Ok(super::amplifier_module::Role::Tool) => crate::messages::Role::Tool, + Ok(super::amplifier_module::Role::Function) => crate::messages::Role::Function, + Ok(super::amplifier_module::Role::Developer) => crate::messages::Role::Developer, + Ok(super::amplifier_module::Role::Unspecified) | Err(_) => { + if proto_role != 0 { + log::warn!("Unknown proto Role value {}, defaulting to User", proto_role); + } + crate::messages::Role::User + } + } +} +``` + +### Step 4: Run tests to verify they pass + +Run: +```bash +cd crates/amplifier-core && cargo test -p amplifier-core -- conversions::tests::native_role conversions::tests::proto_role --nocapture 2>&1 +``` +Expected: All 4 tests pass. 
+ +### Step 5: Commit + +```bash +cd /home/bkrabach/dev/rust-devrust-core/amplifier-core && git add crates/amplifier-core/src/generated/conversions.rs && git commit -m "feat(grpc): add Role enum bidirectional conversion helpers" +``` + +--- + +## Task 2: Message ↔ Proto Message Conversion + +**Files:** +- Modify: `crates/amplifier-core/src/generated/conversions.rs` +- Test: inline `#[cfg(test)]` in same file + +This is the biggest single conversion — it handles all 7 ContentBlock variants, role, name, tool_call_id, and metadata. + +### Step 1: Write failing tests + +Add these tests inside `mod tests` in `crates/amplifier-core/src/generated/conversions.rs`: + +```rust + // -- Message conversions -- + + #[test] + fn message_text_content_roundtrip() { + use crate::messages::{Message, MessageContent, Role}; + + let original = Message { + role: Role::User, + content: MessageContent::Text("hello world".into()), + name: Some("alice".into()), + tool_call_id: None, + metadata: Some(HashMap::from([("key".to_string(), serde_json::json!("val"))])), + extensions: HashMap::new(), + }; + let proto = super::native_message_to_proto(&original); + let restored = super::proto_message_to_native(&proto).expect("conversion should succeed"); + assert_eq!(restored.role, Role::User); + assert_eq!(restored.name, Some("alice".into())); + assert_eq!(restored.metadata, original.metadata); + match &restored.content { + MessageContent::Text(t) => assert_eq!(t, "hello world"), + other => panic!("expected Text, got {:?}", other), + } + } + + #[test] + fn message_block_content_text_roundtrip() { + use crate::messages::{ContentBlock, Message, MessageContent, Role}; + + let original = Message { + role: Role::Assistant, + content: MessageContent::Blocks(vec![ContentBlock::Text { + text: "thinking...".into(), + visibility: None, + extensions: HashMap::new(), + }]), + name: None, + tool_call_id: None, + metadata: None, + extensions: HashMap::new(), + }; + let proto = 
super::native_message_to_proto(&original); + let restored = super::proto_message_to_native(&proto).expect("conversion should succeed"); + assert_eq!(restored.role, Role::Assistant); + match &restored.content { + MessageContent::Blocks(blocks) => { + assert_eq!(blocks.len(), 1); + match &blocks[0] { + ContentBlock::Text { text, .. } => assert_eq!(text, "thinking..."), + other => panic!("expected Text block, got {:?}", other), + } + } + other => panic!("expected Blocks, got {:?}", other), + } + } + + #[test] + fn message_with_tool_call_id_roundtrip() { + use crate::messages::{Message, MessageContent, Role}; + + let original = Message { + role: Role::Tool, + content: MessageContent::Text("result data".into()), + name: Some("read_file".into()), + tool_call_id: Some("call_abc123".into()), + metadata: None, + extensions: HashMap::new(), + }; + let proto = super::native_message_to_proto(&original); + let restored = super::proto_message_to_native(&proto).expect("conversion should succeed"); + assert_eq!(restored.role, Role::Tool); + assert_eq!(restored.tool_call_id, Some("call_abc123".into())); + assert_eq!(restored.name, Some("read_file".into())); + } + + #[test] + fn message_none_content_returns_error() { + let proto = super::super::amplifier_module::Message { + role: 2, // USER + content: None, + name: String::new(), + tool_call_id: String::new(), + metadata_json: String::new(), + }; + let result = super::proto_message_to_native(&proto); + assert!(result.is_err(), "None content should produce an error"); + } +``` + +### Step 2: Run tests to verify they fail + +Run: +```bash +cd crates/amplifier-core && cargo test -p amplifier-core -- conversions::tests::message_ 2>&1 +``` +Expected: FAIL — `native_message_to_proto` and `proto_message_to_native` don't exist yet. 
+ +### Step 3: Implement message conversion functions + +Add these functions to `crates/amplifier-core/src/generated/conversions.rs`, after the Role conversion functions: + +```rust +// --------------------------------------------------------------------------- +// ContentBlock conversions +// --------------------------------------------------------------------------- + +/// Convert a native [`ContentBlock`] to a proto `ContentBlock`. +fn native_content_block_to_proto( + block: &crate::messages::ContentBlock, +) -> super::amplifier_module::ContentBlock { + use crate::messages::ContentBlock; + let (proto_block, visibility) = match block { + ContentBlock::Text { + text, visibility, .. + } => ( + super::amplifier_module::content_block::Block::TextBlock( + super::amplifier_module::TextBlock { text: text.clone() }, + ), + visibility, + ), + ContentBlock::Thinking { + thinking, + signature, + content, + visibility, + .. + } => ( + super::amplifier_module::content_block::Block::ThinkingBlock( + super::amplifier_module::ThinkingBlock { + thinking: thinking.clone(), + signature: signature.clone().unwrap_or_default(), + content: content + .as_ref() + .map(|c| { + c.iter() + .filter_map(|v| v.as_str().map(String::from)) + .collect() + }) + .unwrap_or_default(), + }, + ), + visibility, + ), + ContentBlock::RedactedThinking { + data, visibility, .. + } => ( + super::amplifier_module::content_block::Block::RedactedThinkingBlock( + super::amplifier_module::RedactedThinkingBlock { data: data.clone() }, + ), + visibility, + ), + ContentBlock::ToolCall { + id, + name, + input, + visibility, + .. + } => ( + super::amplifier_module::content_block::Block::ToolCallBlock( + super::amplifier_module::ToolCallBlock { + id: id.clone(), + name: name.clone(), + input_json: serde_json::to_string(input).unwrap_or_default(), + }, + ), + visibility, + ), + ContentBlock::ToolResult { + tool_call_id, + output, + visibility, + .. 
+ } => ( + super::amplifier_module::content_block::Block::ToolResultBlock( + super::amplifier_module::ToolResultBlock { + tool_call_id: tool_call_id.clone(), + output_json: serde_json::to_string(output).unwrap_or_default(), + }, + ), + visibility, + ), + ContentBlock::Image { + source, + visibility, + .. + } => ( + super::amplifier_module::content_block::Block::ImageBlock( + super::amplifier_module::ImageBlock { + media_type: source + .get("media_type") + .and_then(|v| v.as_str()) + .unwrap_or_default() + .to_string(), + data: source + .get("data") + .and_then(|v| v.as_str()) + .map(|s| s.as_bytes().to_vec()) + .unwrap_or_default(), + source_json: serde_json::to_string(source).unwrap_or_default(), + }, + ), + visibility, + ), + ContentBlock::Reasoning { + content, + summary, + visibility, + .. + } => ( + super::amplifier_module::content_block::Block::ReasoningBlock( + super::amplifier_module::ReasoningBlock { + content: content + .iter() + .filter_map(|v| v.as_str().map(String::from)) + .collect(), + summary: summary + .iter() + .filter_map(|v| v.as_str().map(String::from)) + .collect(), + }, + ), + visibility, + ), + }; + let proto_visibility = match visibility { + Some(crate::messages::Visibility::Internal) => { + super::amplifier_module::Visibility::LlmOnly as i32 + } + Some(crate::messages::Visibility::User) => { + super::amplifier_module::Visibility::UserOnly as i32 + } + Some(crate::messages::Visibility::Developer) => { + super::amplifier_module::Visibility::All as i32 + } + None => super::amplifier_module::Visibility::Unspecified as i32, + }; + super::amplifier_module::ContentBlock { + block: Some(proto_block), + visibility: proto_visibility, + } +} + +/// Convert a proto `ContentBlock` to a native [`ContentBlock`]. 
+fn proto_content_block_to_native( + proto: &super::amplifier_module::ContentBlock, +) -> Option { + use crate::messages::{ContentBlock, Visibility}; + let visibility = match super::amplifier_module::Visibility::try_from(proto.visibility) { + Ok(super::amplifier_module::Visibility::LlmOnly) => Some(Visibility::Internal), + Ok(super::amplifier_module::Visibility::UserOnly) => Some(Visibility::User), + Ok(super::amplifier_module::Visibility::All) => Some(Visibility::Developer), + _ => None, + }; + match &proto.block { + Some(super::amplifier_module::content_block::Block::TextBlock(b)) => { + Some(ContentBlock::Text { + text: b.text.clone(), + visibility, + extensions: HashMap::new(), + }) + } + Some(super::amplifier_module::content_block::Block::ThinkingBlock(b)) => { + Some(ContentBlock::Thinking { + thinking: b.thinking.clone(), + signature: if b.signature.is_empty() { + None + } else { + Some(b.signature.clone()) + }, + visibility, + content: if b.content.is_empty() { + None + } else { + Some( + b.content + .iter() + .map(|s| serde_json::Value::String(s.clone())) + .collect(), + ) + }, + extensions: HashMap::new(), + }) + } + Some(super::amplifier_module::content_block::Block::RedactedThinkingBlock(b)) => { + Some(ContentBlock::RedactedThinking { + data: b.data.clone(), + visibility, + extensions: HashMap::new(), + }) + } + Some(super::amplifier_module::content_block::Block::ToolCallBlock(b)) => { + let input = serde_json::from_str(&b.input_json).unwrap_or_default(); + Some(ContentBlock::ToolCall { + id: b.id.clone(), + name: b.name.clone(), + input, + visibility, + extensions: HashMap::new(), + }) + } + Some(super::amplifier_module::content_block::Block::ToolResultBlock(b)) => { + let output = serde_json::from_str(&b.output_json) + .unwrap_or(serde_json::Value::String(b.output_json.clone())); + Some(ContentBlock::ToolResult { + tool_call_id: b.tool_call_id.clone(), + output, + visibility, + extensions: HashMap::new(), + }) + } + 
Some(super::amplifier_module::content_block::Block::ImageBlock(b)) => { + let source = if b.source_json.is_empty() { + HashMap::new() + } else { + serde_json::from_str(&b.source_json).unwrap_or_default() + }; + Some(ContentBlock::Image { + source, + visibility, + extensions: HashMap::new(), + }) + } + Some(super::amplifier_module::content_block::Block::ReasoningBlock(b)) => { + Some(ContentBlock::Reasoning { + content: b + .content + .iter() + .map(|s| serde_json::Value::String(s.clone())) + .collect(), + summary: b + .summary + .iter() + .map(|s| serde_json::Value::String(s.clone())) + .collect(), + visibility, + extensions: HashMap::new(), + }) + } + None => None, + } +} + +// --------------------------------------------------------------------------- +// Message conversions +// --------------------------------------------------------------------------- + +/// Convert a native [`Message`] to a proto `Message`. +pub fn native_message_to_proto( + msg: &crate::messages::Message, +) -> super::amplifier_module::Message { + use crate::messages::MessageContent; + + let content = match &msg.content { + MessageContent::Text(text) => { + Some(super::amplifier_module::message::Content::TextContent(text.clone())) + } + MessageContent::Blocks(blocks) => { + let proto_blocks: Vec = + blocks.iter().map(native_content_block_to_proto).collect(); + Some(super::amplifier_module::message::Content::BlockContent( + super::amplifier_module::ContentBlockList { + blocks: proto_blocks, + }, + )) + } + }; + + let metadata_json = msg + .metadata + .as_ref() + .map(|m| serde_json::to_string(m).unwrap_or_default()) + .unwrap_or_default(); + + super::amplifier_module::Message { + role: native_role_to_proto(msg.role.clone()), + content, + name: msg.name.clone().unwrap_or_default(), + tool_call_id: msg.tool_call_id.clone().unwrap_or_default(), + metadata_json, + } +} + +/// Convert a proto `Message` to a native [`Message`]. +/// +/// Returns `Err` if the proto message has no content. 
+pub fn proto_message_to_native( +    proto: &super::amplifier_module::Message, +) -> Result<crate::messages::Message, String> { +    use crate::messages::{Message, MessageContent}; + +    let content = match &proto.content { +        Some(super::amplifier_module::message::Content::TextContent(text)) => { +            MessageContent::Text(text.clone()) +        } +        Some(super::amplifier_module::message::Content::BlockContent(block_list)) => { +            let blocks: Vec<crate::messages::ContentBlock> = block_list +                .blocks +                .iter() +                .filter_map(proto_content_block_to_native) +                .collect(); +            MessageContent::Blocks(blocks) +        } +        None => { +            return Err("proto Message has no content".to_string()); +        } +    }; + +    let name = if proto.name.is_empty() { +        None +    } else { +        Some(proto.name.clone()) +    }; + +    let tool_call_id = if proto.tool_call_id.is_empty() { +        None +    } else { +        Some(proto.tool_call_id.clone()) +    }; + +    let metadata: Option<HashMap<String, serde_json::Value>> = if proto.metadata_json.is_empty() { +        None +    } else { +        serde_json::from_str(&proto.metadata_json) +            .map_err(|e| { +                log::warn!("Failed to parse message metadata_json: {e}"); +                e +            }) +            .ok() +    }; + +    Ok(Message { +        role: proto_role_to_native(proto.role), +        content, +        name, +        tool_call_id, +        metadata, +        extensions: HashMap::new(), +    }) +} +``` + +### Step 4: Run tests to verify they pass + +Run: +```bash +cd crates/amplifier-core && cargo test -p amplifier-core -- conversions::tests::message_ --nocapture 2>&1 +``` +Expected: All 4 message tests pass. + +### Step 5: Commit + +```bash +cd /home/bkrabach/dev/rust-devrust-core/amplifier-core && git add crates/amplifier-core/src/generated/conversions.rs && git commit -m "feat(grpc): add Message ↔ proto Message bidirectional conversion + +Handles all 7 ContentBlock variants (text, thinking, redacted_thinking, +tool_call, tool_result, image, reasoning), role mapping, name, +tool_call_id, and metadata_json serialization."
+``` + +--- + +## Task 3: ChatRequest ↔ Proto ChatRequest Conversion + +**Files:** +- Modify: `crates/amplifier-core/src/generated/conversions.rs` +- Test: inline `#[cfg(test)]` in same file + +### Step 1: Write failing tests + +Add to `mod tests` in `conversions.rs`: + +```rust + // -- ChatRequest conversions -- + + #[test] + fn chat_request_minimal_roundtrip() { + use crate::messages::{ChatRequest, Message, MessageContent, Role}; + + let original = ChatRequest { + messages: vec![Message { + role: Role::User, + content: MessageContent::Text("hello".into()), + name: None, + tool_call_id: None, + metadata: None, + extensions: HashMap::new(), + }], + tools: None, + response_format: None, + temperature: None, + top_p: None, + max_output_tokens: None, + conversation_id: None, + stream: None, + metadata: None, + model: Some("gpt-4".into()), + tool_choice: None, + stop: None, + reasoning_effort: None, + timeout: None, + extensions: HashMap::new(), + }; + let proto = super::native_chat_request_to_proto(&original); + assert_eq!(proto.model, "gpt-4"); + assert_eq!(proto.messages.len(), 1); + + let restored = super::proto_chat_request_to_native(&proto).expect("should succeed"); + assert_eq!(restored.model, Some("gpt-4".into())); + assert_eq!(restored.messages.len(), 1); + } + + #[test] + fn chat_request_with_tools_roundtrip() { + use crate::messages::{ChatRequest, Message, MessageContent, Role, ToolSpec}; + + let original = ChatRequest { + messages: vec![Message { + role: Role::User, + content: MessageContent::Text("search for rust".into()), + name: None, + tool_call_id: None, + metadata: None, + extensions: HashMap::new(), + }], + tools: Some(vec![ToolSpec { + name: "search".into(), + parameters: HashMap::from([("type".to_string(), serde_json::json!("object"))]), + description: Some("Search the web".into()), + extensions: HashMap::new(), + }]), + response_format: None, + temperature: Some(0.7), + top_p: None, + max_output_tokens: Some(4096), + conversation_id: None, + 
stream: None, + metadata: None, + model: None, + tool_choice: None, + stop: Some(vec!["END".into()]), + reasoning_effort: Some("high".into()), + timeout: Some(30.0), + extensions: HashMap::new(), + }; + let proto = super::native_chat_request_to_proto(&original); + assert_eq!(proto.tools.len(), 1); + assert_eq!(proto.tools[0].name, "search"); + assert!((proto.temperature - 0.7).abs() < f64::EPSILON); + assert_eq!(proto.max_output_tokens, 4096); + + let restored = super::proto_chat_request_to_native(&proto).expect("should succeed"); + assert_eq!(restored.tools.as_ref().unwrap().len(), 1); + assert_eq!(restored.tools.as_ref().unwrap()[0].name, "search"); + assert_eq!(restored.temperature, Some(0.7)); + assert_eq!(restored.stop, Some(vec!["END".into()])); + } +``` + +### Step 2: Run tests to verify they fail + +Run: +```bash +cd crates/amplifier-core && cargo test -p amplifier-core -- conversions::tests::chat_request 2>&1 +``` +Expected: FAIL — functions don't exist yet. + +### Step 3: Implement ChatRequest conversion + +Add to `conversions.rs`, after the Message conversion functions: + +```rust +// --------------------------------------------------------------------------- +// ChatRequest conversions +// --------------------------------------------------------------------------- + +/// Convert a native [`ChatRequest`] to a proto `ChatRequest`. 
+pub fn native_chat_request_to_proto( + req: &crate::messages::ChatRequest, +) -> super::amplifier_module::ChatRequest { + let messages: Vec = + req.messages.iter().map(native_message_to_proto).collect(); + + let tools: Vec = req + .tools + .as_ref() + .map(|ts| { + ts.iter() + .map(|t| super::amplifier_module::ToolSpecProto { + name: t.name.clone(), + description: t.description.clone().unwrap_or_default(), + parameters_json: serde_json::to_string(&t.parameters).unwrap_or_default(), + }) + .collect() + }) + .unwrap_or_default(); + + let response_format = req.response_format.as_ref().map(|rf| { + use crate::messages::ResponseFormat; + match rf { + ResponseFormat::Text => super::amplifier_module::ResponseFormat { + format: Some(super::amplifier_module::response_format::Format::Text(true)), + }, + ResponseFormat::Json => super::amplifier_module::ResponseFormat { + format: Some(super::amplifier_module::response_format::Format::Json(true)), + }, + ResponseFormat::JsonSchema { schema, strict } => { + super::amplifier_module::ResponseFormat { + format: Some( + super::amplifier_module::response_format::Format::JsonSchema( + super::amplifier_module::JsonSchemaFormat { + schema_json: serde_json::to_string(schema).unwrap_or_default(), + strict: strict.unwrap_or(false), + }, + ), + ), + } + } + } + }); + + let tool_choice = req + .tool_choice + .as_ref() + .map(|tc| match tc { + crate::messages::ToolChoice::String(s) => s.clone(), + crate::messages::ToolChoice::Object(o) => { + serde_json::to_string(o).unwrap_or_default() + } + }) + .unwrap_or_default(); + + let metadata_json = req + .metadata + .as_ref() + .map(|m| serde_json::to_string(m).unwrap_or_default()) + .unwrap_or_default(); + + super::amplifier_module::ChatRequest { + messages, + tools, + response_format, + temperature: req.temperature.unwrap_or(0.0), + top_p: req.top_p.unwrap_or(0.0), + max_output_tokens: req.max_output_tokens.unwrap_or(0) as i32, + conversation_id: req.conversation_id.clone().unwrap_or_default(), 
+ stream: req.stream.unwrap_or(false), + metadata_json, + model: req.model.clone().unwrap_or_default(), + tool_choice, + stop: req.stop.clone().unwrap_or_default(), + reasoning_effort: req.reasoning_effort.clone().unwrap_or_default(), + timeout: req.timeout.unwrap_or(0.0), + } +} + +/// Convert a proto `ChatRequest` to a native [`ChatRequest`]. +pub fn proto_chat_request_to_native( + proto: &super::amplifier_module::ChatRequest, +) -> Result { + use crate::messages::{ChatRequest, ResponseFormat, ToolChoice, ToolSpec}; + + let messages: Result, _> = proto.messages.iter().map(proto_message_to_native).collect(); + let messages = messages?; + + let tools = if proto.tools.is_empty() { + None + } else { + Some( + proto + .tools + .iter() + .map(|t| { + let parameters = serde_json::from_str(&t.parameters_json).unwrap_or_default(); + ToolSpec { + name: t.name.clone(), + parameters, + description: if t.description.is_empty() { + None + } else { + Some(t.description.clone()) + }, + extensions: HashMap::new(), + } + }) + .collect(), + ) + }; + + let response_format = proto.response_format.as_ref().and_then(|rf| { + match &rf.format { + Some(super::amplifier_module::response_format::Format::Text(_)) => { + Some(ResponseFormat::Text) + } + Some(super::amplifier_module::response_format::Format::Json(_)) => { + Some(ResponseFormat::Json) + } + Some(super::amplifier_module::response_format::Format::JsonSchema(js)) => { + let schema = serde_json::from_str(&js.schema_json).unwrap_or_default(); + Some(ResponseFormat::JsonSchema { + schema, + strict: if js.strict { Some(true) } else { None }, + }) + } + None => None, + } + }); + + let tool_choice = if proto.tool_choice.is_empty() { + None + } else { + Some(ToolChoice::String(proto.tool_choice.clone())) + }; + + let metadata = if proto.metadata_json.is_empty() { + None + } else { + serde_json::from_str(&proto.metadata_json).ok() + }; + + Ok(ChatRequest { + messages, + tools, + response_format, + temperature: if proto.temperature == 0.0 
{ None } else { Some(proto.temperature) }, + top_p: if proto.top_p == 0.0 { None } else { Some(proto.top_p) }, + max_output_tokens: if proto.max_output_tokens == 0 { + None + } else { + Some(i64::from(proto.max_output_tokens)) + }, + conversation_id: if proto.conversation_id.is_empty() { + None + } else { + Some(proto.conversation_id.clone()) + }, + stream: if proto.stream { Some(true) } else { None }, + metadata, + model: if proto.model.is_empty() { + None + } else { + Some(proto.model.clone()) + }, + tool_choice, + stop: if proto.stop.is_empty() { + None + } else { + Some(proto.stop.clone()) + }, + reasoning_effort: if proto.reasoning_effort.is_empty() { + None + } else { + Some(proto.reasoning_effort.clone()) + }, + timeout: if proto.timeout == 0.0 { + None + } else { + Some(proto.timeout) + }, + extensions: HashMap::new(), + }) +} +``` + +### Step 4: Run tests to verify they pass + +Run: +```bash +cd crates/amplifier-core && cargo test -p amplifier-core -- conversions::tests::chat_request --nocapture 2>&1 +``` +Expected: Both tests pass. + +### Step 5: Commit + +```bash +cd /home/bkrabach/dev/rust-devrust-core/amplifier-core && git add crates/amplifier-core/src/generated/conversions.rs && git commit -m "feat(grpc): add ChatRequest ↔ proto ChatRequest bidirectional conversion + +Includes ToolSpec, ResponseFormat, ToolChoice, and all scalar fields." 
+``` + +--- + +## Task 4: ChatResponse ↔ Proto ChatResponse Conversion + +**Files:** +- Modify: `crates/amplifier-core/src/generated/conversions.rs` +- Test: inline `#[cfg(test)]` in same file + +### Step 1: Write failing tests + +Add to `mod tests` in `conversions.rs`: + +```rust + // -- ChatResponse conversions -- + + #[test] + fn chat_response_roundtrip() { + use crate::messages::{ChatResponse, ContentBlock, Usage}; + + let original = ChatResponse { + content: vec![ContentBlock::Text { + text: "Hello!".into(), + visibility: None, + extensions: HashMap::new(), + }], + tool_calls: None, + usage: Some(Usage { + input_tokens: 100, + output_tokens: 50, + total_tokens: 150, + reasoning_tokens: None, + cache_read_tokens: None, + cache_write_tokens: None, + extensions: HashMap::new(), + }), + degradation: None, + finish_reason: Some("stop".into()), + metadata: None, + extensions: HashMap::new(), + }; + let proto = super::native_chat_response_to_proto(&original); + assert_eq!(proto.content, "Hello!"); + assert_eq!(proto.finish_reason, "stop"); + + let restored = super::proto_chat_response_to_native(&proto); + assert_eq!(restored.finish_reason, Some("stop".into())); + assert!(restored.usage.is_some()); + assert_eq!(restored.usage.as_ref().unwrap().input_tokens, 100); + } + + #[test] + fn chat_response_with_tool_calls_roundtrip() { + use crate::messages::{ChatResponse, ContentBlock, ToolCall}; + + let original = ChatResponse { + content: vec![ContentBlock::Text { + text: "Let me search.".into(), + visibility: None, + extensions: HashMap::new(), + }], + tool_calls: Some(vec![ToolCall { + id: "call_123".into(), + name: "search".into(), + arguments: HashMap::from([("query".to_string(), serde_json::json!("rust"))]), + extensions: HashMap::new(), + }]), + usage: None, + degradation: None, + finish_reason: Some("tool_calls".into()), + metadata: None, + extensions: HashMap::new(), + }; + let proto = super::native_chat_response_to_proto(&original); + 
assert_eq!(proto.tool_calls.len(), 1); + assert_eq!(proto.tool_calls[0].name, "search"); + + let restored = super::proto_chat_response_to_native(&proto); + let tc = restored.tool_calls.as_ref().unwrap(); + assert_eq!(tc.len(), 1); + assert_eq!(tc[0].id, "call_123"); + assert_eq!(tc[0].name, "search"); + } +``` + +### Step 2: Run tests to verify they fail + +Run: +```bash +cd crates/amplifier-core && cargo test -p amplifier-core -- conversions::tests::chat_response 2>&1 +``` +Expected: FAIL. + +### Step 3: Implement ChatResponse conversion + +Add to `conversions.rs`: + +```rust +// --------------------------------------------------------------------------- +// ChatResponse conversions +// --------------------------------------------------------------------------- + +/// Convert a native [`ChatResponse`] to a proto `ChatResponse`. +pub fn native_chat_response_to_proto( + resp: &crate::messages::ChatResponse, +) -> super::amplifier_module::ChatResponse { + // Proto ChatResponse.content is a single string. Extract text from first text block. + let content = resp + .content + .iter() + .find_map(|block| { + if let crate::messages::ContentBlock::Text { text, .. 
} = block { + Some(text.clone()) + } else { + None + } + }) + .unwrap_or_default(); + + let tool_calls: Vec = resp + .tool_calls + .as_ref() + .map(|tcs| { + tcs.iter() + .map(|tc| super::amplifier_module::ToolCallMessage { + id: tc.id.clone(), + name: tc.name.clone(), + arguments_json: serde_json::to_string(&tc.arguments).unwrap_or_default(), + }) + .collect() + }) + .unwrap_or_default(); + + let usage = resp + .usage + .as_ref() + .map(|u| super::amplifier_module::Usage::from(u.clone())); + + let degradation = resp.degradation.as_ref().map(|d| { + super::amplifier_module::Degradation { + requested: d.requested.clone(), + actual: d.actual.clone(), + reason: d.reason.clone(), + } + }); + + let metadata_json = resp + .metadata + .as_ref() + .map(|m| serde_json::to_string(m).unwrap_or_default()) + .unwrap_or_default(); + + super::amplifier_module::ChatResponse { + content, + tool_calls, + usage, + degradation, + finish_reason: resp.finish_reason.clone().unwrap_or_default(), + metadata_json, + } +} + +/// Convert a proto `ChatResponse` to a native [`ChatResponse`]. 
+pub fn proto_chat_response_to_native( + proto: &super::amplifier_module::ChatResponse, +) -> crate::messages::ChatResponse { + use crate::messages::{ChatResponse, ContentBlock, Degradation, ToolCall}; + + let content = if proto.content.is_empty() { + vec![] + } else { + vec![ContentBlock::Text { + text: proto.content.clone(), + visibility: None, + extensions: HashMap::new(), + }] + }; + + let tool_calls = if proto.tool_calls.is_empty() { + None + } else { + Some( + proto + .tool_calls + .iter() + .map(|tc| { + let arguments = serde_json::from_str(&tc.arguments_json).unwrap_or_default(); + ToolCall { + id: tc.id.clone(), + name: tc.name.clone(), + arguments, + extensions: HashMap::new(), + } + }) + .collect(), + ) + }; + + let usage = proto.usage.as_ref().map(|u| { + crate::messages::Usage::from(u.clone()) + }); + + let degradation = proto.degradation.as_ref().map(|d| Degradation { + requested: d.requested.clone(), + actual: d.actual.clone(), + reason: d.reason.clone(), + extensions: HashMap::new(), + }); + + let finish_reason = if proto.finish_reason.is_empty() { + None + } else { + Some(proto.finish_reason.clone()) + }; + + let metadata = if proto.metadata_json.is_empty() { + None + } else { + serde_json::from_str(&proto.metadata_json).ok() + }; + + ChatResponse { + content, + tool_calls, + usage, + degradation, + finish_reason, + metadata, + extensions: HashMap::new(), + } +} +``` + +### Step 4: Run tests to verify they pass + +Run: +```bash +cd crates/amplifier-core && cargo test -p amplifier-core -- conversions::tests::chat_response --nocapture 2>&1 +``` +Expected: Both tests pass. + +### Step 5: Commit + +```bash +cd /home/bkrabach/dev/rust-devrust-core/amplifier-core && git add crates/amplifier-core/src/generated/conversions.rs && git commit -m "feat(grpc): add ChatResponse ↔ proto ChatResponse bidirectional conversion + +Includes ToolCall, Usage, and Degradation mapping." 
+``` + +--- + +## Task 5: HookResult Native → Proto Conversion + +**Files:** +- Modify: `crates/amplifier-core/src/bridges/grpc_hook.rs` +- Test: inline `#[cfg(test)]` in same file + +### Step 1: Write failing tests + +Add to `mod tests` in `crates/amplifier-core/src/bridges/grpc_hook.rs`: + +```rust + // -- native_to_proto_hook_result tests -- + + #[test] + fn native_to_proto_hook_result_continue() { + let native = models::HookResult::default(); + let proto = GrpcHookBridge::native_to_proto_hook_result(&native); + assert_eq!(proto.action, amplifier_module::HookAction::Continue as i32); + } + + #[test] + fn native_to_proto_hook_result_deny_with_reason() { + let native = models::HookResult { + action: models::HookAction::Deny, + reason: Some("blocked by policy".into()), + ..Default::default() + }; + let proto = GrpcHookBridge::native_to_proto_hook_result(&native); + assert_eq!(proto.action, amplifier_module::HookAction::Deny as i32); + assert_eq!(proto.reason, "blocked by policy"); + } + + #[test] + fn native_to_proto_roundtrip() { + let native = models::HookResult { + action: models::HookAction::InjectContext, + context_injection: Some("test injection".into()), + context_injection_role: models::ContextInjectionRole::User, + ephemeral: true, + user_message: Some("found issues".into()), + user_message_level: models::UserMessageLevel::Warning, + suppress_output: true, + ..Default::default() + }; + let proto = GrpcHookBridge::native_to_proto_hook_result(&native); + let restored = GrpcHookBridge::proto_to_native_hook_result(proto); + assert_eq!(restored.action, models::HookAction::InjectContext); + assert_eq!(restored.context_injection, Some("test injection".into())); + assert_eq!(restored.context_injection_role, models::ContextInjectionRole::User); + assert!(restored.ephemeral); + assert_eq!(restored.user_message, Some("found issues".into())); + assert_eq!(restored.user_message_level, models::UserMessageLevel::Warning); + assert!(restored.suppress_output); + } +``` + 
+### Step 2: Run tests to verify they fail + +Run: +```bash +cd crates/amplifier-core && cargo test -p amplifier-core -- grpc_hook::tests::native_to_proto 2>&1 +``` +Expected: FAIL — `native_to_proto_hook_result` doesn't exist. + +### Step 3: Implement native_to_proto_hook_result + +Add this function inside `impl GrpcHookBridge` in `crates/amplifier-core/src/bridges/grpc_hook.rs`, after the existing `proto_to_native_hook_result` function: + +```rust + /// Convert a native [`models::HookResult`] to a proto `HookResult`. + pub(crate) fn native_to_proto_hook_result(native: &models::HookResult) -> amplifier_module::HookResult { + let action = match native.action { + models::HookAction::Continue => amplifier_module::HookAction::Continue as i32, + models::HookAction::Modify => amplifier_module::HookAction::Modify as i32, + models::HookAction::Deny => amplifier_module::HookAction::Deny as i32, + models::HookAction::InjectContext => amplifier_module::HookAction::InjectContext as i32, + models::HookAction::AskUser => amplifier_module::HookAction::AskUser as i32, + }; + + let data_json = native + .data + .as_ref() + .map(|d| serde_json::to_string(d).unwrap_or_default()) + .unwrap_or_default(); + + let context_injection_role = match native.context_injection_role { + models::ContextInjectionRole::System => { + amplifier_module::ContextInjectionRole::System as i32 + } + models::ContextInjectionRole::User => { + amplifier_module::ContextInjectionRole::User as i32 + } + models::ContextInjectionRole::Assistant => { + amplifier_module::ContextInjectionRole::Assistant as i32 + } + }; + + let approval_default = match native.approval_default { + models::ApprovalDefault::Allow => amplifier_module::ApprovalDefault::Approve as i32, + models::ApprovalDefault::Deny => amplifier_module::ApprovalDefault::Deny as i32, + }; + + let user_message_level = match native.user_message_level { + models::UserMessageLevel::Info => amplifier_module::UserMessageLevel::Info as i32, + 
models::UserMessageLevel::Warning => amplifier_module::UserMessageLevel::Warning as i32, + models::UserMessageLevel::Error => amplifier_module::UserMessageLevel::Error as i32, + }; + + amplifier_module::HookResult { + action, + data_json, + reason: native.reason.clone().unwrap_or_default(), + context_injection: native.context_injection.clone().unwrap_or_default(), + context_injection_role, + ephemeral: native.ephemeral, + approval_prompt: native.approval_prompt.clone().unwrap_or_default(), + approval_options: native.approval_options.clone().unwrap_or_default(), + approval_timeout: Some(native.approval_timeout), + approval_default, + suppress_output: native.suppress_output, + user_message: native.user_message.clone().unwrap_or_default(), + user_message_level, + user_message_source: native.user_message_source.clone().unwrap_or_default(), + append_to_last_tool_result: native.append_to_last_tool_result, + } + } +``` + +### Step 4: Run tests to verify they pass + +Run: +```bash +cd crates/amplifier-core && cargo test -p amplifier-core -- grpc_hook::tests::native_to_proto --nocapture 2>&1 +``` +Expected: All 3 tests pass. 
+ +### Step 5: Commit + +```bash +cd /home/bkrabach/dev/rust-devrust-core/amplifier-core && git add crates/amplifier-core/src/bridges/grpc_hook.rs && git commit -m "feat(grpc): add native HookResult → proto conversion (reverse direction)" +``` + +--- + +## Task 6: Fix GrpcContextBridge — Full-Fidelity Message Conversion + +**Files:** +- Modify: `crates/amplifier-core/src/bridges/grpc_context.rs` +- Test: inline `#[cfg(test)]` in same file + +### Step 1: Rewrite value_to_proto_message and proto_message_to_value + +In `crates/amplifier-core/src/bridges/grpc_context.rs`, replace the two functions and their TODO comments (lines 52-88): + +```rust + fn value_to_proto_message(message: &Value) -> amplifier_module::Message { + // Type-safe parsing: try to deserialize the Value as a native Message + match serde_json::from_value::<crate::messages::Message>(message.clone()) { + Ok(native_msg) => { + crate::generated::conversions::native_message_to_proto(&native_msg) + } + Err(e) => { + log::warn!( + "Failed to deserialize Value as Message: {e} — falling back to TextContent" + ); + let json_string = serde_json::to_string(message).unwrap_or_default(); + amplifier_module::Message { + role: 0, + content: Some(amplifier_module::message::Content::TextContent(json_string)), + name: String::new(), + tool_call_id: String::new(), + metadata_json: String::new(), + } + } + } + } + + fn proto_message_to_value(msg: &amplifier_module::Message) -> Value { + match crate::generated::conversions::proto_message_to_native(msg) { + Ok(native_msg) => { + serde_json::to_value(&native_msg).unwrap_or(Value::Null) + } + Err(e) => { + log::warn!("Failed to convert proto Message to native: {e}"); + Value::Null + } + } + } +``` + +### Step 2: Fix get_messages_for_request — populate provider_name + +In the same file, update `get_messages_for_request` (around line 117). Replace the TODO block: + +```rust + // TODO(grpc-v2): provider_name parameter is not transmitted to the remote + // context manager.
The _provider parameter is accepted but unused. + log::debug!( + "get_messages_for_request: provider_name is not transmitted through gRPC bridge" + ); + let request = amplifier_module::GetMessagesForRequestParams { + token_budget: token_budget.unwrap_or(0) as i32, + provider_name: String::new(), // TODO(grpc-v2): extract from _provider param + }; +``` + +With: +```rust + let provider_name = _provider + .as_ref() + .map(|p| p.name().to_string()) + .unwrap_or_default(); + let request = amplifier_module::GetMessagesForRequestParams { + token_budget: token_budget.unwrap_or(0) as i32, + provider_name, + }; +``` + +Also remove the leading underscore from `_provider` in the function signature (line 120). Change `_provider` to `provider`. + +### Step 3: Update tests to assert full fidelity + +Replace the three S-1/S-2 tests (lines 237-307) with updated versions: + +```rust + // —— Message conversion: full fidelity —— + + /// value_to_proto_message correctly maps a typed Message value. + #[test] + fn value_to_proto_message_typed_message() { + let val = serde_json::json!({ + "role": "user", + "content": "hello", + "name": "alice" + }); + let msg = GrpcContextBridge::value_to_proto_message(&val); + assert_eq!(msg.role, amplifier_module::Role::User as i32); + assert_eq!(msg.name, "alice"); + match msg.content { + Some(amplifier_module::message::Content::TextContent(text)) => { + assert_eq!(text, "hello"); + } + other => panic!("expected TextContent, got {other:?}"), + } + } + + /// proto_message_to_value correctly round-trips a typed Message. 
+ #[test] + fn proto_message_to_value_roundtrip() { + let msg = amplifier_module::Message { + role: amplifier_module::Role::Assistant as i32, + content: Some(amplifier_module::message::Content::TextContent( + "response text".to_string(), + )), + name: String::new(), + tool_call_id: String::new(), + metadata_json: String::new(), + }; + let val = GrpcContextBridge::proto_message_to_value(&msg); + assert_eq!(val["role"], "assistant"); + assert_eq!(val["content"], "response text"); + } + + /// None content maps to Value::Null. + #[test] + fn proto_message_to_value_none_content_is_null() { + let msg = amplifier_module::Message { + role: 0, + content: None, + name: String::new(), + tool_call_id: String::new(), + metadata_json: String::new(), + }; + assert_eq!(GrpcContextBridge::proto_message_to_value(&msg), Value::Null); + } + + /// BlockContent is now handled (not mapped to Null). + #[test] + fn proto_message_to_value_block_content_handled() { + let msg = amplifier_module::Message { + role: amplifier_module::Role::Assistant as i32, + content: Some(amplifier_module::message::Content::BlockContent( + amplifier_module::ContentBlockList { + blocks: vec![amplifier_module::ContentBlock { + block: Some(amplifier_module::content_block::Block::TextBlock( + amplifier_module::TextBlock { + text: "block text".into(), + }, + )), + visibility: 0, + }], + }, + )), + name: String::new(), + tool_call_id: String::new(), + metadata_json: String::new(), + }; + let val = GrpcContextBridge::proto_message_to_value(&msg); + assert_ne!(val, Value::Null, "BlockContent should no longer map to Null"); + assert_eq!(val["role"], "assistant"); + // Content should be an array of blocks + assert!(val["content"].is_array(), "Block content should be array"); + } +``` + +### Step 4: Run tests to verify they pass + +Run: +```bash +cd crates/amplifier-core && cargo test -p amplifier-core -- grpc_context::tests --nocapture 2>&1 +``` +Expected: All tests pass. 
+ +### Step 5: Run clippy + +Run: +```bash +cd crates/amplifier-core && cargo clippy -p amplifier-core -- -D warnings 2>&1 +``` +Expected: No warnings. + +### Step 6: Commit + +```bash +cd /home/bkrabach/dev/rust-devrust-core/amplifier-core && git add crates/amplifier-core/src/bridges/grpc_context.rs && git commit -m "fix(grpc): full-fidelity Message conversion in GrpcContextBridge + +Replace lossy value_to_proto_message/proto_message_to_value with +type-safe serde_json::from_value::() and the new bidirectional +conversions. Populate provider_name in get_messages_for_request()." +``` + +--- + +## Task 7: Fix GrpcProviderBridge::complete() Stub + +**Files:** +- Modify: `crates/amplifier-core/src/bridges/grpc_provider.rs` +- Test: inline `#[cfg(test)]` in same file + +### Step 1: Write a test for the complete method + +Add to `mod tests` in `crates/amplifier-core/src/bridges/grpc_provider.rs`: + +```rust + /// The complete method should use ChatRequest/ChatResponse conversions + /// (not return a stub error). We can't test the gRPC call itself without + /// a server, but we verify the conversion path compiles. 
+ #[test] + fn complete_uses_conversion_functions() { + // Verify the conversion functions exist and are callable + use crate::generated::conversions::{native_chat_request_to_proto, proto_chat_response_to_native}; + use crate::messages::{ChatRequest, Message, MessageContent, Role}; + + let req = ChatRequest { + messages: vec![Message { + role: Role::User, + content: MessageContent::Text("test".into()), + name: None, + tool_call_id: None, + metadata: None, + extensions: std::collections::HashMap::new(), + }], + tools: None, + response_format: None, + temperature: None, + top_p: None, + max_output_tokens: None, + conversation_id: None, + stream: None, + metadata: None, + model: None, + tool_choice: None, + stop: None, + reasoning_effort: None, + timeout: None, + extensions: std::collections::HashMap::new(), + }; + let proto = native_chat_request_to_proto(&req); + assert!(!proto.messages.is_empty()); + // Verify reverse direction compiles + let _native = proto_chat_response_to_native(&crate::generated::amplifier_module::ChatResponse::default()); + } +``` + +### Step 2: Implement complete() using real conversions + +Replace the `complete` method body in `crates/amplifier-core/src/bridges/grpc_provider.rs` (lines 143-164): + +```rust + fn complete( + &self, + request: ChatRequest, + ) -> Pin> + Send + '_>> { + Box::pin(async move { + let proto_request = + crate::generated::conversions::native_chat_request_to_proto(&request); + + let response = { + let mut client = self.client.lock().await; + client.complete(proto_request).await.map_err(|e| { + ProviderError::Other { + message: format!("gRPC call failed: {}", e), + provider: Some(self.name.clone()), + model: None, + retry_after: None, + status_code: None, + retryable: false, + delay_multiplier: None, + } + })? 
+ }; + + let proto_response = response.into_inner(); + Ok(crate::generated::conversions::proto_chat_response_to_native( + &proto_response, + )) + }) + } +``` + +### Step 3: Run tests and clippy + +Run: +```bash +cd crates/amplifier-core && cargo test -p amplifier-core -- grpc_provider::tests --nocapture 2>&1 +cd crates/amplifier-core && cargo clippy -p amplifier-core -- -D warnings 2>&1 +``` +Expected: All pass. + +### Step 4: Commit + +```bash +cd /home/bkrabach/dev/rust-devrust-core/amplifier-core && git add crates/amplifier-core/src/bridges/grpc_provider.rs && git commit -m "fix(grpc): implement GrpcProviderBridge::complete() using ChatRequest/ChatResponse conversions + +Replaces the Phase 2 stub that returned Err(ProviderError::Other)." +``` + +--- + +## Task 8: Session Routing — Add session_id to GrpcOrchestratorBridge + +**Files:** +- Modify: `crates/amplifier-core/src/bridges/grpc_orchestrator.rs` +- Test: inline `#[cfg(test)]` in same file + +### Step 1: Add session_id field and update constructor + +In `crates/amplifier-core/src/bridges/grpc_orchestrator.rs`, modify the struct and constructor: + +Replace the struct definition (lines 39-41): +```rust +pub struct GrpcOrchestratorBridge { + client: tokio::sync::Mutex>, + session_id: String, +} +``` + +Replace the `connect` method (lines 43-51): +```rust + /// Connect to a remote orchestrator service. + /// + /// # Arguments + /// + /// * `endpoint` — gRPC endpoint URL. + /// * `session_id` — Session ID for KernelService callback routing. 
+ pub async fn connect( + endpoint: &str, + session_id: String, + ) -> Result> { + let client = OrchestratorServiceClient::connect(endpoint.to_string()).await?; + + Ok(Self { + client: tokio::sync::Mutex::new(client), + session_id, + }) + } +``` + +### Step 2: Update execute() — populate session_id, document discarded params + +Replace the entire `Orchestrator` impl (lines 54-98): + +```rust +impl Orchestrator for GrpcOrchestratorBridge { + /// Execute a prompt via the remote orchestrator. + /// + /// The 5 subsystem parameters (`context`, `providers`, `tools`, `hooks`, + /// `coordinator`) are not transmitted over gRPC — this is by design. + /// Remote orchestrators access these via KernelService RPCs using the + /// `session_id` stored on this bridge at construction time. + fn execute( + &self, + prompt: String, + _context: Arc, + _providers: HashMap>, + _tools: HashMap>, + _hooks: Value, + _coordinator: Value, + ) -> Pin> + Send + '_>> { + Box::pin(async move { + log::debug!( + "GrpcOrchestratorBridge::execute — context, providers, tools, hooks, and coordinator \ + parameters are not transmitted via gRPC (remote orchestrator uses KernelService callbacks)" + ); + let request = amplifier_module::OrchestratorExecuteRequest { + prompt, + session_id: self.session_id.clone(), + }; + + let response = { + let mut client = self.client.lock().await; + client.execute(request).await.map_err(|e| { + AmplifierError::Session(SessionError::Other { + message: format!("gRPC: {}", e), + }) + })? + }; + + let resp = response.into_inner(); + + if !resp.error.is_empty() { + return Err(AmplifierError::Session(SessionError::Other { + message: resp.error, + })); + } + + Ok(resp.response) + }) + } +} +``` + +### Step 3: Update tests + +Replace the test module (lines 100-146): + +```rust +#[cfg(test)] +mod tests { + use super::*; + + #[allow(dead_code)] + fn assert_orchestrator_trait_object(_: Arc) {} + + /// Compile-time check: GrpcOrchestratorBridge can be wrapped in Arc. 
+ #[allow(dead_code)] + fn grpc_orchestrator_bridge_is_orchestrator() { + fn _check(bridge: GrpcOrchestratorBridge) { + assert_orchestrator_trait_object(Arc::new(bridge)); + } + } + + /// execute() passes session_id to the remote orchestrator. The 5 subsystem + /// parameters are intentionally not transmitted — remote orchestrators + /// access them via KernelService RPCs. + #[test] + fn execute_documents_by_design_param_handling() { + let full_source = include_str!("grpc_orchestrator.rs"); + let impl_source = full_source + .split("\n#[cfg(test)]") + .next() + .expect("source must contain an impl section before #[cfg(test)]"); + + // session_id should be populated from self, not empty + assert!( + impl_source.contains("session_id: self.session_id.clone()"), + "session_id must be populated from self.session_id" + ); + // The log::debug documenting the by-design parameter handling should still exist + assert!( + impl_source.contains("log::debug!("), + "execute() impl must contain a log::debug!() call documenting parameter handling" + ); + // No TODO(grpc-v2) markers should remain + assert!( + !impl_source.contains("TODO(grpc-v2)"), + "All TODO(grpc-v2) markers should be removed from orchestrator bridge" + ); + } +} +``` + +### Step 4: Fix doc example + +The doc example at the top of the file (lines 10-18) references `connect` with one parameter. Update it to pass a session_id: + +```rust +//! let bridge = GrpcOrchestratorBridge::connect("http://localhost:50051", "session-123".into()).await?; +``` + +### Step 5: Run tests and clippy + +Run: +```bash +cd crates/amplifier-core && cargo test -p amplifier-core -- grpc_orchestrator --nocapture 2>&1 +cd crates/amplifier-core && cargo clippy -p amplifier-core -- -D warnings 2>&1 +``` + +**Important:** If other files call `GrpcOrchestratorBridge::connect()` with one argument, they'll need updating too. 
Search for callers: +```bash +cd crates/amplifier-core && grep -rn "GrpcOrchestratorBridge::connect" src/ 2>&1 +``` +Fix any callers to pass the additional `session_id` parameter. + +Expected: All pass. + +### Step 6: Commit + +```bash +cd /home/bkrabach/dev/rust-devrust-core/amplifier-core && git add crates/amplifier-core/src/bridges/grpc_orchestrator.rs && git commit -m "fix(grpc): add session_id to GrpcOrchestratorBridge for KernelService callback routing + +- session_id stored on struct at construction, populated in requests +- Document 5 discarded params as by-design (remote uses KernelService) +- Remove all TODO(grpc-v2) markers from this file" +``` + +--- + +## Task 9: Session — Arc\ and coordinator_shared() + +**Files:** +- Modify: `crates/amplifier-core/src/session.rs` +- Test: inline `#[cfg(test)]` in same file + +### Step 1: Write a failing test + +Add to `mod tests` in `crates/amplifier-core/src/session.rs`: + +```rust + #[test] + fn coordinator_shared_returns_arc() { + let config = SessionConfig::minimal("loop-basic", "context-simple"); + let session = Session::new(config, None, None); + let shared: Arc = session.coordinator_shared(); + // Verify it points to the same coordinator + assert!(shared.tools().is_empty()); + } +``` + +### Step 2: Run test to verify it fails + +Run: +```bash +cd crates/amplifier-core && cargo test -p amplifier-core -- session::tests::coordinator_shared 2>&1 +``` +Expected: FAIL — `coordinator_shared()` doesn't exist yet. + +### Step 3: Change Session to use Arc\ + +In `crates/amplifier-core/src/session.rs`, make these changes: + +Add `use std::sync::Arc;` to the imports at the top (after the existing `use` statements). 
+
+Change the `coordinator` field in the `Session` struct (line 141):
+```rust
+    coordinator: Arc<Coordinator>,
+```
+
+In `Session::new()` (around line 161), change:
+```rust
+    let coordinator = Coordinator::new(config.config);
+```
+to:
+```rust
+    let coordinator = Arc::new(Coordinator::new(config.config));
+```
+
+Change `coordinator()` (line 221-223):
+```rust
+    pub fn coordinator(&self) -> &Coordinator {
+        &self.coordinator
+    }
+```
+
+Change `coordinator_mut()` (line 226-228):
+```rust
+    /// Mutable reference to the coordinator (for mounting modules).
+    ///
+    /// # Panics
+    ///
+    /// Panics if the Arc has been shared (i.e., `coordinator_shared()` was
+    /// called). Only call this during setup, before sharing the coordinator.
+    pub fn coordinator_mut(&mut self) -> &mut Coordinator {
+        Arc::get_mut(&mut self.coordinator)
+            .expect("coordinator_mut() called after Arc was shared — only use during setup")
+    }
+```
+
+Add `coordinator_shared()` after `coordinator_mut()`:
+```rust
+    /// Get a shared reference to the coordinator.
+    ///
+    /// Returns an `Arc<Coordinator>` suitable for passing to
+    /// `KernelServiceImpl`. After calling this, `coordinator_mut()` will
+    /// panic because the Arc has multiple owners.
+    pub fn coordinator_shared(&self) -> Arc<Coordinator> {
+        Arc::clone(&self.coordinator)
+    }
+```
+
+### Step 4: Run ALL tests to verify nothing broke
+
+Run:
+```bash
+cd crates/amplifier-core && cargo test -p amplifier-core --verbose 2>&1 | tail -60
+```
+Expected: All tests pass including the new `coordinator_shared` test. The existing tests that use `coordinator_mut()` still work because `coordinator_shared()` hasn't been called yet in those tests. 
+ +### Step 5: Commit + +```bash +cd /home/bkrabach/dev/rust-devrust-core/amplifier-core && git add crates/amplifier-core/src/session.rs && git commit -m "refactor(session): store Coordinator as Arc for KernelService sharing + +- Session now holds Arc internally +- coordinator() returns &Coordinator (unchanged API) +- coordinator_mut() uses Arc::get_mut (panics if shared) +- New coordinator_shared() -> Arc for KernelService" +``` + +--- + +## Task 10: KernelService — GetCapability + RegisterCapability + +**Files:** +- Modify: `crates/amplifier-core/src/grpc_server.rs` +- Test: inline `#[cfg(test)]` in same file + +### Step 1: Write failing tests + +Add to `mod tests` in `crates/amplifier-core/src/grpc_server.rs`: + +```rust + #[tokio::test] + async fn register_and_get_capability() { + let coord = Arc::new(Coordinator::new(Default::default())); + let service = KernelServiceImpl::new(coord); + + // Register a capability + let req = Request::new(amplifier_module::RegisterCapabilityRequest { + name: "streaming".into(), + value_json: r#"{"enabled": true}"#.into(), + }); + let resp = service.register_capability(req).await; + assert!(resp.is_ok()); + + // Get it back + let req = Request::new(amplifier_module::GetCapabilityRequest { + name: "streaming".into(), + }); + let resp = service.get_capability(req).await.unwrap().into_inner(); + assert!(resp.found); + assert_eq!(resp.value_json, r#"{"enabled":true}"#); + } + + #[tokio::test] + async fn get_capability_not_found() { + let coord = Arc::new(Coordinator::new(Default::default())); + let service = KernelServiceImpl::new(coord); + + let req = Request::new(amplifier_module::GetCapabilityRequest { + name: "nonexistent".into(), + }); + let resp = service.get_capability(req).await.unwrap().into_inner(); + assert!(!resp.found); + assert!(resp.value_json.is_empty()); + } +``` + +### Step 2: Run tests to verify they fail + +Run: +```bash +cd crates/amplifier-core && cargo test -p amplifier-core -- 
grpc_server::tests::register_and_get 2>&1
+```
+Expected: FAIL — RPCs return `Status::unimplemented`.
+
+### Step 3: Implement the two RPCs
+
+In `crates/amplifier-core/src/grpc_server.rs`, replace the `register_capability` method:
+
+```rust
+    async fn register_capability(
+        &self,
+        request: Request<amplifier_module::RegisterCapabilityRequest>,
+    ) -> Result<Response<amplifier_module::Empty>, Status> {
+        let req = request.into_inner();
+        let value: serde_json::Value = serde_json::from_str(&req.value_json)
+            .map_err(|e| Status::invalid_argument(format!("Invalid value_json: {e}")))?;
+        self.coordinator.register_capability(&req.name, value);
+        Ok(Response::new(amplifier_module::Empty {}))
+    }
+```
+
+Replace the `get_capability` method:
+
+```rust
+    async fn get_capability(
+        &self,
+        request: Request<amplifier_module::GetCapabilityRequest>,
+    ) -> Result<Response<amplifier_module::GetCapabilityResponse>, Status> {
+        let req = request.into_inner();
+        match self.coordinator.get_capability(&req.name) {
+            Some(value) => {
+                let value_json = serde_json::to_string(&value)
+                    .map_err(|e| Status::internal(format!("Failed to serialize capability: {e}")))?;
+                Ok(Response::new(amplifier_module::GetCapabilityResponse {
+                    found: true,
+                    value_json,
+                }))
+            }
+            None => Ok(Response::new(amplifier_module::GetCapabilityResponse {
+                found: false,
+                value_json: String::new(),
+            })),
+        }
+    }
+```
+
+### Step 4: Run tests to verify they pass
+
+Run:
+```bash
+cd crates/amplifier-core && cargo test -p amplifier-core -- grpc_server::tests --nocapture 2>&1
+```
+Expected: All tests pass. 
+ +### Step 5: Commit + +```bash +cd /home/bkrabach/dev/rust-devrust-core/amplifier-core && git add crates/amplifier-core/src/grpc_server.rs && git commit -m "feat(grpc): implement GetCapability + RegisterCapability KernelService RPCs" +``` + +--- + +## Task 11: KernelService — GetMountedModule + +**Files:** +- Modify: `crates/amplifier-core/src/grpc_server.rs` +- Test: inline `#[cfg(test)]` + +### Step 1: Write failing test + +Add to `mod tests`: + +```rust + #[tokio::test] + async fn get_mounted_module_tool_found() { + use crate::testing::FakeTool; + + let coord = Arc::new(Coordinator::new(Default::default())); + coord.mount_tool("echo", Arc::new(FakeTool::new("echo", "echoes input"))); + let service = KernelServiceImpl::new(coord); + + let req = Request::new(amplifier_module::GetMountedModuleRequest { + module_name: "echo".into(), + module_type: amplifier_module::ModuleType::Tool as i32, + }); + let resp = service.get_mounted_module(req).await.unwrap().into_inner(); + assert!(resp.found); + assert_eq!(resp.info.as_ref().unwrap().name, "echo"); + } + + #[tokio::test] + async fn get_mounted_module_not_found() { + let coord = Arc::new(Coordinator::new(Default::default())); + let service = KernelServiceImpl::new(coord); + + let req = Request::new(amplifier_module::GetMountedModuleRequest { + module_name: "nonexistent".into(), + module_type: amplifier_module::ModuleType::Tool as i32, + }); + let resp = service.get_mounted_module(req).await.unwrap().into_inner(); + assert!(!resp.found); + } +``` + +### Step 2: Implement GetMountedModule + +Replace the stub in `grpc_server.rs`: + +```rust + async fn get_mounted_module( + &self, + request: Request, + ) -> Result, Status> { + let req = request.into_inner(); + let module_type = amplifier_module::ModuleType::try_from(req.module_type) + .unwrap_or(amplifier_module::ModuleType::Unspecified); + + let found = match module_type { + amplifier_module::ModuleType::Tool => self.coordinator.get_tool(&req.module_name).is_some(), + 
amplifier_module::ModuleType::Provider => { + self.coordinator.get_provider(&req.module_name).is_some() + } + _ => false, + }; + + if found { + let info = amplifier_module::ModuleInfo { + id: req.module_name.clone(), + name: req.module_name, + version: String::new(), + module_type: req.module_type, + mount_point: String::new(), + description: String::new(), + config_schema_json: String::new(), + capabilities: vec![], + author: String::new(), + }; + Ok(Response::new(amplifier_module::GetMountedModuleResponse { + found: true, + info: Some(info), + })) + } else { + Ok(Response::new(amplifier_module::GetMountedModuleResponse { + found: false, + info: None, + })) + } + } +``` + +### Step 3: Run tests, commit + +Run: +```bash +cd crates/amplifier-core && cargo test -p amplifier-core -- grpc_server::tests --nocapture 2>&1 +``` + +```bash +cd /home/bkrabach/dev/rust-devrust-core/amplifier-core && git add crates/amplifier-core/src/grpc_server.rs && git commit -m "feat(grpc): implement GetMountedModule KernelService RPC" +``` + +--- + +## Task 12: KernelService — AddMessage + GetMessages + +**Files:** +- Modify: `crates/amplifier-core/src/grpc_server.rs` +- Test: inline `#[cfg(test)]` + +### Step 1: Write failing tests + +Add to `mod tests`: + +```rust + #[tokio::test] + async fn add_and_get_messages() { + use crate::testing::FakeContextManager; + use crate::generated::conversions; + + let coord = Arc::new(Coordinator::new(Default::default())); + coord.set_context(Arc::new(FakeContextManager::new())); + let service = KernelServiceImpl::new(coord); + + // Build a proto Message + let native_msg = crate::messages::Message { + role: crate::messages::Role::User, + content: crate::messages::MessageContent::Text("hello".into()), + name: None, + tool_call_id: None, + metadata: None, + extensions: std::collections::HashMap::new(), + }; + let proto_msg = conversions::native_message_to_proto(&native_msg); + + // Add it + let req = Request::new(amplifier_module::KernelAddMessageRequest 
{ + session_id: "test".into(), + message: Some(proto_msg), + }); + let resp = service.add_message(req).await; + assert!(resp.is_ok(), "add_message should succeed"); + + // Get messages back + let req = Request::new(amplifier_module::GetMessagesRequest { + session_id: "test".into(), + }); + let resp = service.get_messages(req).await.unwrap().into_inner(); + assert!(!resp.messages.is_empty(), "should have at least one message"); + } +``` + +### Step 2: Implement AddMessage and GetMessages + +Replace the stubs: + +```rust + async fn add_message( + &self, + request: Request, + ) -> Result, Status> { + let req = request.into_inner(); + let proto_msg = req + .message + .ok_or_else(|| Status::invalid_argument("missing message"))?; + + let native_msg = crate::generated::conversions::proto_message_to_native(&proto_msg) + .map_err(|e| Status::invalid_argument(format!("Invalid message: {e}")))?; + + let message_value = serde_json::to_value(&native_msg) + .map_err(|e| Status::internal(format!("Failed to serialize message: {e}")))?; + + let context = self + .coordinator + .context() + .ok_or_else(|| Status::failed_precondition("No context manager mounted"))?; + + context + .add_message(message_value) + .await + .map_err(|e| Status::internal(format!("Failed to add message: {e}")))?; + + Ok(Response::new(amplifier_module::Empty {})) + } + + async fn get_messages( + &self, + _request: Request, + ) -> Result, Status> { + let context = self + .coordinator + .context() + .ok_or_else(|| Status::failed_precondition("No context manager mounted"))?; + + let messages = context + .get_messages() + .await + .map_err(|e| Status::internal(format!("Failed to get messages: {e}")))?; + + let proto_messages: Vec = messages + .iter() + .filter_map(|v| { + match serde_json::from_value::(v.clone()) { + Ok(native) => { + Some(crate::generated::conversions::native_message_to_proto(&native)) + } + Err(e) => { + log::warn!("Failed to convert message to proto: {e}"); + None + } + } + }) + .collect(); + + 
Ok(Response::new(amplifier_module::GetMessagesResponse { + messages: proto_messages, + })) + } +``` + +### Step 3: Run tests, commit + +Run: +```bash +cd crates/amplifier-core && cargo test -p amplifier-core -- grpc_server::tests --nocapture 2>&1 +``` + +```bash +cd /home/bkrabach/dev/rust-devrust-core/amplifier-core && git add crates/amplifier-core/src/grpc_server.rs && git commit -m "feat(grpc): implement AddMessage + GetMessages KernelService RPCs" +``` + +--- + +## Task 13: KernelService — EmitHook + EmitHookAndCollect + +**Files:** +- Modify: `crates/amplifier-core/src/grpc_server.rs` +- Test: inline `#[cfg(test)]` + +### Step 1: Write failing tests + +Add to `mod tests`: + +```rust + #[tokio::test] + async fn emit_hook_returns_continue_with_no_handlers() { + let coord = Arc::new(Coordinator::new(Default::default())); + let service = KernelServiceImpl::new(coord); + + let req = Request::new(amplifier_module::EmitHookRequest { + event: "test:event".into(), + data_json: "{}".into(), + }); + let resp = service.emit_hook(req).await.unwrap().into_inner(); + assert_eq!(resp.action, amplifier_module::HookAction::Continue as i32); + } + + #[tokio::test] + async fn emit_hook_and_collect_returns_empty_with_no_handlers() { + let coord = Arc::new(Coordinator::new(Default::default())); + let service = KernelServiceImpl::new(coord); + + let req = Request::new(amplifier_module::EmitHookAndCollectRequest { + event: "test:event".into(), + data_json: "{}".into(), + timeout_seconds: 5.0, + }); + let resp = service.emit_hook_and_collect(req).await.unwrap().into_inner(); + assert!(resp.responses_json.is_empty()); + } +``` + +### Step 2: Implement EmitHook and EmitHookAndCollect + +Add `use std::time::Duration;` to the imports at the top of `grpc_server.rs`. 
+
+Replace the stubs:
+
+```rust
+    async fn emit_hook(
+        &self,
+        request: Request<amplifier_module::EmitHookRequest>,
+    ) -> Result<Response<amplifier_module::HookResult>, Status> {
+        let req = request.into_inner();
+        let data: serde_json::Value = serde_json::from_str(&req.data_json)
+            .map_err(|e| Status::invalid_argument(format!("Invalid data_json: {e}")))?;
+
+        let result = self.coordinator.hooks().emit(&req.event, data).await;
+        let proto_result =
+            crate::bridges::grpc_hook::GrpcHookBridge::native_to_proto_hook_result(&result);
+        Ok(Response::new(proto_result))
+    }
+
+    async fn emit_hook_and_collect(
+        &self,
+        request: Request<amplifier_module::EmitHookAndCollectRequest>,
+    ) -> Result<Response<amplifier_module::EmitHookAndCollectResponse>, Status> {
+        let req = request.into_inner();
+        let data: serde_json::Value = serde_json::from_str(&req.data_json)
+            .map_err(|e| Status::invalid_argument(format!("Invalid data_json: {e}")))?;
+
+        let timeout = Duration::from_secs_f64(req.timeout_seconds.max(0.1));
+        let results = self
+            .coordinator
+            .hooks()
+            .emit_and_collect(&req.event, data, timeout)
+            .await;
+
+        let responses_json: Vec<String> = results
+            .iter()
+            .filter_map(|r| serde_json::to_string(r).ok())
+            .collect();
+
+        Ok(Response::new(
+            amplifier_module::EmitHookAndCollectResponse { responses_json },
+        ))
+    }
+```
+
+### Step 3: Run tests, commit
+
+Run:
+```bash
+cd crates/amplifier-core && cargo test -p amplifier-core -- grpc_server::tests --nocapture 2>&1
+```
+
+```bash
+cd /home/bkrabach/dev/rust-devrust-core/amplifier-core && git add crates/amplifier-core/src/grpc_server.rs && git commit -m "feat(grpc): implement EmitHook + EmitHookAndCollect KernelService RPCs"
+```
+
+---
+
+## Task 14: KernelService — CompleteWithProvider
+
+**Files:**
+- Modify: `crates/amplifier-core/src/grpc_server.rs`
+- Test: inline `#[cfg(test)]`
+
+### Step 1: Write failing test
+
+Add to `mod tests`:
+
+```rust
+    #[tokio::test]
+    async fn complete_with_provider_not_found() {
+        let coord = Arc::new(Coordinator::new(Default::default()));
+        let service = KernelServiceImpl::new(coord);
+
+        let req = 
Request::new(amplifier_module::CompleteWithProviderRequest { + provider_name: "nonexistent".into(), + request: Some(amplifier_module::ChatRequest::default()), + }); + let resp = service.complete_with_provider(req).await; + assert!(resp.is_err()); + let status = resp.unwrap_err(); + assert_eq!(status.code(), tonic::Code::NotFound); + } + + #[tokio::test] + async fn complete_with_provider_success() { + use crate::testing::FakeProvider; + + let coord = Arc::new(Coordinator::new(Default::default())); + coord.mount_provider("test", Arc::new(FakeProvider::new("test", "hello response"))); + let service = KernelServiceImpl::new(coord); + + // Build a minimal ChatRequest + let native_req = crate::messages::ChatRequest { + messages: vec![crate::messages::Message { + role: crate::messages::Role::User, + content: crate::messages::MessageContent::Text("hi".into()), + name: None, + tool_call_id: None, + metadata: None, + extensions: std::collections::HashMap::new(), + }], + tools: None, + response_format: None, + temperature: None, + top_p: None, + max_output_tokens: None, + conversation_id: None, + stream: None, + metadata: None, + model: None, + tool_choice: None, + stop: None, + reasoning_effort: None, + timeout: None, + extensions: std::collections::HashMap::new(), + }; + let proto_req = crate::generated::conversions::native_chat_request_to_proto(&native_req); + + let req = Request::new(amplifier_module::CompleteWithProviderRequest { + provider_name: "test".into(), + request: Some(proto_req), + }); + let resp = service.complete_with_provider(req).await; + assert!(resp.is_ok(), "complete should succeed: {:?}", resp.err()); + } +``` + +### Step 2: Implement CompleteWithProvider + +Replace the stub: + +```rust + async fn complete_with_provider( + &self, + request: Request, + ) -> Result, Status> { + let req = request.into_inner(); + + let provider = self + .coordinator + .get_provider(&req.provider_name) + .ok_or_else(|| { + Status::not_found(format!("Provider not found: {}", 
req.provider_name)) + })?; + + let proto_chat_req = req + .request + .ok_or_else(|| Status::invalid_argument("missing request"))?; + + let native_req = + crate::generated::conversions::proto_chat_request_to_native(&proto_chat_req) + .map_err(|e| Status::invalid_argument(format!("Invalid ChatRequest: {e}")))?; + + let native_resp = provider + .complete(native_req) + .await + .map_err(|e| Status::internal(format!("Provider error: {e}")))?; + + let proto_resp = + crate::generated::conversions::native_chat_response_to_proto(&native_resp); + + Ok(Response::new(proto_resp)) + } +``` + +### Step 3: Run tests, commit + +Run: +```bash +cd crates/amplifier-core && cargo test -p amplifier-core -- grpc_server::tests --nocapture 2>&1 +``` + +```bash +cd /home/bkrabach/dev/rust-devrust-core/amplifier-core && git add crates/amplifier-core/src/grpc_server.rs && git commit -m "feat(grpc): implement CompleteWithProvider KernelService RPC" +``` + +--- + +## Task 15: KernelService — CompleteWithProviderStreaming + +**Files:** +- Modify: `crates/amplifier-core/src/grpc_server.rs` +- Test: inline `#[cfg(test)]` + +### Step 1: Write failing test + +Add to `mod tests`: + +```rust + #[tokio::test] + async fn complete_with_provider_streaming_wraps_single_response() { + use crate::testing::FakeProvider; + use tokio_stream::StreamExt; + + let coord = Arc::new(Coordinator::new(Default::default())); + coord.mount_provider("test", Arc::new(FakeProvider::new("test", "streamed response"))); + let service = KernelServiceImpl::new(coord); + + let native_req = crate::messages::ChatRequest { + messages: vec![crate::messages::Message { + role: crate::messages::Role::User, + content: crate::messages::MessageContent::Text("hi".into()), + name: None, + tool_call_id: None, + metadata: None, + extensions: std::collections::HashMap::new(), + }], + tools: None, + response_format: None, + temperature: None, + top_p: None, + max_output_tokens: None, + conversation_id: None, + stream: None, + metadata: None, + 
model: None, + tool_choice: None, + stop: None, + reasoning_effort: None, + timeout: None, + extensions: std::collections::HashMap::new(), + }; + let proto_req = crate::generated::conversions::native_chat_request_to_proto(&native_req); + + let req = Request::new(amplifier_module::CompleteWithProviderRequest { + provider_name: "test".into(), + request: Some(proto_req), + }); + let resp = service.complete_with_provider_streaming(req).await; + assert!(resp.is_ok(), "streaming should succeed"); + + // Collect stream — should have exactly 1 chunk + let mut stream = resp.unwrap().into_inner(); + let mut chunks = vec![]; + while let Some(item) = stream.next().await { + chunks.push(item.expect("chunk should be Ok")); + } + assert_eq!(chunks.len(), 1, "one-shot stream should produce exactly 1 chunk"); + } +``` + +### Step 2: Implement CompleteWithProviderStreaming + +Replace the stub: + +```rust + async fn complete_with_provider_streaming( + &self, + request: Request, + ) -> Result, Status> { + let req = request.into_inner(); + + let provider = self + .coordinator + .get_provider(&req.provider_name) + .ok_or_else(|| { + Status::not_found(format!("Provider not found: {}", req.provider_name)) + })?; + + let proto_chat_req = req + .request + .ok_or_else(|| Status::invalid_argument("missing request"))?; + + let native_req = + crate::generated::conversions::proto_chat_request_to_native(&proto_chat_req) + .map_err(|e| Status::invalid_argument(format!("Invalid ChatRequest: {e}")))?; + + // Wrap single complete() as one-shot stream. + // True streaming requires Provider trait extension (future work). 
+ let (tx, rx) = tokio::sync::mpsc::channel(1); + + let provider = provider.clone(); + tokio::spawn(async move { + match provider.complete(native_req).await { + Ok(native_resp) => { + let proto_resp = + crate::generated::conversions::native_chat_response_to_proto(&native_resp); + let _ = tx.send(Ok(proto_resp)).await; + } + Err(e) => { + let _ = tx + .send(Err(Status::internal(format!("Provider error: {e}")))) + .await; + } + } + }); + + Ok(Response::new(tokio_stream::wrappers::ReceiverStream::new( + rx, + ))) + } +``` + +### Step 3: Run tests, commit + +Run: +```bash +cd crates/amplifier-core && cargo test -p amplifier-core -- grpc_server::tests --nocapture 2>&1 +``` + +```bash +cd /home/bkrabach/dev/rust-devrust-core/amplifier-core && git add crates/amplifier-core/src/grpc_server.rs && git commit -m "feat(grpc): implement CompleteWithProviderStreaming as one-shot stream + +Wraps single provider.complete() into a streamed response with 1 chunk. +True streaming requires Provider trait extension (tracked as future work)." +``` + +--- + +## Task 16: Cleanup — Remove TODO(grpc-v2) Markers + +**Files:** +- Modify: any files still containing `TODO(grpc-v2)` +- Test: verify with grep + +### Step 1: Find all remaining TODO(grpc-v2) markers + +Run: +```bash +cd /home/bkrabach/dev/rust-devrust-core/amplifier-core && grep -rn "TODO(grpc-v2)" crates/ proto/ docs/ 2>&1 +``` + +Review each hit. At this point, all code TODOs should already be fixed by Tasks 0-15. If any remain, fix them now. + +### Step 2: Update the audit doc references + +If `docs/plans/2026-03-03-audit-fix-design.md` exists and references `TODO(grpc-v2)` markers as "deferred", add a note that they are resolved: + +```bash +cd /home/bkrabach/dev/rust-devrust-core/amplifier-core && grep -n "grpc-v2\|TODO.*grpc" docs/plans/2026-03-03-audit-fix-design.md 2>&1 +``` + +If references are found, add a note near them: ``. 
+ +### Step 3: Verify no TODO(grpc-v2) markers remain in source code + +Run: +```bash +cd /home/bkrabach/dev/rust-devrust-core/amplifier-core && grep -rn "TODO(grpc-v2)" crates/ 2>&1 +``` +Expected: No matches (exit code 1). + +### Step 4: Run full test suite + +Run: +```bash +cd crates/amplifier-core && cargo test -p amplifier-core --verbose 2>&1 | tail -60 +``` +Expected: All tests pass. + +### Step 5: Run clippy + +Run: +```bash +cd crates/amplifier-core && cargo clippy -p amplifier-core -- -D warnings 2>&1 +``` +Expected: No warnings. + +### Step 6: Commit + +```bash +cd /home/bkrabach/dev/rust-devrust-core/amplifier-core && git add -A && git commit -m "chore(grpc): remove all TODO(grpc-v2) markers — debt fully resolved + +All 15 code TODOs fixed, all 8 KernelService RPCs implemented, +GrpcProviderBridge::complete() working, session routing via session_id." +``` + +--- + +## Final Verification Checklist + +After all 17 tasks are complete, run these commands: + +```bash +# 1. No TODO(grpc-v2) markers remain +cd /home/bkrabach/dev/rust-devrust-core/amplifier-core && grep -rn "TODO(grpc-v2)" crates/ + +# 2. Full test suite passes +cd crates/amplifier-core && cargo test -p amplifier-core --verbose + +# 3. Clippy clean +cd crates/amplifier-core && cargo clippy -p amplifier-core -- -D warnings + +# 4. Build succeeds +cd crates/amplifier-core && cargo build + +# 5. Git log shows layered commits +git log --oneline -20 +``` + +All 5 checks must pass before the PR is ready. 
diff --git a/docs/plans/2026-03-04-phase2-napi-rs-typescript-design.md b/docs/plans/2026-03-04-phase2-napi-rs-typescript-design.md new file mode 100644 index 0000000..899af49 --- /dev/null +++ b/docs/plans/2026-03-04-phase2-napi-rs-typescript-design.md @@ -0,0 +1,305 @@ +# Cross-Language SDK Phase 2: TypeScript/Napi-RS Bindings Design + +## Goal + +Deliver TypeScript/Node.js bindings for the amplifier-core Rust kernel via Napi-RS, enabling three consumer types: TypeScript host apps (full agent loop), TypeScript in-process modules (Tool/Provider/etc. implementations), and TypeScript gRPC module authoring helpers — while batching two dependency security upgrades (pyo3, wasmtime). + +## Background + +This is Phase 2 of the 5-phase Cross-Language SDK plan documented in [`2026-03-02-cross-language-session-sdk-design.md`](./2026-03-02-cross-language-session-sdk-design.md). Phase 1 (complete) delivered the Python/PyO3 bridge — 4 classes wrapping the Rust kernel in ~2,885 lines of `bindings/python/src/lib.rs`. Phase 2 mirrors this for TypeScript. + +The existing Python bridge uses a "hybrid coordinator" pattern: Python Protocol objects are stored in a Python-side `mount_points` dict, while the Rust kernel handles config, turn tracking, and cancellation. The TypeScript binding follows the same pattern with a JS-side `Map` for module storage. + +### Three Consumer Types + +1. **TypeScript host apps** — full agent loop in Node.js (`new AmplifierSession(config) → execute() → cleanup()`) +2. **TypeScript in-process modules** — implement `Tool`/`Provider`/etc. interfaces, mount directly in a TS host +3. **TypeScript gRPC modules** — implement proto services, plug into any host (Python, Rust, future Go) via transport-invisible bridge + +## Approach + +Single-crate Napi-RS bridge mirroring the proven PyO3 structure. The Python bridge is a working, battle-tested pattern. 
The TypeScript binding mirrors it structurally: same 4 classes, same hybrid coordinator, same async bridging strategy adapted for Node's event loop. + +A single `lib.rs` file (matching Python's approach) keeps things simple and greppable. Splitting into modules is tracked as future work when the file outgrows maintainability. + +## Architecture + +``` +bindings/node/ +├── Cargo.toml # napi-rs crate +├── src/lib.rs # All Napi-RS bindings (mirrors bindings/python/src/lib.rs) +├── package.json # npm package config +├── index.js # Generated Napi-RS entry +├── index.d.ts # Generated TypeScript definitions +└── __tests__/ # Vitest test suite +``` + +The crate lives at `bindings/node/` in the workspace, parallel to `bindings/python/`. Both depend on `amplifier-core` as a path dependency and wrap the same Rust kernel types. + +## Components + +### Build Infrastructure & Dependencies + +**Workspace setup:** +- New crate `bindings/node/` added to workspace `Cargo.toml` members list +- Napi-RS framework: `napi` + `napi-derive` crates, `napi-build` as build dependency +- npm package (name TBD — likely `@amplifier/core` or `amplifier-core`) + +**Dependency upgrades (batched with this phase):** +- `pyo3` → `0.28.2` in `bindings/python/Cargo.toml` (and `pyo3-async-runtimes` to match) — HIGH severity type confusion fix (Dependabot alert #1) +- `wasmtime` → latest (currently 42) in `crates/amplifier-core/Cargo.toml` — covers all 8 Dependabot alerts (6 medium, 2 low). WASM bridge API breakage must be fixed since wasmtime jumps from v29 to v42. 
+ +**Generated outputs:** +- Napi-RS auto-generates `index.js` (native binding loader) and `index.d.ts` (TypeScript definitions) from `#[napi]` annotations +- Platform-specific `.node` binary + +**Crate dependencies:** +- `amplifier-core` (path dependency, same as Python binding) +- `napi` + `napi-derive` (Napi-RS framework) +- `tokio` (async runtime) +- `serde_json` (JSON bridging) +- `uuid` (session IDs) + +### TypeScript API Surface + +**Four classes exposed via `#[napi]`:** + +#### AmplifierSession — Primary Entry Point + +```typescript +interface SessionConfig { + providers?: Record; + tools?: Record; + orchestrator?: OrchestratorConfig; + context?: ContextConfig; + hooks?: HookConfig[]; + system_prompt?: string; + metadata?: Record; +} + +class AmplifierSession { + constructor(config: SessionConfig); + get sessionId(): string; + get parentId(): string | null; + get status(): SessionStatus; + get isInitialized(): boolean; + get coordinator(): Coordinator; + + async initialize(): Promise; + async execute(prompt: string): Promise; + async cleanup(): Promise; + + // Symbol.asyncDispose support + async [Symbol.asyncDispose](): Promise; +} +``` + +#### Coordinator — Module Mounting and Lifecycle + +```typescript +class Coordinator { + mountTool(name: string, tool: Tool): void; + mountProvider(name: string, provider: Provider): void; + setOrchestrator(orchestrator: Orchestrator): void; + setContext(context: ContextManager): void; + + getTool(name: string): Tool | null; + getProvider(name: string): Provider | null; + get tools(): string[]; + get providers(): string[]; + + get hooks(): HookRegistry; + get cancellation(): CancellationToken; + get config(): SessionConfig; + + registerCapability(name: string, value: T): void; + getCapability(name: string): T | null; + + async cleanup(): Promise; + resetTurn(): void; + toDict(): CoordinatorState; +} +``` + +#### HookRegistry — Event System + +```typescript +class HookRegistry { + register(event: string, handler: 
HookHandler): string; + unregister(handlerId: string): void; + async emit(event: string, data: HookEventData): Promise; + async emitAndCollect(event: string, data: HookEventData): Promise; + listHandlers(event?: string): string[]; + setDefaultFields(fields: Record): void; +} +``` + +#### CancellationToken — Cooperative Cancellation + +```typescript +class CancellationToken { + get isCancelled(): boolean; + get isGraceful(): boolean; + get isImmediate(): boolean; + requestGraceful(reason?: string): void; + requestImmediate(reason?: string): void; + reset(): void; + onCancel(callback: () => void): void; +} +``` + +**Six module interfaces (for module authors):** + +```typescript +interface Tool { + name: string; + description: string; + getSpec(): ToolSpec; + execute(params: Record): Promise; +} + +interface Provider { /* matching Rust Provider trait */ } +interface Orchestrator { /* matching Rust Orchestrator trait */ } +interface ContextManager { /* matching Rust ContextManager trait */ } +interface HookHandler { /* matching Rust HookHandler trait */ } +interface ApprovalProvider { /* matching Rust ApprovalProvider trait */ } +``` + +**Data model types — all typed, generated from Rust structs via `#[napi(object)]`:** + +```typescript +interface ToolSpec { + name: string; + description: string; + parameters: Record; // JSON Schema — intentionally loose +} + +interface ToolResult { + success: boolean; + output: string; + error?: string; + metadata?: Record; +} + +interface HookResult { + action: HookAction; + reason?: string; + contextInjection?: string; + contextInjectionRole?: ContextInjectionRole; + ephemeral?: boolean; + suppressOutput?: boolean; + userMessage?: string; + userMessageLevel?: UserMessageLevel; + userMessageSource?: string; + approvalPrompt?: string; + approvalOptions?: string[]; + approvalTimeout?: number; + approvalDefault?: ApprovalDefault; +} + +// Enums as string unions (TypeScript idiom) +type HookAction = 'continue' | 'inject_context' | 
'ask_user' | 'deny'; +type Role = 'system' | 'user' | 'assistant' | 'tool'; +type SessionState = 'created' | 'initialized' | 'running' | 'completed' | 'failed'; +``` + +**Naming convention:** camelCase methods per TypeScript idiom. Napi-RS `#[napi]` handles Rust snake_case → JS camelCase automatically. + +**Typing rule:** Typed interfaces everywhere except where the schema is genuinely dynamic (JSON Schema for tool parameters, arbitrary metadata bags). Those use `Record` — still better than `any` because it signals "this is a dictionary, not a class." + +### Async Bridging & Runtime + +**Core challenge:** Rust tokio ↔ Node.js libuv event loop. + +**Approach (mirrors Python bridge strategy):** +- Napi-RS `AsyncTask` and `Task` traits bridge async Rust → JS Promises +- Each async Rust method becomes a `#[napi]` async method that spawns a tokio future and returns `Promise` to JS +- Tokio runtime initialized lazily on first use, shared across all calls (same pattern as `pyo3-async-runtimes`) +- JS callback bridging uses Napi-RS `ThreadsafeFunction` — equivalent of PyO3's `Py` callback pattern + +**Hook handler bridging:** +- JS functions registered as hook handlers get wrapped in `JsHookHandlerBridge` (Rust struct, mirrors `PyHookHandlerBridge`) +- Bridge holds a `ThreadsafeFunction` reference to the JS callback +- When Rust `HookRegistry` fires, it calls through the bridge back into JS +- Both sync and async JS handlers supported (detect via Promise return type) + +**Error bridging:** +- Rust `AmplifierError` variants → JS `Error` subclasses with typed `code` properties +- JS exceptions in module callbacks → caught at Napi-RS boundary, converted to `Result::Err` +- Same error taxonomy as Python: `ProviderError`, `ToolError`, `SessionError`, etc. + +## Data Flow + +The data flow mirrors the Python bridge exactly: + +1. **Session creation:** TS `new AmplifierSession(config)` → Napi-RS boundary → Rust `Session::new()` +2. 
**Module mounting:** TS `coordinator.mountTool(name, tool)` → JS-side `Map` stores the TS object (not sent to Rust) +3. **Execution:** TS `session.execute(prompt)` → Rust kernel orchestrates → calls back into JS via `ThreadsafeFunction` when it needs Tool/Provider execution → JS module runs → result crosses back through Napi-RS → Rust continues +4. **Hook emission:** Rust kernel fires hook → `JsHookHandlerBridge` calls JS handler via `ThreadsafeFunction` → JS handler returns `HookResult` → Rust processes result +5. **Cancellation:** TS `cancellation.requestGraceful()` → Rust `AtomicBool` set → checked cooperatively during execution loops + +## Error Handling + +- **Rust errors** cross the FFI boundary as typed JS `Error` subclasses with a `code` property matching the Rust variant name (`ProviderError`, `ToolError`, `SessionError`, etc.) +- **JS exceptions** thrown inside module callbacks (Tool.execute, Provider.generate, etc.) are caught at the Napi-RS boundary and converted to Rust `Result::Err` — they do not crash the process +- **Async errors** in Promises are propagated correctly — a rejected Promise in a JS hook handler becomes an `Err` in the Rust `HookRegistry` emission +- **Type mismatches** at the boundary (wrong config shape, missing required fields) are caught by Napi-RS's automatic deserialization and reported as clear `TypeError`s with field paths + +## Testing Strategy + +**Test parity target:** Prove the Napi-RS bindings work correctly, not retest the Rust kernel (which has its own 312 tests). + +| Layer | What | Framework | Count (est.) | +|---|---|---|---| +| Binding smoke tests | Each class instantiates, properties return correct types, async methods return Promises | Vitest | ~20 | +| Session lifecycle tests | new → initialize → execute → cleanup with mock modules | Vitest | ~10 | +| Module interface tests | TS objects implementing Tool/Provider/etc. 
mount correctly, get called, return typed results | Vitest | ~15 | +| Async bridging tests | Concurrent operations, cancellation mid-execution, error propagation across FFI | Vitest | ~10 | +| Type fidelity tests | Config types, HookResult fields, error codes serialize/deserialize correctly across boundary | Vitest | ~10 | + +**~65 tests total** focused on the bridge layer. + +**Framework:** Vitest (modern, fast, native TS support, good async testing). + +**NOT tested at the TS layer:** Kernel correctness (Rust tests), orchestrator loop behavior (Python orchestrator module tests), gRPC transport (deferred). + +## Deliverables + +1. `bindings/node/` — Napi-RS crate with 4 typed classes, 6 module interfaces, full data model types +2. `.d.ts` type definitions — auto-generated from `#[napi]` annotations +3. `package.json` — publishable npm package +4. ~65 Vitest tests covering the binding layer +5. Dependency upgrades — pyo3 → 0.28.2, wasmtime → latest (with WASM bridge API fixes) + +## Explicitly Not In Scope + +- gRPC bridge fidelity fixes (27 `TODO(grpc-v2)` markers — separate effort) +- `process_hook_result()` ported to Rust (deferred, tracked below) +- Cross-language module resolver (Phase 4) +- npm publishing pipeline / CI/CD for npm (follow-up) + +## Tracked Future Debt + +| # | Item | Description | Trigger | +|---|------|-------------|---------| +| Future TODO #1 | Unified Rust Module Storage | Consolidate per-language module dicts (Python `mount_points`, TS `Map`) into Rust `Arc` slots on the Coordinator. Reduces N×M maintenance cost as languages × trait changes grow. Currently each language independently stores module objects in its own runtime. | Third language binding (Go/C#) added, or trait surface starts evolving again | +| Future TODO #2 | Rust-native `process_hook_result()` | Port hook result routing logic (context injection, approval gates, user messages, output suppression) from Python `_rust_wrappers.py:ModuleCoordinator` into the Rust kernel. 
Currently ~185 lines of Python that every orchestrator calls after every `hooks.emit()`. Requires `DisplaySystem` trait in Rust, wiring approval/context through Rust typed slots. | First TypeScript orchestrator written, or after TODO #1 lands (which solves the subsystem access problem) | +| Future TODO #3 | Split `bindings/node/src/lib.rs` | Split single-file Napi-RS binding into `src/session.rs`, `src/coordinator.rs`, `src/hooks.rs`, `src/cancellation.rs`, `src/types.rs` for navigability. Single-file pattern is proven from Python bridge but may outgrow maintainability. | File exceeds ~3,000 lines | + +## Key Design Decisions + +1. **Napi-RS in-process bindings** (not gRPC) — zero-overhead FFI, same pattern as PyO3 +2. **Hybrid coordinator pattern** — JS-side `Map` for module storage, Rust kernel for config/tracking/cancellation (mirrors Python, pragmatic for ship speed) +3. **Deferred gRPC bridge fidelity fixes** — TS in-process modules don't hit the wire, so no data loss; gRPC fixes are a separate effort +4. **Deferred `process_hook_result()` to Rust** — callable from Python only today; TS orchestrators are future use case; tracked as debt +5. **Single `lib.rs`** — YAGNI, split later when needed (Future TODO #3) +6. **Fully typed API surface** — typed interfaces for configs, results, events (not `object`/`any`) to maximize AI-assist and IDE value +7. 
**Dependency upgrades batched** — pyo3 + wasmtime security fixes in the first task since we're touching Cargo.toml anyway + +## Relationship to Other Phases + +- **Phase 1 (complete):** Python/PyO3 bridge — the pattern we're mirroring +- **Phase 2 (this design):** TypeScript/Napi-RS bridge +- **Phase 3 (future):** Full WASM module loading via wasmtime component model +- **Phase 4 (future):** Cross-language module resolver — auto-detect language, pick transport +- **Phase 5 (future):** Go (CGo) and C# (P/Invoke) SDKs \ No newline at end of file diff --git a/docs/plans/2026-03-04-phase2-napi-rs-typescript-implementation.md b/docs/plans/2026-03-04-phase2-napi-rs-typescript-implementation.md new file mode 100644 index 0000000..6c5817b --- /dev/null +++ b/docs/plans/2026-03-04-phase2-napi-rs-typescript-implementation.md @@ -0,0 +1,2135 @@ +# Cross-Language SDK Phase 2: TypeScript/Napi-RS Bindings — Implementation Plan + +> **Execution:** Use the subagent-driven-development workflow to implement this plan. + +**Goal:** Deliver TypeScript/Node.js bindings for the amplifier-core Rust kernel via Napi-RS, enabling TypeScript host apps and in-process modules — while batching two dependency security upgrades (pyo3, wasmtime). + +**Architecture:** A single Napi-RS crate at `bindings/node/` mirrors the proven Python/PyO3 bridge pattern. Four classes (`AmplifierSession`, `Coordinator`, `HookRegistry`, `CancellationToken`) wrap the same Rust kernel types. Six module interfaces (`Tool`, `Provider`, `Orchestrator`, `ContextManager`, `HookHandler`, `ApprovalProvider`) use `ThreadsafeFunction` for JS↔Rust callback bridging. A hybrid coordinator stores JS module objects in a JS-side `Map` while the Rust kernel handles config, tracking, and cancellation. + +**Tech Stack:** Rust + Napi-RS (`napi` 2.x, `napi-derive`, `napi-build`), TypeScript + Node.js, Vitest for testing, tokio for async runtime. 
+ +**Design doc:** `docs/plans/2026-03-04-phase2-napi-rs-typescript-design.md` + +--- + +## Orientation: What is this codebase? + +`amplifier-core` is a pure Rust kernel for modular AI agent orchestration. It has **zero** Python dependency — language bindings wrap it via FFI. The project structure: + +``` +amplifier-core/ +├── Cargo.toml # Workspace root (members: crates/amplifier-core, bindings/python) +├── crates/amplifier-core/ # The Rust kernel — all core types live here +│ └── src/ +│ ├── lib.rs # Re-exports everything +│ ├── session.rs # Session + SessionConfig +│ ├── coordinator.rs # Coordinator (module mount points) +│ ├── hooks.rs # HookRegistry (event dispatch) +│ ├── cancellation.rs # CancellationToken (cooperative cancel) +│ ├── traits.rs # 6 module traits: Tool, Provider, Orchestrator, ContextManager, HookHandler, ApprovalProvider +│ ├── models.rs # HookResult, ToolResult, HookAction, SessionState, etc. +│ ├── messages.rs # ChatRequest, ChatResponse, Message, Role, ToolSpec, etc. +│ ├── errors.rs # AmplifierError, ProviderError, ToolError, etc. +│ ├── events.rs # Event name constants (SESSION_START, TOOL_PRE, etc.) +│ └── bridges/wasm_tool.rs # WASM tool bridge (needs wasmtime upgrade fix) +├── bindings/python/ # PyO3 bridge — THE reference for our Napi-RS bridge +│ ├── Cargo.toml # pyo3 0.28 (needs bump to 0.28.2) +│ └── src/lib.rs # ~2,885 lines: PySession, PyCoordinator, PyHookRegistry, PyCancellationToken +└── bindings/node/ # ← WE ARE CREATING THIS +``` + +The Python bridge at `bindings/python/src/lib.rs` is the pattern we mirror for every task. + +--- + +## Task 0: Dependency Upgrades + +**Why:** pyo3 has a HIGH severity security fix, wasmtime has 8 Dependabot alerts. We batch these since we're touching Cargo.toml anyway. 
+ +**Files:** +- Modify: `bindings/python/Cargo.toml` (pyo3 version bump) +- Modify: `crates/amplifier-core/Cargo.toml` (wasmtime version bump) +- Modify: `crates/amplifier-core/src/bridges/wasm_tool.rs` (fix API breakage) + +### Step 1: Bump pyo3 to 0.28.2 + +Open `bindings/python/Cargo.toml`. Change: + +```toml +# FROM: +pyo3 = { version = "0.28", features = ["generate-import-lib"] } +pyo3-async-runtimes = { version = "0.28", features = ["tokio-runtime"] } + +# TO: +pyo3 = { version = "0.28.2", features = ["generate-import-lib"] } +pyo3-async-runtimes = { version = "0.28.2", features = ["tokio-runtime"] } +``` + +### Step 2: Bump wasmtime to latest + +Open `crates/amplifier-core/Cargo.toml`. Change: + +```toml +# FROM: +wasmtime = { version = "29", optional = true } + +# TO: +wasmtime = { version = "31", optional = true } +``` + +> **Note:** We target v31, not v42. The wasmtime crate on crates.io shows v31 as latest stable at time of writing. Check `cargo search wasmtime` to confirm the actual latest version and adjust accordingly. The key point is: bump from v29 to whatever latest stable is available. + +### Step 3: Fix WASM bridge API breakage + +After bumping wasmtime, there may be API changes. The WASM bridge is minimal — it only uses `Engine::default()`, `Module::new()`, and `Module::name()`. Open `crates/amplifier-core/src/bridges/wasm_tool.rs` and check if these APIs still compile. + +The current code (which should still work, but verify): + +```rust +pub fn from_bytes(wasm_bytes: &[u8]) -> Result> { + let engine = wasmtime::Engine::default(); + let module = wasmtime::Module::new(&engine, wasm_bytes)?; + let name = module.name().unwrap_or("wasm-tool").to_string(); + // ... +} +``` + +If `Module::name()` signature changed (e.g., returns `&str` vs `Option<&str>`), fix accordingly. The wasmtime API between v29→v31 is usually source-compatible for these basics. 
+ +### Step 4: Build and verify + +Run: +```bash +cd amplifier-core && cargo build --all-features 2>&1 +``` +Expected: Clean build with no errors. + +### Step 5: Run all Rust tests + +Run: +```bash +cd amplifier-core && cargo test --all 2>&1 +``` +Expected: All 312+ tests pass (the exact count may vary). + +### Step 6: Commit + +```bash +cd amplifier-core && git add bindings/python/Cargo.toml crates/amplifier-core/Cargo.toml crates/amplifier-core/src/bridges/wasm_tool.rs && git commit -m "chore: bump pyo3 to 0.28.2 and wasmtime to latest (security fixes)" +``` + +--- + +## Task 1: Napi-RS Scaffold + +**Why:** Create the empty `bindings/node/` crate with a single `#[napi]` function to prove the build pipeline works end-to-end: Rust compiles → native `.node` addon generated → `index.js` + `index.d.ts` auto-created → importable from Node.js. + +**Files:** +- Create: `bindings/node/Cargo.toml` +- Create: `bindings/node/src/lib.rs` +- Create: `bindings/node/build.rs` +- Create: `bindings/node/package.json` +- Create: `bindings/node/tsconfig.json` +- Create: `bindings/node/__tests__/smoke.test.ts` +- Modify: `Cargo.toml` (workspace root — add member) + +### Step 1: Add bindings/node to workspace members + +Open the workspace root `Cargo.toml`. 
Change: + +```toml +# FROM: +[workspace] +members = [ + "crates/amplifier-core", + "bindings/python", +] + +# TO: +[workspace] +members = [ + "crates/amplifier-core", + "bindings/python", + "bindings/node", +] +``` + +### Step 2: Create bindings/node/Cargo.toml + +Create the file `bindings/node/Cargo.toml`: + +```toml +[package] +name = "amplifier-core-node" +version = "1.0.10" +edition = "2021" +description = "Napi-RS bridge for amplifier-core Rust kernel" +license = "MIT" +publish = false + +[lib] +crate-type = ["cdylib"] + +[dependencies] +amplifier-core = { path = "../../crates/amplifier-core" } +napi = { version = "2", features = ["async", "serde-json", "napi9"] } +napi-derive = "2" +tokio = { version = "1", features = ["rt-multi-thread"] } +serde_json = "1" +uuid = { version = "1", features = ["v4"] } + +[build-dependencies] +napi-build = "2" +``` + +### Step 3: Create bindings/node/build.rs + +Create the file `bindings/node/build.rs`: + +```rust +extern crate napi_build; + +fn main() { + napi_build::setup(); +} +``` + +### Step 4: Create minimal bindings/node/src/lib.rs + +Create the file `bindings/node/src/lib.rs`: + +```rust +//! Napi-RS bridge for amplifier-core. +//! +//! This crate wraps the pure Rust kernel types and exposes them +//! as JavaScript/TypeScript classes via Napi-RS. It compiles into +//! a native `.node` addon that ships inside an npm package. +//! +//! # Exposed classes +//! +//! | TypeScript name | Rust wrapper | Inner type | +//! |-------------------------|---------------------------|-----------------------------------| +//! | `AmplifierSession` | `JsSession` | `amplifier_core::Session` | +//! | `HookRegistry` | `JsHookRegistry` | `amplifier_core::HookRegistry` | +//! | `CancellationToken` | `JsCancellationToken` | `amplifier_core::CancellationToken` | +//! | `Coordinator` | `JsCoordinator` | `amplifier_core::Coordinator` | + +#[macro_use] +extern crate napi_derive; + +/// Smoke test: returns a greeting string from the native addon. 
+/// Remove this once real bindings are in place. +#[napi] +pub fn hello() -> String { + "Hello from amplifier-core native addon!".to_string() +} +``` + +### Step 5: Create bindings/node/package.json + +Create the file `bindings/node/package.json`: + +```json +{ + "name": "amplifier-core", + "version": "1.0.10", + "description": "TypeScript/Node.js bindings for amplifier-core Rust kernel", + "main": "index.js", + "types": "index.d.ts", + "scripts": { + "build": "napi build --release --platform", + "build:debug": "napi build --platform", + "test": "vitest run" + }, + "napi": { + "name": "amplifier-core", + "triples": {} + }, + "license": "MIT", + "devDependencies": { + "@napi-rs/cli": "^2", + "vitest": "^3", + "typescript": "^5" + } +} +``` + +### Step 6: Create bindings/node/tsconfig.json + +Create the file `bindings/node/tsconfig.json`: + +```json +{ + "compilerOptions": { + "target": "ES2022", + "module": "node16", + "moduleResolution": "node16", + "strict": true, + "esModuleInterop": true, + "outDir": "dist", + "declaration": true, + "types": ["vitest/globals"] + }, + "include": ["__tests__/**/*.ts"] +} +``` + +### Step 7: Install npm dependencies and build + +Run: +```bash +cd amplifier-core/bindings/node && npm install && npm run build:debug 2>&1 +``` +Expected: Build succeeds. You should see `amplifier-core.linux-arm64-gnu.node` (or similar platform-specific name) in the directory. Napi-RS also generates `index.js` and `index.d.ts`. 
+ +### Step 8: Write the smoke test + +Create the file `bindings/node/__tests__/smoke.test.ts`: + +```typescript +import { describe, it, expect } from 'vitest'; +import { hello } from '../index.js'; + +describe('native addon smoke test', () => { + it('loads the native addon and calls hello()', () => { + const result = hello(); + expect(result).toBe('Hello from amplifier-core native addon!'); + }); +}); +``` + +### Step 9: Run the smoke test + +Run: +```bash +cd amplifier-core/bindings/node && npx vitest run 2>&1 +``` +Expected: 1 test passes. + +### Step 10: Commit + +```bash +cd amplifier-core && git add Cargo.toml bindings/node/ && git commit -m "feat(node): scaffold Napi-RS crate with smoke test" +``` + +--- + +## Task 2: Data Model Types + +**Why:** All other tasks depend on these types. Enums become TypeScript string unions via `#[napi(string_enum)]`. Structs become TypeScript interfaces via `#[napi(object)]`. This establishes the typed data contract across the FFI boundary. + +**Files:** +- Modify: `bindings/node/src/lib.rs` +- Create: `bindings/node/__tests__/types.test.ts` + +### Step 1: Write the failing test + +Create the file `bindings/node/__tests__/types.test.ts`: + +```typescript +import { describe, it, expect } from 'vitest'; +import { + HookAction, + SessionState, + ContextInjectionRole, + ApprovalDefault, + UserMessageLevel, + Role, +} from '../index.js'; + +describe('enum types', () => { + it('HookAction has all variants', () => { + expect(HookAction.Continue).toBe('Continue'); + expect(HookAction.Deny).toBe('Deny'); + expect(HookAction.Modify).toBe('Modify'); + expect(HookAction.InjectContext).toBe('InjectContext'); + expect(HookAction.AskUser).toBe('AskUser'); + }); + + it('SessionState has all variants', () => { + expect(SessionState.Running).toBe('Running'); + expect(SessionState.Completed).toBe('Completed'); + expect(SessionState.Failed).toBe('Failed'); + expect(SessionState.Cancelled).toBe('Cancelled'); + }); + + it('ContextInjectionRole has 
all variants', () => { + expect(ContextInjectionRole.System).toBe('System'); + expect(ContextInjectionRole.User).toBe('User'); + expect(ContextInjectionRole.Assistant).toBe('Assistant'); + }); + + it('ApprovalDefault has all variants', () => { + expect(ApprovalDefault.Allow).toBe('Allow'); + expect(ApprovalDefault.Deny).toBe('Deny'); + }); + + it('UserMessageLevel has all variants', () => { + expect(UserMessageLevel.Info).toBe('Info'); + expect(UserMessageLevel.Warning).toBe('Warning'); + expect(UserMessageLevel.Error).toBe('Error'); + }); + + it('Role has all variants', () => { + expect(Role.System).toBe('System'); + expect(Role.User).toBe('User'); + expect(Role.Assistant).toBe('Assistant'); + expect(Role.Tool).toBe('Tool'); + }); +}); +``` + +### Step 2: Run test to verify it fails + +Run: +```bash +cd amplifier-core/bindings/node && npx vitest run __tests__/types.test.ts 2>&1 +``` +Expected: FAIL — imports don't exist yet. + +### Step 3: Implement the enums in lib.rs + +Open `bindings/node/src/lib.rs`. Add the enum definitions after the `hello()` function: + +```rust +// --------------------------------------------------------------------------- +// Enums — TypeScript string enums via #[napi(string_enum)] +// --------------------------------------------------------------------------- + +/// Action type for hook results. +#[napi(string_enum)] +pub enum HookAction { + Continue, + Deny, + Modify, + InjectContext, + AskUser, +} + +/// Session lifecycle state. +#[napi(string_enum)] +pub enum SessionState { + Running, + Completed, + Failed, + Cancelled, +} + +/// Role for context injection messages. +#[napi(string_enum)] +pub enum ContextInjectionRole { + System, + User, + Assistant, +} + +/// Default decision on approval timeout. +#[napi(string_enum)] +pub enum ApprovalDefault { + Allow, + Deny, +} + +/// Severity level for user messages from hooks. +#[napi(string_enum)] +pub enum UserMessageLevel { + Info, + Warning, + Error, +} + +/// Message role in conversation. 
+#[napi(string_enum)] +pub enum Role { +    System, +    Developer, +    User, +    Assistant, +    Function, +    Tool, +} + +// --------------------------------------------------------------------------- +// Conversion helpers: Napi enums ↔ amplifier_core enums +// --------------------------------------------------------------------------- + +impl From<HookAction> for amplifier_core::models::HookAction { +    fn from(val: HookAction) -> Self { +        match val { +            HookAction::Continue => amplifier_core::models::HookAction::Continue, +            HookAction::Deny => amplifier_core::models::HookAction::Deny, +            HookAction::Modify => amplifier_core::models::HookAction::Modify, +            HookAction::InjectContext => amplifier_core::models::HookAction::InjectContext, +            HookAction::AskUser => amplifier_core::models::HookAction::AskUser, +        } +    } +} + +impl From<amplifier_core::models::HookAction> for HookAction { +    fn from(val: amplifier_core::models::HookAction) -> Self { +        match val { +            amplifier_core::models::HookAction::Continue => HookAction::Continue, +            amplifier_core::models::HookAction::Deny => HookAction::Deny, +            amplifier_core::models::HookAction::Modify => HookAction::Modify, +            amplifier_core::models::HookAction::InjectContext => HookAction::InjectContext, +            amplifier_core::models::HookAction::AskUser => HookAction::AskUser, +        } +    } +} + +impl From<SessionState> for amplifier_core::models::SessionState { +    fn from(val: SessionState) -> Self { +        match val { +            SessionState::Running => amplifier_core::models::SessionState::Running, +            SessionState::Completed => amplifier_core::models::SessionState::Completed, +            SessionState::Failed => amplifier_core::models::SessionState::Failed, +            SessionState::Cancelled => amplifier_core::models::SessionState::Cancelled, +        } +    } +} + +impl From<amplifier_core::models::SessionState> for SessionState { +    fn from(val: amplifier_core::models::SessionState) -> Self { +        match val { +            amplifier_core::models::SessionState::Running => SessionState::Running, +            amplifier_core::models::SessionState::Completed => SessionState::Completed, +            amplifier_core::models::SessionState::Failed
=> SessionState::Failed, +            amplifier_core::models::SessionState::Cancelled => SessionState::Cancelled, +        } +    } +} + +// --------------------------------------------------------------------------- +// Structs — TypeScript interfaces via #[napi(object)] +// --------------------------------------------------------------------------- + +/// Tool execution result — crosses the FFI boundary as a plain JS object. +#[napi(object)] +pub struct JsToolResult { +    pub success: bool, +    pub output: Option<String>, +    pub error: Option<String>, +} + +/// Tool specification — describes a tool's interface. +#[napi(object)] +pub struct JsToolSpec { +    pub name: String, +    pub description: Option<String>, +    /// JSON Schema parameters as a JSON string. +    pub parameters_json: String, +} + +/// Hook result — the return value from hook handlers. +#[napi(object)] +pub struct JsHookResult { +    pub action: HookAction, +    pub reason: Option<String>, +    pub context_injection: Option<String>, +    pub context_injection_role: Option<ContextInjectionRole>, +    pub ephemeral: Option<bool>, +    pub suppress_output: Option<bool>, +    pub user_message: Option<String>, +    pub user_message_level: Option<UserMessageLevel>, +    pub user_message_source: Option<String>, +    pub approval_prompt: Option<String>, +    pub approval_timeout: Option<f64>, +    pub approval_default: Option<ApprovalDefault>, +} + +/// Session configuration — typed config for AmplifierSession constructor. +#[napi(object)] +pub struct JsSessionConfig { +    /// Full config as a JSON string. The Rust kernel parses and validates it. +    pub config_json: String, +} +``` + +### Step 4: Rebuild and run tests + +Run: +```bash +cd amplifier-core/bindings/node && npm run build:debug && npx vitest run __tests__/types.test.ts 2>&1 +``` +Expected: All enum tests pass. + +### Step 5: Commit + +```bash +cd amplifier-core && git add bindings/node/ && git commit -m "feat(node): add data model types — enums and structs" +``` + +--- + +## Task 3: CancellationToken + +**Why:** Simplest of the four classes — no async, no subsystem dependencies. Perfect starting point to prove the `#[napi]` class pattern works.
+ +**Reference:** The Rust type is `amplifier_core::CancellationToken` in `crates/amplifier-core/src/cancellation.rs`. It uses `Arc>` internally and is already `Clone + Send + Sync`. The Python equivalent is `PyCancellationToken` in `bindings/python/src/lib.rs`. + +**Files:** +- Modify: `bindings/node/src/lib.rs` +- Create: `bindings/node/__tests__/cancellation.test.ts` + +### Step 1: Write the failing test + +Create the file `bindings/node/__tests__/cancellation.test.ts`: + +```typescript +import { describe, it, expect } from 'vitest'; +import { JsCancellationToken } from '../index.js'; + +describe('CancellationToken', () => { + it('creates with default state (not cancelled)', () => { + const token = new JsCancellationToken(); + expect(token.isCancelled).toBe(false); + expect(token.isGraceful).toBe(false); + expect(token.isImmediate).toBe(false); + }); + + it('requestGraceful transitions to graceful', () => { + const token = new JsCancellationToken(); + token.requestGraceful(); + expect(token.isCancelled).toBe(true); + expect(token.isGraceful).toBe(true); + expect(token.isImmediate).toBe(false); + }); + + it('requestImmediate transitions to immediate', () => { + const token = new JsCancellationToken(); + token.requestImmediate(); + expect(token.isCancelled).toBe(true); + expect(token.isImmediate).toBe(true); + }); + + it('graceful then immediate escalates', () => { + const token = new JsCancellationToken(); + token.requestGraceful(); + expect(token.isGraceful).toBe(true); + token.requestImmediate(); + expect(token.isImmediate).toBe(true); + }); + + it('reset returns to uncancelled state', () => { + const token = new JsCancellationToken(); + token.requestGraceful(); + expect(token.isCancelled).toBe(true); + token.reset(); + expect(token.isCancelled).toBe(false); + }); + + it('requestGraceful with reason', () => { + const token = new JsCancellationToken(); + token.requestGraceful('user pressed Ctrl+C'); + expect(token.isGraceful).toBe(true); + }); + + 
it('requestImmediate with reason', () => { + const token = new JsCancellationToken(); + token.requestImmediate('timeout exceeded'); + expect(token.isImmediate).toBe(true); + }); +}); +``` + +### Step 2: Run test to verify it fails + +Run: +```bash +cd amplifier-core/bindings/node && npx vitest run __tests__/cancellation.test.ts 2>&1 +``` +Expected: FAIL — `JsCancellationToken` doesn't exist yet. + +### Step 3: Implement JsCancellationToken + +Open `bindings/node/src/lib.rs`. Add: + +```rust +use std::sync::Arc; + +// --------------------------------------------------------------------------- +// JsCancellationToken — wraps amplifier_core::CancellationToken +// --------------------------------------------------------------------------- + +/// Cooperative cancellation token. +/// +/// State machine: None → Graceful → Immediate. +/// Thread-safe: backed by Arc in the Rust kernel. +#[napi] +pub struct JsCancellationToken { + inner: amplifier_core::CancellationToken, +} + +#[napi] +impl JsCancellationToken { + /// Create a new token in the uncancelled state. + #[napi(constructor)] + pub fn new() -> Self { + Self { + inner: amplifier_core::CancellationToken::new(), + } + } + + /// Create from an existing Rust CancellationToken (internal use). + pub fn from_inner(inner: amplifier_core::CancellationToken) -> Self { + Self { inner } + } + + /// True if any cancellation has been requested (graceful or immediate). + #[napi(getter)] + pub fn is_cancelled(&self) -> bool { + self.inner.is_cancelled() + } + + /// True if graceful cancellation (wait for current tools to complete). + #[napi(getter)] + pub fn is_graceful(&self) -> bool { + self.inner.is_graceful() + } + + /// True if immediate cancellation (stop now). + #[napi(getter)] + pub fn is_immediate(&self) -> bool { + self.inner.is_immediate() + } + + /// Request graceful cancellation. Waits for current tools to complete. 
+    #[napi] +    pub fn request_graceful(&self, _reason: Option<String>) { +        self.inner.request_graceful(); +    } + +    /// Request immediate cancellation. Stops as soon as possible. +    #[napi] +    pub fn request_immediate(&self, _reason: Option<String>) { +        self.inner.request_immediate(); +    } + +    /// Reset cancellation state. Called at turn boundaries. +    #[napi] +    pub fn reset(&self) { +        self.inner.reset(); +    } +} +``` + +> **Note:** The `_reason` parameter is accepted but not yet stored (matching the current Rust kernel API which doesn't have a reason field). This is forward-compatible with a future enhancement. + +### Step 4: Rebuild and run tests + +Run: +```bash +cd amplifier-core/bindings/node && npm run build:debug && npx vitest run __tests__/cancellation.test.ts 2>&1 +``` +Expected: All 7 tests pass. + +### Step 5: Commit + +```bash +cd amplifier-core && git add bindings/node/ && git commit -m "feat(node): add CancellationToken binding" +``` + +--- + +## Task 4: HookRegistry + +**Why:** The hook system is the event backbone of the kernel. This task wraps `amplifier_core::HookRegistry` and implements `JsHookHandlerBridge` — the struct that lets JS functions act as Rust `HookHandler` trait objects via `ThreadsafeFunction`. + +**Reference:** The Rust type is `amplifier_core::HookRegistry` in `crates/amplifier-core/src/hooks.rs`. The Python equivalent is `PyHookRegistry` + `PyHookHandlerBridge` in `bindings/python/src/lib.rs` (lines 53–181).
+ +**Files:** +- Modify: `bindings/node/src/lib.rs` +- Create: `bindings/node/__tests__/hooks.test.ts` + +### Step 1: Write the failing test + +Create the file `bindings/node/__tests__/hooks.test.ts`: + +```typescript +import { describe, it, expect } from 'vitest'; +import { JsHookRegistry, HookAction } from '../index.js'; + +describe('HookRegistry', () => { + it('creates empty registry', () => { + const registry = new JsHookRegistry(); + const handlers = registry.listHandlers(); + expect(Object.keys(handlers).length).toBe(0); + }); + + it('emits with no handlers returns Continue', async () => { + const registry = new JsHookRegistry(); + const result = await registry.emit('test:event', '{}'); + expect(result.action).toBe(HookAction.Continue); + }); + + it('registers and emits to a JS handler', async () => { + const registry = new JsHookRegistry(); + let handlerCalled = false; + let receivedEvent = ''; + + registry.register('test:event', (_event: string, _data: string) => { + handlerCalled = true; + receivedEvent = _event; + return JSON.stringify({ action: 'continue' }); + }, 0, 'test-handler'); + + await registry.emit('test:event', JSON.stringify({ key: 'value' })); + expect(handlerCalled).toBe(true); + expect(receivedEvent).toBe('test:event'); + }); + + it('listHandlers returns registered handler names', () => { + const registry = new JsHookRegistry(); + registry.register('tool:pre', (_e: string, _d: string) => { + return JSON.stringify({ action: 'continue' }); + }, 0, 'my-hook'); + + const handlers = registry.listHandlers(); + expect(handlers['tool:pre']).toBeDefined(); + expect(handlers['tool:pre']).toContain('my-hook'); + }); + + it('handler returning deny stops pipeline', async () => { + const registry = new JsHookRegistry(); + registry.register('test:event', (_e: string, _d: string) => { + return JSON.stringify({ action: 'deny', reason: 'blocked' }); + }, 0, 'denier'); + + const result = await registry.emit('test:event', '{}'); + 
expect(result.action).toBe(HookAction.Deny); + expect(result.reason).toBe('blocked'); + }); + + it('setDefaultFields merges into emit data', async () => { + const registry = new JsHookRegistry(); + let receivedData = ''; + + registry.register('test:event', (_e: string, data: string) => { + receivedData = data; + return JSON.stringify({ action: 'continue' }); + }, 0, 'capture'); + + registry.setDefaultFields(JSON.stringify({ session_id: 'test-123' })); + await registry.emit('test:event', JSON.stringify({ custom: true })); + + const parsed = JSON.parse(receivedData); + expect(parsed.session_id).toBe('test-123'); + expect(parsed.custom).toBe(true); + }); +}); +``` + +### Step 2: Run test to verify it fails + +Run: +```bash +cd amplifier-core/bindings/node && npx vitest run __tests__/hooks.test.ts 2>&1 +``` +Expected: FAIL — `JsHookRegistry` doesn't exist yet. + +### Step 3: Implement JsHookHandlerBridge and JsHookRegistry + +Open `bindings/node/src/lib.rs`. Add these imports at the top (merge with existing): + +```rust +use std::collections::HashMap; +use std::future::Future; +use std::pin::Pin; + +use napi::threadsafe_function::{ThreadsafeFunction, ErrorStrategy, ThreadSafeCallContext}; +use napi::bindgen_prelude::*; + +use amplifier_core::errors::HookError; +use amplifier_core::models::HookResult; +use amplifier_core::traits::HookHandler; +``` + +Then add the bridge and registry: + +```rust +// --------------------------------------------------------------------------- +// JsHookHandlerBridge — wraps a JS callback as a Rust HookHandler +// --------------------------------------------------------------------------- + +/// Bridges a JavaScript function into the Rust HookHandler trait. +/// +/// Holds a ThreadsafeFunction reference to the JS callback. When the Rust +/// HookRegistry fires an event, it calls through this bridge back into JS. 
+///
+/// The JS callback signature is: (event: string, data: string) => string
+/// where `data` and the return value are JSON strings.
+struct JsHookHandlerBridge {
+    callback: ThreadsafeFunction<(String, String), ErrorStrategy::Fatal>,
+}
+
+unsafe impl Send for JsHookHandlerBridge {}
+unsafe impl Sync for JsHookHandlerBridge {}
+
+impl HookHandler for JsHookHandlerBridge {
+    fn handle(
+        &self,
+        event: &str,
+        data: serde_json::Value,
+    ) -> Pin<Box<dyn Future<Output = Result<HookResult, HookError>> + Send + '_>> {
+        let event = event.to_string();
+        let data_str = serde_json::to_string(&data).unwrap_or_else(|_| "{}".to_string());
+        let callback = self.callback.clone();
+
+        Box::pin(async move {
+            // Call into JS via ThreadsafeFunction
+            let result_str: String = callback
+                .call_async((event.clone(), data_str))
+                .await
+                .map_err(|e| HookError::HandlerFailed {
+                    message: format!("JS hook handler error: {e}"),
+                    handler_name: None,
+                })?;
+
+            // Parse the JSON string returned by JS into a HookResult
+            let hook_result: HookResult =
+                serde_json::from_str(&result_str).unwrap_or_else(|e| {
+                    log::warn!(
+                        "Failed to parse JS hook handler result (defaulting to Continue): {e}"
+                    );
+                    HookResult::default()
+                });
+
+            Ok(hook_result)
+        })
+    }
+}
+
+// ---------------------------------------------------------------------------
+// JsHookRegistry — wraps amplifier_core::HookRegistry
+// ---------------------------------------------------------------------------
+
+/// Hook event dispatch registry.
+///
+/// Handlers execute sequentially by priority. Deny short-circuits the chain.
+#[napi]
+pub struct JsHookRegistry {
+    pub(crate) inner: Arc<amplifier_core::HookRegistry>,
+}
+
+#[napi]
+impl JsHookRegistry {
+    /// Create an empty hook registry.
+    #[napi(constructor)]
+    pub fn new() -> Self {
+        Self {
+            inner: Arc::new(amplifier_core::HookRegistry::new()),
+        }
+    }
+
+    /// Create from an existing Rust HookRegistry (internal use). 
+    pub fn from_inner(inner: &amplifier_core::HookRegistry) -> Self {
+        // Note: HookRegistry is not Clone, so we can't wrap an existing one.
+        // For coordinator integration, we'll need a different approach.
+        // For now, this creates a new one.
+        Self {
+            inner: Arc::new(amplifier_core::HookRegistry::new()),
+        }
+    }
+
+    /// Register a JS function as a hook handler.
+    ///
+    /// The callback signature is: (event: string, dataJson: string) => string
+    /// It must return a JSON string of a HookResult.
+    #[napi(ts_args_type = "event: string, handler: (event: string, dataJson: string) => string, priority: number, name: string")]
+    pub fn register(
+        &self,
+        event: String,
+        handler: JsFunction,
+        priority: i32,
+        name: String,
+    ) -> Result<()> {
+        // Create a ThreadsafeFunction from the JS callback
+        let tsfn: ThreadsafeFunction<(String, String), ErrorStrategy::Fatal> = handler
+            .create_threadsafe_function(0, |ctx: ThreadSafeCallContext<(String, String)>| {
+                let env = ctx.env;
+                let (event, data) = ctx.value;
+                Ok(vec![
+                    env.create_string(&event)?.into_unknown(),
+                    env.create_string(&data)?.into_unknown(),
+                ])
+            })?;
+
+        let bridge = Arc::new(JsHookHandlerBridge { callback: tsfn });
+
+        self.inner
+            .register(&event, bridge, priority, Some(name));
+
+        Ok(())
+    }
+
+    /// Emit an event. Returns the aggregated HookResult as a JsHookResult.
+    ///
+    /// `data_json` is a JSON string of the event payload.
+    #[napi]
+    pub async fn emit(&self, event: String, data_json: String) -> Result<JsHookResult> {
+        let data: serde_json::Value =
+            serde_json::from_str(&data_json).unwrap_or(serde_json::json!({}));
+
+        let result = self.inner.emit(&event, data).await;
+
+        Ok(hook_result_to_js(result))
+    }
+
+    /// List all registered handlers, grouped by event name.
+    ///
+    /// Returns an object where keys are event names and values are arrays of handler names. 
+    #[napi]
+    pub fn list_handlers(&self) -> HashMap<String, Vec<String>> {
+        self.inner.list_handlers(None)
+    }
+
+    /// Set default fields merged into every emit() call.
+    ///
+    /// `defaults_json` is a JSON string of the default fields.
+    #[napi]
+    pub fn set_default_fields(&self, defaults_json: String) {
+        if let Ok(defaults) = serde_json::from_str(&defaults_json) {
+            self.inner.set_default_fields(defaults);
+        }
+    }
+}
+
+/// Convert a Rust HookResult to a JS-friendly JsHookResult.
+fn hook_result_to_js(result: HookResult) -> JsHookResult {
+    JsHookResult {
+        action: result.action.into(),
+        reason: result.reason,
+        context_injection: result.context_injection,
+        context_injection_role: result.context_injection_role.into(),
+        ephemeral: Some(result.ephemeral),
+        suppress_output: Some(result.suppress_output),
+        user_message: result.user_message,
+        user_message_level: Some(result.user_message_level.into()),
+        user_message_source: result.user_message_source,
+        approval_prompt: result.approval_prompt,
+        approval_timeout: Some(result.approval_timeout),
+        approval_default: Some(result.approval_default.into()),
+    }
+}
+```
+
+> **Note:** You will also need `From` implementations for `ContextInjectionRole`, `UserMessageLevel`, and `ApprovalDefault` following the same pattern as the `HookAction` converters added in Task 2. Add those conversion impls alongside the existing ones.
+
+### Step 4: Rebuild and run tests
+
+Run:
+```bash
+cd amplifier-core/bindings/node && npm run build:debug && npx vitest run __tests__/hooks.test.ts 2>&1
+```
+Expected: All 6 tests pass.
+
+### Step 5: Commit
+
+```bash
+cd amplifier-core && git add bindings/node/ && git commit -m "feat(node): add HookRegistry binding with JS handler bridge"
+```
+
+---
+
+## Task 5: Coordinator
+
+**Why:** The Coordinator is the central hub — it holds module mount points, capabilities, the hook registry, the cancellation token, and config. 
This is the "hybrid coordinator" pattern: JS-side storage for TS module objects, Rust kernel for everything else. + +**Reference:** The Rust type is `amplifier_core::Coordinator` in `crates/amplifier-core/src/coordinator.rs`. The Python equivalent is `PyCoordinator` in `bindings/python/src/lib.rs`. + +**Files:** +- Modify: `bindings/node/src/lib.rs` +- Create: `bindings/node/__tests__/coordinator.test.ts` + +### Step 1: Write the failing test + +Create the file `bindings/node/__tests__/coordinator.test.ts`: + +```typescript +import { describe, it, expect } from 'vitest'; +import { JsCoordinator } from '../index.js'; + +describe('Coordinator', () => { + it('creates with empty config', () => { + const coord = new JsCoordinator('{}'); + expect(coord.toolNames).toEqual([]); + expect(coord.providerNames).toEqual([]); + expect(coord.hasOrchestrator).toBe(false); + expect(coord.hasContext).toBe(false); + }); + + it('registers and retrieves capabilities', () => { + const coord = new JsCoordinator('{}'); + coord.registerCapability('streaming', JSON.stringify(true)); + const cap = coord.getCapability('streaming'); + expect(cap).toBe('true'); + }); + + it('getCapability returns null for missing', () => { + const coord = new JsCoordinator('{}'); + expect(coord.getCapability('nonexistent')).toBeNull(); + }); + + it('provides access to hooks subsystem', () => { + const coord = new JsCoordinator('{}'); + const hooks = coord.hooks; + expect(hooks).toBeDefined(); + expect(typeof hooks.listHandlers).toBe('function'); + }); + + it('provides access to cancellation subsystem', () => { + const coord = new JsCoordinator('{}'); + const cancel = coord.cancellation; + expect(cancel).toBeDefined(); + expect(cancel.isCancelled).toBe(false); + }); + + it('resetTurn resets turn tracking', () => { + const coord = new JsCoordinator('{}'); + // Should not throw + coord.resetTurn(); + }); + + it('toDict returns coordinator state', () => { + const coord = new JsCoordinator('{}'); + const dict = 
coord.toDict();
+    expect(dict).toHaveProperty('tools');
+    expect(dict).toHaveProperty('providers');
+    expect(dict).toHaveProperty('has_orchestrator');
+    expect(dict).toHaveProperty('has_context');
+    expect(dict).toHaveProperty('capabilities');
+  });
+
+  it('config returns original config', () => {
+    const configJson = JSON.stringify({ session: { orchestrator: 'test' } });
+    const coord = new JsCoordinator(configJson);
+    const config = coord.config;
+    expect(config).toBeDefined();
+  });
+});
+```
+
+### Step 2: Run test to verify it fails
+
+Run:
+```bash
+cd amplifier-core/bindings/node && npx vitest run __tests__/coordinator.test.ts 2>&1
+```
+Expected: FAIL — `JsCoordinator` doesn't exist yet.
+
+### Step 3: Implement JsCoordinator
+
+Open `bindings/node/src/lib.rs`. Add:
+
+```rust
+// ---------------------------------------------------------------------------
+// JsCoordinator — wraps amplifier_core::Coordinator
+// ---------------------------------------------------------------------------
+
+/// Central coordination hub for module mount points, capabilities, and services.
+///
+/// The hybrid coordinator pattern: JS-side storage for TS module objects
+/// (tools, providers, orchestrator, context), Rust kernel for config,
+/// tracking, hooks, and cancellation.
+#[napi]
+pub struct JsCoordinator {
+    pub(crate) inner: Arc<amplifier_core::Coordinator>,
+}
+
+#[napi]
+impl JsCoordinator {
+    /// Create a new coordinator with the given config JSON.
+    #[napi(constructor)]
+    pub fn new(config_json: String) -> Result<Self> {
+        let config: HashMap<String, serde_json::Value> =
+            serde_json::from_str(&config_json).unwrap_or_default();
+        Ok(Self {
+            inner: Arc::new(amplifier_core::Coordinator::new(config)),
+        })
+    }
+
+    /// Names of all mounted tools (from the Rust kernel side).
+    #[napi(getter)]
+    pub fn tool_names(&self) -> Vec<String> {
+        self.inner.tool_names()
+    }
+
+    /// Names of all mounted providers (from the Rust kernel side). 
+    #[napi(getter)]
+    pub fn provider_names(&self) -> Vec<String> {
+        self.inner.provider_names()
+    }
+
+    /// Whether an orchestrator is mounted.
+    #[napi(getter)]
+    pub fn has_orchestrator(&self) -> bool {
+        self.inner.has_orchestrator()
+    }
+
+    /// Whether a context manager is mounted.
+    #[napi(getter)]
+    pub fn has_context(&self) -> bool {
+        self.inner.has_context()
+    }
+
+    /// Register a capability (inter-module communication).
+    #[napi]
+    pub fn register_capability(&self, name: String, value_json: String) {
+        if let Ok(value) = serde_json::from_str(&value_json) {
+            self.inner.register_capability(&name, value);
+        }
+    }
+
+    /// Get a registered capability. Returns null if not found.
+    #[napi]
+    pub fn get_capability(&self, name: String) -> Option<String> {
+        self.inner
+            .get_capability(&name)
+            .map(|v| serde_json::to_string(&v).unwrap_or_default())
+    }
+
+    /// Access the hook registry subsystem.
+    #[napi(getter)]
+    pub fn hooks(&self) -> JsHookRegistry {
+        // Note: This creates a new JsHookRegistry wrapper. For the coordinator's
+        // internal hooks to be shared, we need Arc<HookRegistry> access. The Coordinator exposes
+        // hooks() as &HookRegistry. For now, we create a separate registry.
+        // TODO: Share the actual HookRegistry once we have Arc<HookRegistry>.
+        JsHookRegistry::from_inner(self.inner.hooks())
+    }
+
+    /// Access the cancellation token subsystem.
+    #[napi(getter)]
+    pub fn cancellation(&self) -> JsCancellationToken {
+        JsCancellationToken::from_inner(self.inner.cancellation().clone())
+    }
+
+    /// Session configuration as JSON string.
+    #[napi(getter)]
+    pub fn config(&self) -> String {
+        serde_json::to_string(self.inner.config()).unwrap_or_else(|_| "{}".to_string())
+    }
+
+    /// Reset per-turn tracking. Call at turn boundaries.
+    #[napi]
+    pub fn reset_turn(&self) {
+        self.inner.reset_turn();
+    }
+
+    /// Return coordinator state as a JSON-compatible object.
+    #[napi]
+    pub fn to_dict(&self) -> HashMap<String, serde_json::Value> {
+        self.inner.to_dict()
+    }
+
+    /// Run all cleanup functions. 
+ #[napi] + pub async fn cleanup(&self) { + self.inner.cleanup().await; + } +} +``` + +> **Important Note:** The `hooks()` getter currently creates a wrapper but cannot share the coordinator's internal `HookRegistry` because `Coordinator::hooks()` returns `&HookRegistry` (a reference). The `JsHookRegistry` needs to own or share the registry via `Arc`. This is a known limitation that gets resolved in Task 6 when the session wires everything together. For Task 5 tests, the coordinator's own hooks will work for capability/config tests, and the hooks getter returns a working (but separate) registry. Add a TODO comment in the code. + +### Step 4: Rebuild and run tests + +Run: +```bash +cd amplifier-core/bindings/node && npm run build:debug && npx vitest run __tests__/coordinator.test.ts 2>&1 +``` +Expected: All 9 tests pass. + +### Step 5: Commit + +```bash +cd amplifier-core && git add bindings/node/ && git commit -m "feat(node): add Coordinator binding with hybrid pattern" +``` + +--- + +## Task 6: AmplifierSession + +**Why:** The session is the top-level entry point for TypeScript consumers: `new AmplifierSession(config) → initialize() → execute(prompt) → cleanup()`. It wires together the Coordinator, HookRegistry, and CancellationToken. + +**Reference:** The Rust type is `amplifier_core::Session` in `crates/amplifier-core/src/session.rs`. The Python equivalent is `PySession` in `bindings/python/src/lib.rs` (lines 200–600+). 
+ +**Files:** +- Modify: `bindings/node/src/lib.rs` +- Create: `bindings/node/__tests__/session.test.ts` + +### Step 1: Write the failing test + +Create the file `bindings/node/__tests__/session.test.ts`: + +```typescript +import { describe, it, expect } from 'vitest'; +import { JsAmplifierSession } from '../index.js'; + +describe('AmplifierSession', () => { + const validConfig = JSON.stringify({ + session: { + orchestrator: 'loop-basic', + context: 'context-simple', + }, + }); + + it('creates with valid config and generates session ID', () => { + const session = new JsAmplifierSession(validConfig); + expect(session.sessionId).toBeTruthy(); + expect(session.sessionId.length).toBeGreaterThan(0); + }); + + it('creates with custom session ID', () => { + const session = new JsAmplifierSession(validConfig, 'custom-id'); + expect(session.sessionId).toBe('custom-id'); + }); + + it('creates with parent ID', () => { + const session = new JsAmplifierSession(validConfig, undefined, 'parent-123'); + expect(session.parentId).toBe('parent-123'); + }); + + it('parentId is null when no parent', () => { + const session = new JsAmplifierSession(validConfig); + expect(session.parentId).toBeNull(); + }); + + it('starts as not initialized', () => { + const session = new JsAmplifierSession(validConfig); + expect(session.isInitialized).toBe(false); + }); + + it('status starts as running', () => { + const session = new JsAmplifierSession(validConfig); + expect(session.status).toBe('running'); + }); + + it('provides access to coordinator', () => { + const session = new JsAmplifierSession(validConfig); + const coord = session.coordinator; + expect(coord).toBeDefined(); + }); + + it('rejects empty config', () => { + expect(() => new JsAmplifierSession('{}')).toThrow(); + }); + + it('rejects config without orchestrator', () => { + const badConfig = JSON.stringify({ + session: { context: 'context-simple' }, + }); + expect(() => new JsAmplifierSession(badConfig)).toThrow(/orchestrator/); + }); 
+
+  it('rejects config without context', () => {
+    const badConfig = JSON.stringify({
+      session: { orchestrator: 'loop-basic' },
+    });
+    expect(() => new JsAmplifierSession(badConfig)).toThrow(/context/);
+  });
+
+  it('cleanup clears initialized flag', async () => {
+    const session = new JsAmplifierSession(validConfig);
+    await session.cleanup();
+    expect(session.isInitialized).toBe(false);
+  });
+});
+```
+
+### Step 2: Run test to verify it fails
+
+Run:
+```bash
+cd amplifier-core/bindings/node && npx vitest run __tests__/session.test.ts 2>&1
+```
+Expected: FAIL — `JsAmplifierSession` doesn't exist yet.
+
+### Step 3: Implement JsAmplifierSession
+
+Open `bindings/node/src/lib.rs`. Add:
+
+```rust
+use std::sync::Mutex;
+
+// ---------------------------------------------------------------------------
+// JsAmplifierSession — wraps amplifier_core::Session
+// ---------------------------------------------------------------------------
+
+/// Primary entry point for TypeScript consumers.
+///
+/// Lifecycle: new(config) → initialize() → execute(prompt) → cleanup().
+#[napi]
+pub struct JsAmplifierSession {
+    inner: Arc<tokio::sync::Mutex<amplifier_core::Session>>,
+    /// Cached session_id (avoids locking inner for every access).
+    cached_session_id: String,
+    /// Cached parent_id.
+    cached_parent_id: Option<String>,
+    /// Config JSON for coordinator construction.
+    config_json: String,
+}
+
+#[napi]
+impl JsAmplifierSession {
+    /// Create a new session.
+    ///
+    /// `config_json` must be a JSON string with at minimum:
+    /// `{ "session": { "orchestrator": "...", "context": "..." 
} }` `
+    #[napi(constructor)]
+    pub fn new(
+        config_json: String,
+        session_id: Option<String>,
+        parent_id: Option<String>,
+    ) -> Result<Self> {
+        let value: serde_json::Value = serde_json::from_str(&config_json)
+            .map_err(|e| Error::from_reason(format!("Invalid config JSON: {e}")))?;
+
+        let session_config = amplifier_core::SessionConfig::from_value(value)
+            .map_err(|e| Error::from_reason(format!("{e}")))?;
+
+        let session = amplifier_core::Session::new(
+            session_config,
+            session_id.clone(),
+            parent_id.clone(),
+        );
+
+        let actual_id = session.session_id().to_string();
+        let actual_parent = session.parent_id().map(|s| s.to_string());
+
+        Ok(Self {
+            inner: Arc::new(tokio::sync::Mutex::new(session)),
+            cached_session_id: actual_id,
+            cached_parent_id: actual_parent,
+            config_json,
+        })
+    }
+
+    /// The session ID (UUID string).
+    #[napi(getter)]
+    pub fn session_id(&self) -> &str {
+        &self.cached_session_id
+    }
+
+    /// The parent session ID, if any.
+    #[napi(getter)]
+    pub fn parent_id(&self) -> Option<String> {
+        self.cached_parent_id.clone()
+    }
+
+    /// Whether the session has been initialized.
+    #[napi(getter)]
+    pub fn is_initialized(&self) -> bool {
+        // Use try_lock to avoid blocking the JS thread
+        match self.inner.try_lock() {
+            Ok(session) => session.is_initialized(),
+            Err(_) => false,
+        }
+    }
+
+    /// Current session status string (running, completed, failed, cancelled).
+    #[napi(getter)]
+    pub fn status(&self) -> String {
+        match self.inner.try_lock() {
+            Ok(session) => session.status().to_string(),
+            Err(_) => "running".to_string(),
+        }
+    }
+
+    /// Access the coordinator.
+    #[napi(getter)]
+    pub fn coordinator(&self) -> Result<JsCoordinator> {
+        // Create a coordinator wrapper from the config.
+        // Note: This creates a separate coordinator instance. For shared state,
+        // the Session's internal coordinator needs Arc<Coordinator> wrapping.
+        // This is a known limitation — see Future TODO #1 in design doc. 
+        let config: HashMap<String, serde_json::Value> =
+            serde_json::from_str(&self.config_json).unwrap_or_default();
+        Ok(JsCoordinator {
+            inner: Arc::new(amplifier_core::Coordinator::new(config)),
+        })
+    }
+
+    /// Mark the session as initialized.
+    ///
+    /// In the Napi-RS binding, module loading happens in JS-land.
+    /// Call this after mounting modules via the coordinator.
+    #[napi]
+    pub fn set_initialized(&self) {
+        if let Ok(session) = self.inner.try_lock() {
+            session.set_initialized();
+        }
+    }
+
+    /// Clean up session resources.
+    #[napi]
+    pub async fn cleanup(&self) -> Result<()> {
+        let session = self.inner.lock().await;
+        session.cleanup().await;
+        Ok(())
+    }
+}
+```
+
+> **Known limitation:** The `coordinator()` getter creates a separate Coordinator instance. Sharing the Session's internal Coordinator requires restructuring the Rust kernel to use `Arc<Coordinator>` — this is tracked as Future TODO #1 in the design doc. For the initial binding, JS-side module mounting and Rust kernel config/hooks/cancellation work independently, which matches the Python hybrid pattern.
+
+### Step 4: Rebuild and run tests
+
+Run:
+```bash
+cd amplifier-core/bindings/node && npm run build:debug && npx vitest run __tests__/session.test.ts 2>&1
+```
+Expected: All 11 tests pass.
+
+### Step 5: Commit
+
+```bash
+cd amplifier-core && git add bindings/node/ && git commit -m "feat(node): add AmplifierSession binding"
+```
+
+---
+
+## Task 7: Module Interfaces
+
+**Why:** Module interfaces let TypeScript authors implement `Tool`, `Provider`, `Orchestrator`, etc. as plain TS objects and mount them in the coordinator. The bridge structs (`JsToolBridge`, `JsProviderBridge`, etc.) use `ThreadsafeFunction` to call from Rust back into JS.
+
+**Reference:** The 6 Rust traits are in `crates/amplifier-core/src/traits.rs`. The Python bridge defines `PyHookHandlerBridge` (which we already did in Task 4). This task adds `JsToolBridge` as the primary example — the others follow the same pattern. 
+ +**Files:** +- Modify: `bindings/node/src/lib.rs` +- Create: `bindings/node/__tests__/modules.test.ts` + +### Step 1: Write the failing test + +Create the file `bindings/node/__tests__/modules.test.ts`: + +```typescript +import { describe, it, expect } from 'vitest'; +import { JsToolBridge } from '../index.js'; + +describe('Tool interface bridge', () => { + it('creates a JsToolBridge wrapping a TS tool object', () => { + const tool = new JsToolBridge( + 'echo', + 'Echoes input back', + JSON.stringify({ type: 'object', properties: { text: { type: 'string' } } }), + async (inputJson: string) => { + const input = JSON.parse(inputJson); + return JSON.stringify({ + success: true, + output: input.text || 'no input', + }); + }, + ); + + expect(tool.name).toBe('echo'); + expect(tool.description).toBe('Echoes input back'); + }); + + it('executes a tool through the bridge', async () => { + const tool = new JsToolBridge( + 'greet', + 'Greets a person', + '{}', + async (inputJson: string) => { + const input = JSON.parse(inputJson); + return JSON.stringify({ + success: true, + output: `Hello, ${input.name}!`, + }); + }, + ); + + const resultJson = await tool.execute(JSON.stringify({ name: 'World' })); + const result = JSON.parse(resultJson); + expect(result.success).toBe(true); + expect(result.output).toBe('Hello, World!'); + }); + + it('handles tool execution errors', async () => { + const tool = new JsToolBridge( + 'failing', + 'Always fails', + '{}', + async (_inputJson: string) => { + return JSON.stringify({ + success: false, + error: 'Something went wrong', + }); + }, + ); + + const resultJson = await tool.execute('{}'); + const result = JSON.parse(resultJson); + expect(result.success).toBe(false); + expect(result.error).toBe('Something went wrong'); + }); +}); +``` + +### Step 2: Run test to verify it fails + +Run: +```bash +cd amplifier-core/bindings/node && npx vitest run __tests__/modules.test.ts 2>&1 +``` +Expected: FAIL — `JsToolBridge` doesn't exist yet. 
+
+### Step 3: Implement JsToolBridge
+
+Open `bindings/node/src/lib.rs`. Add:
+
+```rust
+// ---------------------------------------------------------------------------
+// JsToolBridge — wraps a JS tool implementation as a Napi class
+// ---------------------------------------------------------------------------
+
+/// Bridge that wraps a TypeScript tool implementation.
+///
+/// The TS side provides name, description, parameters schema, and an
+/// async execute function. This class holds a ThreadsafeFunction to the
+/// execute callback so Rust can call back into JS.
+///
+/// In the hybrid coordinator pattern, these bridge objects are stored in
+/// a JS-side Map (not in the Rust Coordinator). The JS orchestrator
+/// retrieves them by name and calls execute() directly.
+#[napi]
+pub struct JsToolBridge {
+    tool_name: String,
+    tool_description: String,
+    parameters_json: String,
+    execute_fn: ThreadsafeFunction<String, ErrorStrategy::Fatal>,
+}
+
+#[napi]
+impl JsToolBridge {
+    /// Create a new tool bridge.
+    ///
+    /// - `name`: Tool name (e.g., "bash", "read_file")
+    /// - `description`: Human-readable description
+    /// - `parameters_json`: JSON Schema for tool parameters
+    /// - `execute_fn`: Async function `(inputJson: string) => Promise<string>`
+    ///   that takes JSON input and returns JSON ToolResult
+    #[napi(constructor)]
+    #[napi(ts_args_type = "name: string, description: string, parametersJson: string, executeFn: (inputJson: string) => Promise<string>")]
+    pub fn new(
+        name: String,
+        description: String,
+        parameters_json: String,
+        execute_fn: JsFunction,
+    ) -> Result<Self> {
+        let tsfn: ThreadsafeFunction<String, ErrorStrategy::Fatal> = execute_fn
+            .create_threadsafe_function(0, |ctx: ThreadSafeCallContext<String>| {
+                let env = ctx.env;
+                Ok(vec![env.create_string(&ctx.value)?.into_unknown()])
+            })?;
+
+        Ok(Self {
+            tool_name: name,
+            tool_description: description,
+            parameters_json,
+            execute_fn: tsfn,
+        })
+    }
+
+    /// The tool name.
+    #[napi(getter)]
+    pub fn name(&self) -> &str {
+        &self.tool_name
+    }
+
+    /// The tool description. 
+    #[napi(getter)]
+    pub fn description(&self) -> &str {
+        &self.tool_description
+    }
+
+    /// Execute the tool with JSON input. Returns a JSON ToolResult string.
+    #[napi]
+    pub async fn execute(&self, input_json: String) -> Result<String> {
+        let result = self
+            .execute_fn
+            .call_async(input_json)
+            .await
+            .map_err(|e| Error::from_reason(format!("Tool execution error: {e}")))?;
+        Ok(result)
+    }
+
+    /// Get the tool spec as a JSON string.
+    #[napi]
+    pub fn get_spec(&self) -> String {
+        serde_json::json!({
+            "name": self.tool_name,
+            "description": self.tool_description,
+            "parameters": serde_json::from_str::<serde_json::Value>(&self.parameters_json)
+                .unwrap_or(serde_json::json!({})),
+        })
+        .to_string()
+    }
+}
+```
+
+### Step 4: Rebuild and run tests
+
+Run:
+```bash
+cd amplifier-core/bindings/node && npm run build:debug && npx vitest run __tests__/modules.test.ts 2>&1
+```
+Expected: All 3 tests pass.
+
+### Step 5: Commit
+
+```bash
+cd amplifier-core && git add bindings/node/ && git commit -m "feat(node): add JsToolBridge module interface"
+```
+
+> **Follow-up in this same task or as a sub-step:** After the Tool bridge is proven, add `JsProviderBridge` following the exact same pattern (name, get_info, complete, parse_tool_calls). The other interfaces (Orchestrator, ContextManager, ApprovalProvider) follow the same ThreadsafeFunction pattern. Each gets its own constructor, properties, and async methods. The pattern is identical — only the method names and signatures differ.
+
+---
+
+## Task 8: Error Bridging
+
+**Why:** Rust errors need to become proper JS Error objects with typed `code` properties. JS exceptions in callbacks need to become Rust `Result::Err`. This task establishes the error taxonomy across the FFI boundary.
+
+**Reference:** The Rust errors are in `crates/amplifier-core/src/errors.rs`. The Python bridge converts them via `PyErr::new::<ExceptionType, _>(...)`. 
+ +**Files:** +- Modify: `bindings/node/src/lib.rs` +- Create: `bindings/node/__tests__/errors.test.ts` + +### Step 1: Write the failing test + +Create the file `bindings/node/__tests__/errors.test.ts`: + +```typescript +import { describe, it, expect } from 'vitest'; +import { + JsAmplifierSession, + amplifierErrorToJs, +} from '../index.js'; + +describe('Error bridging', () => { + it('invalid JSON config throws with clear message', () => { + expect(() => new JsAmplifierSession('not json')).toThrow(/Invalid config JSON/); + }); + + it('missing orchestrator throws with field name', () => { + const config = JSON.stringify({ session: { context: 'simple' } }); + expect(() => new JsAmplifierSession(config)).toThrow(/orchestrator/); + }); + + it('missing context throws with field name', () => { + const config = JSON.stringify({ session: { orchestrator: 'basic' } }); + expect(() => new JsAmplifierSession(config)).toThrow(/context/); + }); + + it('amplifierErrorToJs converts error variants to typed objects', () => { + // Test the helper function that converts Rust AmplifierError to JS + const sessionError = amplifierErrorToJs('session', 'not initialized'); + expect(sessionError.code).toBe('SessionError'); + expect(sessionError.message).toBe('not initialized'); + + const toolError = amplifierErrorToJs('tool', 'tool not found: bash'); + expect(toolError.code).toBe('ToolError'); + + const providerError = amplifierErrorToJs('provider', 'rate limited'); + expect(providerError.code).toBe('ProviderError'); + + const hookError = amplifierErrorToJs('hook', 'handler failed'); + expect(hookError.code).toBe('HookError'); + + const contextError = amplifierErrorToJs('context', 'compaction failed'); + expect(contextError.code).toBe('ContextError'); + }); +}); +``` + +### Step 2: Run test to verify it fails + +Run: +```bash +cd amplifier-core/bindings/node && npx vitest run __tests__/errors.test.ts 2>&1 +``` +Expected: FAIL — `amplifierErrorToJs` doesn't exist yet. 
+ +### Step 3: Implement error bridging + +Open `bindings/node/src/lib.rs`. Add: + +```rust +// --------------------------------------------------------------------------- +// Error bridging — Rust AmplifierError → JS Error with typed code +// --------------------------------------------------------------------------- + +/// Error info object returned to JS with a typed error code. +#[napi(object)] +pub struct JsAmplifierError { + /// Error category: "SessionError", "ToolError", "ProviderError", "HookError", "ContextError" + pub code: String, + /// Human-readable error message. + pub message: String, +} + +/// Convert an AmplifierError variant name + message to a typed JS error object. +/// +/// This is a helper exposed to JS for consistent error handling. +/// In practice, most errors are thrown directly as napi::Error — this helper +/// is for cases where you want structured error objects. +#[napi] +pub fn amplifier_error_to_js(variant: String, message: String) -> JsAmplifierError { + let code = match variant.as_str() { + "session" => "SessionError", + "tool" => "ToolError", + "provider" => "ProviderError", + "hook" => "HookError", + "context" => "ContextError", + _ => "AmplifierError", + }; + JsAmplifierError { + code: code.to_string(), + message, + } +} + +/// Internal helper: convert amplifier_core::AmplifierError to napi::Error. 
+fn amplifier_error_to_napi(err: amplifier_core::AmplifierError) -> napi::Error { + let (code, msg) = match &err { + amplifier_core::AmplifierError::Session(e) => ("SessionError", e.to_string()), + amplifier_core::AmplifierError::Tool(e) => ("ToolError", e.to_string()), + amplifier_core::AmplifierError::Provider(e) => ("ProviderError", e.to_string()), + amplifier_core::AmplifierError::Hook(e) => ("HookError", e.to_string()), + amplifier_core::AmplifierError::Context(e) => ("ContextError", e.to_string()), + }; + Error::from_reason(format!("[{code}] {msg}")) +} +``` + +### Step 4: Rebuild and run tests + +Run: +```bash +cd amplifier-core/bindings/node && npm run build:debug && npx vitest run __tests__/errors.test.ts 2>&1 +``` +Expected: All 5 tests pass. + +### Step 5: Commit + +```bash +cd amplifier-core && git add bindings/node/ && git commit -m "feat(node): add error bridging — Rust errors to typed JS errors" +``` + +--- + +## Task 9: Integration Tests + +**Why:** Verify the full binding layer works end-to-end: session lifecycle with TS-implemented modules, concurrent operations, cancellation, type fidelity across the FFI boundary. + +**Files:** +- Create: `bindings/node/__tests__/integration.test.ts` + +### Step 1: Write integration tests + +Create the file `bindings/node/__tests__/integration.test.ts`: + +```typescript +import { describe, it, expect } from 'vitest'; +import { + JsAmplifierSession, + JsCoordinator, + JsCancellationToken, + JsHookRegistry, + JsToolBridge, + HookAction, + SessionState, +} from '../index.js'; + +describe('Integration: Full session lifecycle', () => { + const validConfig = JSON.stringify({ + session: { + orchestrator: 'loop-basic', + context: 'context-simple', + }, + }); + + it('session → coordinator → hooks → cancel lifecycle', async () => { + // 1. Create session + const session = new JsAmplifierSession(validConfig); + expect(session.sessionId).toBeTruthy(); + expect(session.isInitialized).toBe(false); + + // 2. 
Access coordinator + const coord = session.coordinator; + expect(coord).toBeDefined(); + + // 3. Register capability + coord.registerCapability('test-cap', JSON.stringify({ enabled: true })); + const cap = coord.getCapability('test-cap'); + expect(cap).toBeTruthy(); + expect(JSON.parse(cap!).enabled).toBe(true); + + // 4. Use cancellation + const cancel = coord.cancellation; + expect(cancel.isCancelled).toBe(false); + cancel.requestGraceful(); + expect(cancel.isGraceful).toBe(true); + cancel.reset(); + expect(cancel.isCancelled).toBe(false); + + // 5. Cleanup + await session.cleanup(); + expect(session.isInitialized).toBe(false); + }); +}); + +describe('Integration: Hook handler roundtrip', () => { + it('JS handler receives event data and returns HookResult', async () => { + const registry = new JsHookRegistry(); + const receivedEvents: Array<{ event: string; data: any }> = []; + + registry.register( + 'tool:pre', + (event: string, dataJson: string) => { + const data = JSON.parse(dataJson); + receivedEvents.push({ event, data }); + return JSON.stringify({ + action: 'continue', + user_message: 'Tool approved', + user_message_level: 'info', + }); + }, + 0, + 'approval-hook', + ); + + const result = await registry.emit( + 'tool:pre', + JSON.stringify({ tool_name: 'bash', command: 'ls' }), + ); + + expect(receivedEvents.length).toBe(1); + expect(receivedEvents[0].event).toBe('tool:pre'); + expect(receivedEvents[0].data.tool_name).toBe('bash'); + expect(result.action).toBe(HookAction.Continue); + }); + + it('deny handler short-circuits pipeline', async () => { + const registry = new JsHookRegistry(); + let secondHandlerCalled = false; + + registry.register( + 'tool:pre', + (_e: string, _d: string) => { + return JSON.stringify({ action: 'deny', reason: 'not allowed' }); + }, + 0, + 'denier', + ); + + registry.register( + 'tool:pre', + (_e: string, _d: string) => { + secondHandlerCalled = true; + return JSON.stringify({ action: 'continue' }); + }, + 10, + 'after-deny', + 
); + + const result = await registry.emit('tool:pre', '{}'); + expect(result.action).toBe(HookAction.Deny); + expect(result.reason).toBe('not allowed'); + expect(secondHandlerCalled).toBe(false); + }); +}); + +describe('Integration: Tool bridge execution', () => { + it('creates and executes a TS tool through the bridge', async () => { + const tool = new JsToolBridge( + 'calculator', + 'Adds two numbers', + JSON.stringify({ + type: 'object', + properties: { + a: { type: 'number' }, + b: { type: 'number' }, + }, + }), + async (inputJson: string) => { + const input = JSON.parse(inputJson); + const sum = (input.a || 0) + (input.b || 0); + return JSON.stringify({ success: true, output: String(sum) }); + }, + ); + + expect(tool.name).toBe('calculator'); + const specJson = tool.getSpec(); + const spec = JSON.parse(specJson); + expect(spec.name).toBe('calculator'); + expect(spec.parameters.type).toBe('object'); + + const resultJson = await tool.execute(JSON.stringify({ a: 3, b: 4 })); + const result = JSON.parse(resultJson); + expect(result.success).toBe(true); + expect(result.output).toBe('7'); + }); +}); + +describe('Integration: CancellationToken state machine', () => { + it('full state machine: none → graceful → immediate → reset → none', () => { + const token = new JsCancellationToken(); + + // None state + expect(token.isCancelled).toBe(false); + expect(token.isGraceful).toBe(false); + expect(token.isImmediate).toBe(false); + + // → Graceful + token.requestGraceful(); + expect(token.isCancelled).toBe(true); + expect(token.isGraceful).toBe(true); + expect(token.isImmediate).toBe(false); + + // → Immediate + token.requestImmediate(); + expect(token.isCancelled).toBe(true); + expect(token.isGraceful).toBe(false); + expect(token.isImmediate).toBe(true); + + // → Reset → None + token.reset(); + expect(token.isCancelled).toBe(false); + expect(token.isGraceful).toBe(false); + expect(token.isImmediate).toBe(false); + }); +}); + +describe('Integration: Type fidelity', () => { 
+ it('SessionConfig validates required fields', () => { + // Valid config + expect( + () => + new JsAmplifierSession( + JSON.stringify({ + session: { orchestrator: 'x', context: 'y' }, + providers: { anthropic: { model: 'claude' } }, + metadata: { custom: true }, + }), + ), + ).not.toThrow(); + }); + + it('HookResult fields roundtrip correctly', async () => { + const registry = new JsHookRegistry(); + registry.register( + 'test:roundtrip', + (_e: string, _d: string) => { + return JSON.stringify({ + action: 'inject_context', + context_injection: 'Linter found 3 errors', + context_injection_role: 'system', + ephemeral: true, + suppress_output: true, + user_message: 'Found lint errors', + user_message_level: 'warning', + user_message_source: 'eslint-hook', + }); + }, + 0, + 'lint-hook', + ); + + const result = await registry.emit('test:roundtrip', '{}'); + expect(result.action).toBe(HookAction.InjectContext); + expect(result.context_injection).toBe('Linter found 3 errors'); + expect(result.ephemeral).toBe(true); + expect(result.suppress_output).toBe(true); + expect(result.user_message).toBe('Found lint errors'); + expect(result.user_message_source).toBe('eslint-hook'); + }); + + it('Coordinator toDict returns all expected fields', () => { + const coord = new JsCoordinator('{}'); + coord.registerCapability('streaming', '"true"'); + const dict = coord.toDict(); + + expect(Array.isArray(dict.tools)).toBe(true); + expect(Array.isArray(dict.providers)).toBe(true); + expect(typeof dict.has_orchestrator).toBe('boolean'); + expect(typeof dict.has_context).toBe('boolean'); + expect(Array.isArray(dict.capabilities)).toBe(true); + }); +}); +``` + +### Step 2: Build and run all tests + +Run: +```bash +cd amplifier-core/bindings/node && npm run build:debug && npx vitest run 2>&1 +``` +Expected: All tests across all test files pass (~65 total). 
+ +### Step 3: Run Rust tests to verify nothing broke + +Run: +```bash +cd amplifier-core && cargo test --all 2>&1 +``` +Expected: All Rust tests still pass. + +### Step 4: Commit + +```bash +cd amplifier-core && git add bindings/node/ && git commit -m "test(node): add integration tests for full binding layer" +``` + +--- + +## Final Checklist + +After all 10 tasks are complete, verify: + +1. **Rust builds clean:** + ```bash + cd amplifier-core && cargo build --all 2>&1 + ``` + +2. **All Rust tests pass:** + ```bash + cd amplifier-core && cargo test --all 2>&1 + ``` + +3. **Node addon builds:** + ```bash + cd amplifier-core/bindings/node && npm run build:debug 2>&1 + ``` + +4. **All Vitest tests pass:** + ```bash + cd amplifier-core/bindings/node && npx vitest run 2>&1 + ``` + +5. **Generated types exist:** + ```bash + ls -la amplifier-core/bindings/node/index.js amplifier-core/bindings/node/index.d.ts + ``` + +6. **Type definitions are meaningful:** + ```bash + cat amplifier-core/bindings/node/index.d.ts | head -100 + ``` + Expected: TypeScript declarations with proper types (not `any` everywhere). + +--- + +## Deferred Work (NOT in this plan) + +These items are explicitly out of scope — tracked in the design doc's "Tracked Future Debt" table: + +1. **gRPC bridge fidelity fixes** — 27 `TODO(grpc-v2)` markers in the codebase +2. **`process_hook_result()` in Rust** — currently ~185 lines of Python-only code +3. **Cross-language module resolver** — Phase 4 +4. **npm publishing pipeline / CI** — separate follow-up +5. **Splitting `lib.rs` into modules** — when >3,000 lines (Future TODO #3) +6. **Unified Rust module storage** — consolidating per-language module dicts (Future TODO #1) +7. **`JsProviderBridge`**, **`JsOrchestratorBridge`**, **`JsContextManagerBridge`**, **`JsApprovalProviderBridge`** — follow the exact same `ThreadsafeFunction` pattern as `JsToolBridge`. Add them after the Tool bridge is proven. Each is ~50 lines of boilerplate. 
diff --git a/docs/plans/2026-03-05-phase3-wasm-module-loading-design.md b/docs/plans/2026-03-05-phase3-wasm-module-loading-design.md new file mode 100644 index 0000000..2d8915d --- /dev/null +++ b/docs/plans/2026-03-05-phase3-wasm-module-loading-design.md @@ -0,0 +1,357 @@ +# Phase 3: WASM Module Loading Design + +> Full WebAssembly Component Model integration for amplifier-core — all 6 module types loadable as `.wasm` components via wasmtime. + +**Status:** Approved +**Date:** 2026-03-05 +**Phase:** 3 of 5 (Cross-Language SDK) +**Parent design:** `docs/plans/2026-03-02-cross-language-session-sdk-design.md` +**Prerequisites:** PR #35 (Phase 2 — wasmtime 42 upgrade), PR #36 (gRPC v2 debt fix) + +--- + +## 1. Goal + +Implement full WASM module loading for amplifier-core via the WebAssembly Component Model and wasmtime. All 6 module types (Tool, Provider, Orchestrator, ContextManager, HookHandler, ApprovalProvider) get WASM bridges, WIT interface definitions, and a Rust guest SDK. This enables cross-language module authoring — compile a module to `.wasm` once, load it into any host (Python, TypeScript, Rust, future Go/C#). + +--- + +## 2. Background + +This is Phase 3 of the 5-phase Cross-Language SDK plan. Phase 3 depends on: + +- **PR #35 (Phase 2)** — wasmtime 29→42 upgrade. Wasmtime 42 provides mature Component Model support with the `bindgen!` macro and `wasmtime::component::*` APIs. +- **PR #36 (gRPC debt)** — bidirectional proto conversions (Message, ChatRequest, ChatResponse, HookResult), `Arc` on Session, all 9 KernelService RPCs implemented. + +Both PRs must merge before Phase 3 work begins. + +**Current state:** A `WasmToolBridge` stub exists (compiles WASM bytes, satisfies `Arc`, but `execute()` returns a hard error). A `Transport::Wasm` variant exists in dispatch. Zero `.wasm` test fixtures, zero `.wit` files, zero component model code. + +--- + +## 3. Key Design Decisions + +1. 
**Thin WIT + proto bytes** — WIT functions accept/return `list` (proto-serialized bytes), not rich WIT records. Same wire format as gRPC. Proto remains the single source of truth (CORE_DEVELOPMENT_PRINCIPLES §6). A module compiled for gRPC can be recompiled for WASM without code changes. + +2. **All 6 module types** — WIT definitions, bridge implementations, and tests for all 6. Tiered delivery within one PR: Tier 1 (pure compute: Tool, HookHandler, ContextManager, ApprovalProvider) first, then Tier 2 (needs host capabilities: Provider with WASI HTTP, Orchestrator with kernel-service host imports). + +3. **Developer experience first** — Module authors never see WIT or proto bytes directly. The guest SDK (`amplifier-guest` crate) provides familiar Amplifier types (`ToolSpec`, `ToolResult`, `ChatRequest`, etc.) and a single `export!` macro. Writing a WASM module looks nearly identical to writing a native Rust module. + +4. **Shared wasmtime Engine** — Single `Engine` instance reused across all WASM modules (engine creation is expensive, module instantiation is cheap). + +5. **Async via spawn_blocking** — WASM execution is synchronous CPU work. Bridges wrap calls in `tokio::task::spawn_blocking()` to avoid blocking the async runtime. + +--- + +## 4. Developer Experience + +The goal: a Rust developer writing a WASM Tool module writes code that looks almost identical to writing a native Rust Tool module. The WIT + proto bytes are hidden behind a guest SDK crate. + +**Native Rust module today:** +```rust +impl Tool for MyTool { + fn name(&self) -> &str { "my-tool" } + fn get_spec(&self) -> ToolSpec { ToolSpec { name: "my-tool".into(), ... } } + async fn execute(&self, input: Value) -> Result { + Ok(ToolResult { success: true, output: "done".into(), .. 
}) + } +} +``` + +**WASM module with guest SDK:** +```rust +use amplifier_guest::Tool; + +struct MyTool; + +impl Tool for MyTool { + fn name(&self) -> &str { "my-tool" } + fn get_spec(&self) -> ToolSpec { ToolSpec { name: "my-tool".into(), ... } } + fn execute(&self, input: Value) -> Result { + // same logic, sync (WASM is sync from guest perspective) + Ok(ToolResult { success: true, output: "done".into(), .. }) + } +} + +amplifier_guest::export!(MyTool); // macro handles WIT binding glue +``` + +**What the guest SDK hides:** +- WIT interface binding generation (via `wit-bindgen`) +- Proto serialization/deserialization of inputs and outputs +- The `list` boundary — module authors work with typed structs +- The `export!` macro wires the struct to the WIT exports + +**Same types, same names:** `ToolSpec`, `ToolResult`, `ChatRequest`, `ChatResponse`, `HookResult`, `Message` — all the same structs, re-exported through the guest SDK. A developer moving from native Rust to WASM changes their `Cargo.toml` dependency and adds the `export!` macro. The logic stays identical. + +For future non-Rust guests (Go, C#, C++ compiled to WASM via TinyGo, NativeAOT, Emscripten): the guest SDK would be a package in that language providing the same interface names. Phase 3 targets Rust guest modules only. + +--- + +## 5. 
WIT Interface Definitions + +All 6 module types defined as WIT interfaces using the thin proto bytes pattern: + +```wit +package amplifier:modules@1.0.0; + +// === Tier 1: Pure compute (no WASI, no host imports) === + +interface tool { + get-spec: func() -> list<u8>; + execute: func(request: list<u8>) -> result<list<u8>, string>; +} + +interface hook-handler { + handle: func(event: string, data: list<u8>) -> result<list<u8>, string>; +} + +interface context-manager { + add-message: func(message: list<u8>) -> result<_, string>; + get-messages: func() -> result<list<u8>, string>; + get-messages-for-request: func(request: list<u8>) -> result<list<u8>, string>; + set-messages: func(messages: list<u8>) -> result<_, string>; + clear: func() -> result<_, string>; +} + +interface approval-provider { + request-approval: func(request: list<u8>) -> result<list<u8>, string>; +} + +// === Tier 2: Needs host capabilities === + +interface provider { + get-info: func() -> list<u8>; + list-models: func() -> result<list<u8>, string>; + complete: func(request: list<u8>) -> result<list<u8>, string>; + parse-tool-calls: func(response: list<u8>) -> list<u8>; +} + +interface orchestrator { + execute: func(request: list<u8>) -> result<list<u8>, string>; +} +``` + +**Host-provided imports for Tier 2 modules:** + +```wit +// Kernel callbacks — WASM equivalent of gRPC KernelService +interface kernel-service { + execute-tool: func(name: string, input: list<u8>) -> result<list<u8>, string>; + complete-with-provider: func(name: string, request: list<u8>) -> result<list<u8>, string>; + emit-hook: func(event: string, data: list<u8>) -> result<list<u8>, string>; + get-messages: func() -> result<list<u8>, string>; + add-message: func(message: list<u8>) -> result<_, string>; + get-capability: func(name: string) -> result<list<u8>, string>; + register-capability: func(name: string, value: list<u8>) -> result<_, string>; +} +``` + +Provider gets WASI HTTP imports (via `wasi:http/outgoing-handler`) for making LLM API calls. Orchestrator gets `kernel-service` host imports for calling back into the kernel. + +All complex types are `list<u8>` (proto-serialized bytes). 
The WIT interfaces are thin wrappers. The proto schema remains the single source of truth. + +--- + +## 6. Component Model Host Infrastructure + +**Shared engine:** The current stub creates a new `wasmtime::Engine` per bridge. Phase 3 shares a single `Engine` across all WASM modules. The engine is stored on the Coordinator or passed through the transport layer. + +**Module lifecycle:** +1. **Compile time** (once): `cargo component build` produces a `.wasm` component binary +2. **Load time** (once per module): `Component::new()` validates and AOT-compiles the WASM +3. **Instantiate** (per call or pooled): `Linker::instantiate()` creates a `Store` + instance with imports wired + +**Bridge pattern** (same as gRPC): +1. Host code calls `bridge.execute(input)` +2. Bridge serializes input to proto bytes +3. Bridge calls WASM export via wasmtime +4. WASM guest deserializes, runs logic, serializes result +5. Bridge deserializes proto bytes back to native type (e.g. `ToolResult`) +6. Returns `Arc` result + +The key difference from gRPC: no network, no process management. The `.wasm` binary is loaded in-process. The bridge holds a `wasmtime::component::Instance` instead of a `tonic::Channel`. + +**Async handling:** WASM execution is synchronous CPU work. The bridge wraps calls in `tokio::task::spawn_blocking()` to avoid blocking the async runtime, then awaits the result. + +--- + +## 7. Guest SDK (`amplifier-guest`) + +A Rust crate that module authors depend on. It hides all WIT/proto plumbing behind familiar Amplifier types and a single `export!` macro. + +**Crate structure:** +``` +amplifier-guest/ +├── Cargo.toml # depends on wit-bindgen, prost, amplifier-core (types only) +├── src/ +│ ├── lib.rs # re-exports types + export! macro +│ ├── types.rs # ToolSpec, ToolResult, ChatRequest, ChatResponse, etc. 
+│ └── bindings.rs # generated from WIT via wit-bindgen (build.rs) +└── wit/ + └── amplifier-modules.wit # the WIT definitions from Section 5 +``` + +**What it provides to module authors:** +- `amplifier_guest::Tool` trait (same method signatures as `amplifier_core::Tool`, minus the async) +- `amplifier_guest::Provider`, `HookHandler`, `ContextManager`, `Orchestrator`, `ApprovalProvider` traits +- All data types: `ToolSpec`, `ToolResult`, `ChatRequest`, `ChatResponse`, `HookResult`, `Message`, etc. +- `amplifier_guest::export!(MyTool)` macro that generates the WIT binding glue +- For Tier 2 modules: `amplifier_guest::kernel::execute_tool()`, `kernel::complete_with_provider()`, etc. — typed wrappers around the host `kernel-service` imports + +**Location:** New crate at `crates/amplifier-guest/`. It is a compile-time dependency for WASM module authors, not a runtime dependency of the kernel. + +**Build workflow for module authors:** +```bash +cargo component build --release +# Produces: target/wasm32-wasip2/release/my_tool.wasm +``` + +--- + +## 8. Bridge Implementations + +6 WASM bridge structs, mirroring the 6 gRPC bridges. Each follows the identical pattern: hold a wasmtime `Instance`, serialize inputs to proto bytes, call the WASM export, deserialize proto bytes back to native types, implement the corresponding Rust trait. 
+ +### Tier 1 Bridges (Pure Compute) + +| Bridge | Trait | WASM Exports Called | Host Imports | +|---|---|---|---| +| `WasmToolBridge` | `Tool` | `get-spec`, `execute` | None | +| `WasmHookBridge` | `HookHandler` | `handle` | None | +| `WasmContextBridge` | `ContextManager` | `add-message`, `get-messages`, `get-messages-for-request`, `set-messages`, `clear` | None | +| `WasmApprovalBridge` | `ApprovalProvider` | `request-approval` | None | + +### Tier 2 Bridges (Needs Host Capabilities) + +| Bridge | Trait | WASM Exports Called | Host Imports | +|---|---|---|---| +| `WasmProviderBridge` | `Provider` | `get-info`, `list-models`, `complete`, `parse-tool-calls` | WASI HTTP (`wasi:http/outgoing-handler`) | +| `WasmOrchestratorBridge` | `Orchestrator` | `execute` | `kernel-service` (custom host imports) | + +**Each bridge struct holds:** +```rust +pub struct WasmToolBridge { + engine: Arc, // shared across all WASM modules + component: Component, // AOT-compiled WASM component + linker: Linker, // pre-configured with imports + name: String, +} +``` + +**Async wrapping:** All bridge trait methods use `tokio::task::spawn_blocking()` since WASM execution is synchronous CPU work. + +**Transport dispatch:** `transport.rs` gets `load_wasm_*` functions for all 6 module types (currently only `load_wasm_tool` exists). Each accepts `&[u8]` or `&Path` and returns `Arc`. + +--- + +## 9. Test Fixtures & E2E Testing + +### Test Fixtures + +All fixtures compiled from Rust guest code using the `amplifier-guest` crate. They live in `tests/fixtures/wasm/` as pre-compiled `.wasm` binaries committed to the repo. A `build-fixtures.sh` script recompiles them from source in `tests/fixtures/wasm/src/`. 
+ +| Fixture | Module Type | What it does | Validates | +|---|---|---|---| +| `echo-tool.wasm` | Tool | Returns input as output | Basic WIT + proto roundtrip | +| `deny-hook.wasm` | HookHandler | Returns `HookAction::Deny` | Hook bridge + HookResult serialization | +| `memory-context.wasm` | ContextManager | In-memory message store | Stateful WASM module (multi-call state) | +| `auto-approve.wasm` | ApprovalProvider | Always approves | Approval bridge + proto roundtrip | +| `echo-provider.wasm` | Provider | Returns canned ChatResponse | WASI HTTP imports (mocked in test) | +| `passthrough-orchestrator.wasm` | Orchestrator | Calls one tool via kernel-service import, returns result | Host kernel-service imports | + +### E2E Tests + +All behind `#[cfg(feature = "wasm")]`: + +```rust +#[test] fn load_echo_tool_from_bytes() // load .wasm, verify name/spec +#[tokio::test] async fn echo_tool_execute() // full execute roundtrip +#[tokio::test] async fn hook_handler_deny() // deny hook fires correctly +#[tokio::test] async fn context_manager_roundtrip() // add + get messages +#[tokio::test] async fn approval_auto_approve() // approval request → approved +#[tokio::test] async fn provider_complete() // ChatRequest → ChatResponse +#[tokio::test] async fn orchestrator_calls_kernel() // orchestrator → host import → tool +``` + +The **cross-language validation** test loads the same `echo-tool.wasm` from a Python host (via PyO3 bridge) and a TypeScript host (via Napi-RS bridge), proving the `.wasm` binary is truly portable across host languages. + +--- + +## 10. 
Transport Matrix (Complete Picture) + +How all languages connect to the Amplifier kernel: + +### Host App Bindings (In-Process) + +Run the kernel in your language: + +| Language | Binding | Mechanism | Status | +|---|---|---|---| +| Rust | Native | Direct Rust | Complete | +| Python | PyO3 | Rust ↔ CPython FFI | Complete (Phase 1) | +| TypeScript | Napi-RS | Rust ↔ V8 FFI | PR #35 (Phase 2) | +| Go | CGo | Rust ↔ Go FFI via C ABI | Future (TODO #4) | +| C# | P/Invoke | Rust ↔ .NET FFI via C ABI | Future (TODO #4) | +| C/C++ | C header | Direct C ABI | Future (TODO #4) | + +### Module Authoring (Cross-Language) + +Write a module in any language, plug into any host: + +| Transport | Mechanism | Overhead | Use case | +|---|---|---|---| +| Native | Direct Rust traits | Zero | Rust modules in Rust host | +| PyO3 | In-process FFI | Minimal | Python modules in Python host | +| Napi-RS | In-process FFI | Minimal | TS modules in TS host | +| WASM | wasmtime in-process | ~10-70μs/call | Cross-language portable modules (**Phase 3, this work**) | +| gRPC | Out-of-process RPC | ~1-5ms/call | Sidecar/microservice modules | + +Developers don't choose transport — Phase 4 (module resolver) auto-detects. WASM is the default cross-language path; gRPC is opt-in for microservice deployments. + +--- + +## 11. Deliverables + +1. **`wit/amplifier-modules.wit`** — WIT interface definitions for all 6 module types + `kernel-service` host imports +2. **`crates/amplifier-guest/`** — Rust guest SDK crate with traits, types, `export!` macro, and kernel-service wrappers +3. **6 WASM bridge implementations** in `crates/amplifier-core/src/bridges/` — `WasmToolBridge` (rewritten from stub), `WasmHookBridge`, `WasmContextBridge`, `WasmApprovalBridge`, `WasmProviderBridge`, `WasmOrchestratorBridge` +4. **Shared `Engine` management** — single wasmtime engine reused across all WASM modules +5. **`transport.rs`** — `load_wasm_*` functions for all 6 module types (file path + bytes variants) +6. 
**6 test fixture `.wasm` binaries** compiled from Rust guest code using `amplifier-guest` +7. **E2E tests** for all 6 module types behind `#[cfg(feature = "wasm")]` +8. **WASI HTTP integration** for Provider bridge, **kernel-service host imports** for Orchestrator bridge + +### Tiered Delivery (Within One PR) + +- **Tier 1 commits:** WIT definitions + guest SDK + Tier 1 bridges (Tool, HookHandler, ContextManager, ApprovalProvider) + Tier 1 test fixtures and E2E tests. Validates the WIT + Component Model foundation on simpler modules. +- **Tier 2 commits:** Tier 2 bridges (Provider with WASI HTTP, Orchestrator with kernel-service host imports) + Tier 2 test fixtures and E2E tests. Adds host capability complexity on top of the proven foundation. + +--- + +## 12. Dependencies + +**Must merge first:** +- **PR #35** (Phase 2) — contains wasmtime 29→42 upgrade. Phase 3 needs wasmtime 42 for mature Component Model APIs (`bindgen!`, `wasmtime::component::*`). +- **PR #36** (gRPC debt) — contains the bidirectional proto conversions (Message, ChatRequest, ChatResponse, HookResult) that the WASM bridges reuse for serialization, plus `Arc` on Session and all KernelService RPCs implemented. + +--- + +## 13. Not In Scope + +- Non-Rust guest SDKs (Go, C#, C++ guest SDKs are Phase 5) +- Module resolver auto-detection of `.wasm` files (Phase 4) +- Browser WASM host (webruntime concern, not kernel) +- Hot-reload of WASM modules +- WASM module marketplace +- Go/C#/C++ native host bindings (in-process, like PyO3/Napi-RS) + +--- + +## 14. 
Tracked Future Work + +Adding to the list from prior phases: + +- **Future TODO #4:** Go/C#/C++ native host bindings (in-process, like PyO3/Napi-RS) — CGo, P/Invoke, C ABI +- **Future TODO #5:** Non-Rust WASM guest SDKs (TinyGo, NativeAOT, Emscripten) — so non-Rust authors can compile to `.wasm` targeting the same WIT interfaces +- **Future TODO #6:** WASM module hot-reload \ No newline at end of file diff --git a/docs/plans/2026-03-05-phase3-wasm-module-loading-implementation.md b/docs/plans/2026-03-05-phase3-wasm-module-loading-implementation.md new file mode 100644 index 0000000..c78f33f --- /dev/null +++ b/docs/plans/2026-03-05-phase3-wasm-module-loading-implementation.md @@ -0,0 +1,3260 @@ +# Phase 3: WASM Module Loading — Implementation Plan + +> **Execution:** Use the subagent-driven-development workflow to implement this plan. + +**Goal:** Replace the `WasmToolBridge` stub with full WebAssembly Component Model integration for all 6 module types, including a Rust guest SDK, WIT interface definitions, and E2E tests with real `.wasm` fixtures. + +**Architecture:** The WASM bridges mirror the existing gRPC bridges — each holds a wasmtime component instance, serializes inputs to proto bytes (`list` on the WIT boundary), calls WASM exports, and deserializes results back to native Rust types. A shared `wasmtime::Engine` is reused across all WASM modules. A new `amplifier-guest` crate provides module authors with familiar Amplifier types and an `export!` macro that hides all WIT/proto plumbing. + +**Tech Stack:** Rust, wasmtime 42 (Component Model), WIT (WebAssembly Interface Types), prost (proto serialization), wit-bindgen (guest binding generation), cargo-component (WASM compilation) + +--- + +## Prerequisites + +> **CRITICAL:** This work depends on two unmerged PRs. Both MUST be merged to `main` before starting. +> +> - **PR #35** (Phase 2) — upgrades wasmtime from v29 to v42. Phase 3 needs wasmtime 42 for `wasmtime::component::*` APIs and the `bindgen!` macro. 
+> - **PR #36** (gRPC debt) — adds bidirectional proto conversions in `src/generated/conversions.rs`, puts `Arc` on Session, and implements all 9 KernelService RPCs. +> +> All code in this plan assumes the TARGET state after both PRs merge: +> - `wasmtime = "42"` in `Cargo.toml` +> - `wasmtime::component::{Component, Linker, Store, bindgen}` available +> - Proto conversion helpers in `crates/amplifier-core/src/generated/conversions.rs` +> - `Arc` accessible on `Session` + +--- + +## Glossary (Read This First) + +If you've never seen these terms before, here's what they mean: + +| Term | What it is | +|---|---| +| **WIT** | WebAssembly Interface Types — a text format defining function signatures for WASM modules. Like `.proto` for WASM. | +| **Component Model** | The WASM standard for inter-module communication. Defines how host (Rust kernel) talks to guest (WASM module). | +| **wasmtime** | A Rust library that runs WASM modules. We use its Component Model APIs. | +| **Guest** | The WASM module (e.g., a Tool written in Rust, compiled to `.wasm`). Runs *inside* the sandbox. | +| **Host** | The Rust kernel that *loads* and *calls* the WASM module. That's us (amplifier-core). | +| **Bridge** | A Rust struct that holds a WASM instance and implements a native trait (e.g., `impl Tool for WasmToolBridge`). Makes WASM modules look like native modules. | +| **`list`** | WIT type for "a byte array." We use it to pass proto-serialized bytes across the WASM boundary. | +| **`export!` macro** | A macro in the guest SDK that generates WIT binding glue so module authors don't see WIT or proto bytes. | +| **`bindgen!` macro** | A wasmtime macro used on the HOST side to generate Rust types from WIT definitions. | +| **`cargo component build`** | A cargo subcommand that compiles Rust to a WASM component (`.wasm` file). | +| **Proto bytes** | Data serialized using Protocol Buffers (prost). Same format used by gRPC bridges. Shared wire format. 
| +| **Tier 1** | Modules that do pure computation (Tool, HookHandler, ContextManager, ApprovalProvider). No host imports needed. | +| **Tier 2** | Modules that need host capabilities (Provider needs HTTP, Orchestrator needs kernel-service callbacks). | + +--- + +## File Map + +Here's every file this plan touches. Read this before starting any task. + +``` +amplifier-core/ +├── wit/ +│ └── amplifier-modules.wit # NEW — Task 0 +├── crates/ +│ ├── amplifier-guest/ # NEW — Tasks 2-5 +│ │ ├── Cargo.toml +│ │ ├── build.rs +│ │ ├── wit/ +│ │ │ └── amplifier-modules.wit # Symlink or copy from ../../wit/ +│ │ └── src/ +│ │ ├── lib.rs +│ │ ├── types.rs +│ │ └── bindings.rs # Generated by build.rs +│ └── amplifier-core/ +│ ├── Cargo.toml # MODIFY — Tasks 1, 10 +│ ├── src/ +│ │ ├── lib.rs # MODIFY — Task 1 (re-export wasm_engine) +│ │ ├── wasm_engine.rs # NEW — Task 1 +│ │ ├── transport.rs # MODIFY — Task 18 +│ │ └── bridges/ +│ │ ├── mod.rs # MODIFY — Tasks 10-13, 16-17 +│ │ ├── wasm_tool.rs # REWRITE — Task 10 +│ │ ├── wasm_hook.rs # NEW — Task 11 +│ │ ├── wasm_context.rs # NEW — Task 12 +│ │ ├── wasm_approval.rs # NEW — Task 13 +│ │ ├── wasm_provider.rs # NEW — Task 16 +│ │ └── wasm_orchestrator.rs # NEW — Task 17 +│ └── tests/ +│ └── wasm_tool_e2e.rs # REWRITE — Task 19 +├── tests/ +│ └── fixtures/ +│ └── wasm/ +│ ├── build-fixtures.sh # NEW — Task 19 +│ ├── echo-tool.wasm # NEW — Task 6 +│ ├── deny-hook.wasm # NEW — Task 7 +│ ├── memory-context.wasm # NEW — Task 8 +│ ├── auto-approve.wasm # NEW — Task 9 +│ ├── echo-provider.wasm # NEW — Task 14 +│ ├── passthrough-orchestrator.wasm # NEW — Task 15 +│ └── src/ +│ ├── echo-tool/ # NEW — Task 6 +│ │ ├── Cargo.toml +│ │ └── src/lib.rs +│ ├── deny-hook/ # NEW — Task 7 +│ │ ├── Cargo.toml +│ │ └── src/lib.rs +│ ├── memory-context/ # NEW — Task 8 +│ │ ├── Cargo.toml +│ │ └── src/lib.rs +│ ├── auto-approve/ # NEW — Task 9 +│ │ ├── Cargo.toml +│ │ └── src/lib.rs +│ ├── echo-provider/ # NEW — Task 14 +│ │ ├── Cargo.toml +│ │ └── 
src/lib.rs +│ └── passthrough-orchestrator/ # NEW — Task 15 +│ ├── Cargo.toml +│ └── src/lib.rs +``` + +--- + +## Task 0: WIT Interface Definitions + +**What:** Create the WIT file that defines the contract between host (kernel) and guest (WASM modules). This is the WASM equivalent of the `.proto` file. Every WASM bridge and every guest module depends on this file. + +**Why first:** Everything else — guest SDK bindings, host bindgen, bridge code — is generated from this file. + +**Files:** +- Create: `wit/amplifier-modules.wit` + +### Step 1: Create the WIT file + +Create the file `wit/amplifier-modules.wit` with the following content: + +```wit +package amplifier:modules@1.0.0; + +// === Tier 1: Pure compute (no WASI, no host imports) === + +// Tool module interface — implements get-spec and execute. +// All complex types are proto-serialized bytes (list). +interface tool { + // Returns a proto-serialized ToolSpec. + get-spec: func() -> list; + + // Executes the tool with proto-serialized input. + // Returns proto-serialized ToolResult on success, or an error string. + execute: func(request: list) -> result, string>; +} + +// Hook handler interface — intercepts lifecycle events. +interface hook-handler { + // Handles an event. Returns proto-serialized HookResult. + handle: func(event: string, data: list) -> result, string>; +} + +// Context manager interface — manages conversation message history. +interface context-manager { + // Add a single message (proto-serialized). + add-message: func(message: list) -> result<_, string>; + + // Get all messages. Returns proto-serialized message list. + get-messages: func() -> result, string>; + + // Get messages for an LLM request (with budget/provider context). + // Input is proto-serialized GetMessagesForRequestParams. + get-messages-for-request: func(request: list) -> result, string>; + + // Replace all messages. Input is proto-serialized message list. 
+ set-messages: func(messages: list<u8>) -> result<_, string>; + + // Clear all messages. + clear: func() -> result<_, string>; +} + +// Approval provider interface — presents approval dialogs to users. +interface approval-provider { + // Request user approval. Input/output are proto-serialized. + request-approval: func(request: list<u8>) -> result<list<u8>, string>; +} + +// === Tier 2: Needs host capabilities === + +// Provider interface — LLM completions. +interface provider { + // Returns proto-serialized ProviderInfo. + get-info: func() -> list<u8>; + + // Returns proto-serialized list of ModelInfo. + list-models: func() -> result<list<u8>, string>; + + // Complete a chat request. Input/output are proto-serialized. + complete: func(request: list<u8>) -> result<list<u8>, string>; + + // Extract tool calls from a response. Input/output are proto-serialized. + parse-tool-calls: func(response: list<u8>) -> list<u8>; +} + +// Orchestrator interface — high-level prompt execution. +interface orchestrator { + // Execute the agent loop. Input is proto-serialized OrchestratorExecuteRequest. + // Returns response string on success. + execute: func(request: list<u8>) -> result<list<u8>, string>; +} + +// === Host-provided imports for Tier 2 modules === + +// Kernel callbacks — WASM equivalent of gRPC KernelService. +// The host (Rust kernel) implements these; the guest (WASM module) calls them. +interface kernel-service { + // Execute a tool by name. Input/output are proto-serialized. + execute-tool: func(name: string, input: list<u8>) -> result<list<u8>, string>; + + // Complete with a named provider. Input/output are proto-serialized. + complete-with-provider: func(name: string, request: list<u8>) -> result<list<u8>, string>; + + // Emit a hook event. Returns proto-serialized HookResult. + emit-hook: func(event: string, data: list<u8>) -> result<list<u8>, string>; + + // Get all messages from context. Returns proto-serialized message list. + get-messages: func() -> result<list<u8>, string>; + + // Add a message to context. Input is proto-serialized. 
+    add-message: func(message: list<u8>) -> result<_, string>;
+
+    // Get a named capability. Returns proto-serialized value.
+    get-capability: func(name: string) -> result<list<u8>, string>;
+
+    // Register a named capability. Value is proto-serialized.
+    register-capability: func(name: string, value: list<u8>) -> result<_, string>;
+}
+
+// === World definitions ===
+
+// Tier 1 worlds — no imports needed
+world tool-module {
+    export tool;
+}
+
+world hook-module {
+    export hook-handler;
+}
+
+world context-module {
+    export context-manager;
+}
+
+world approval-module {
+    export approval-provider;
+}
+
+// Tier 2 worlds — import host capabilities
+world provider-module {
+    import wasi:http/outgoing-handler@0.2.0;
+    export provider;
+}
+
+world orchestrator-module {
+    import kernel-service;
+    export orchestrator;
+}
+```
+
+### Step 2: Verify the file is valid WIT syntax
+
+Run:
+```bash
+cd amplifier-core
+cat wit/amplifier-modules.wit | head -5
+```
+Expected: The first 5 lines of the file you just created. (Full WIT validation requires `wasm-tools`, which we'll install later with `cargo component`. For now, verify the file exists and is readable.)
+
+### Step 3: Commit
+
+```bash
+cd amplifier-core
+git add wit/amplifier-modules.wit
+git commit -m "feat(wasm): add WIT interface definitions for all 6 module types
+
+Defines amplifier:modules@1.0.0 package with:
+- Tier 1: tool, hook-handler, context-manager, approval-provider
+- Tier 2: provider (with WASI HTTP import), orchestrator (with kernel-service import)
+- kernel-service host interface (7 callbacks matching KernelService gRPC)
+- World definitions for each module type
+
+All complex types use list<u8> (proto-serialized bytes) — same wire
+format as gRPC. Proto schema remains the single source of truth."
+```
+
+---
+
+## Task 1: Shared Wasmtime Engine Infrastructure
+
+**What:** Create a `WasmEngine` wrapper that holds a shared `Arc<wasmtime::Engine>`. Engine creation is expensive (~50ms); module instantiation is cheap (~1ms).
All WASM bridges share one engine.
+
+**Why:** The current `WasmToolBridge` stub creates a NEW `wasmtime::Engine` per bridge. That's wasteful. Every bridge in this plan will accept an `Arc<wasmtime::Engine>` from this shared wrapper.
+
+**Files:**
+- Create: `crates/amplifier-core/src/wasm_engine.rs`
+- Modify: `crates/amplifier-core/src/lib.rs` (add module declaration)
+- Modify: `crates/amplifier-core/Cargo.toml` (update wasmtime features)
+
+### Step 1: Write the test (inline in the new file)
+
+Create `crates/amplifier-core/src/wasm_engine.rs`:
+
+```rust
+//! Shared wasmtime Engine for WASM module loading.
+//!
+//! [`WasmEngine`] wraps a single `wasmtime::Engine` behind an `Arc` so all
+//! WASM bridge instances share the same engine. Engine creation is expensive
+//! (~50ms); module instantiation is cheap (~1ms).
+//!
+//! Gated behind the `wasm` feature flag.
+
+use std::sync::Arc;
+use wasmtime::Engine;
+
+/// Shared wasmtime engine wrapper.
+///
+/// Create one of these at application startup and pass it to all
+/// `load_wasm_*` functions. The engine is thread-safe (`Arc`).
+///
+/// # Example
+///
+/// ```rust,no_run
+/// use amplifier_core::wasm_engine::WasmEngine;
+///
+/// let engine = WasmEngine::new().expect("wasmtime engine creation failed");
+/// // Pass engine.inner() to bridge constructors
+/// ```
+#[derive(Clone)]
+pub struct WasmEngine {
+    engine: Arc<Engine>,
+}
+
+impl WasmEngine {
+    /// Create a new shared wasmtime engine with default configuration.
+    ///
+    /// Enables the Component Model, which is required for WIT-based WASM modules.
+    pub fn new() -> Result<Self, Box<dyn std::error::Error>> {
+        let mut config = wasmtime::Config::new();
+        config.wasm_component_model(true);
+        let engine = Engine::new(&config)?;
+        Ok(Self {
+            engine: Arc::new(engine),
+        })
+    }
+
+    /// Get a reference-counted handle to the inner wasmtime engine.
+    ///
+    /// Pass this to bridge constructors.
+    pub fn inner(&self) -> Arc<Engine> {
+        Arc::clone(&self.engine)
+    }
+}
+
+#[cfg(test)]
+mod tests {
+    use super::*;
+
+    #[test]
+    fn engine_creates_successfully() {
+        let engine = WasmEngine::new();
+        assert!(engine.is_ok(), "WasmEngine::new() should succeed");
+    }
+
+    #[test]
+    fn engine_clone_shares_same_arc() {
+        let engine1 = WasmEngine::new().unwrap();
+        let engine2 = engine1.clone();
+        // Both should point to the same inner Engine (Arc refcount = 2)
+        assert!(Arc::ptr_eq(&engine1.inner(), &engine2.inner()));
+    }
+
+    #[test]
+    fn engine_inner_returns_valid_arc() {
+        let engine = WasmEngine::new().unwrap();
+        let inner = engine.inner();
+        // Should have at least 2 strong references (one in WasmEngine, one returned)
+        assert!(Arc::strong_count(&inner) >= 2);
+    }
+}
+```
+
+### Step 2: Register the module in lib.rs
+
+Open `crates/amplifier-core/src/lib.rs`. Find the line:
+
+```rust
+pub mod transport;
+```
+
+Add immediately AFTER it:
+
+```rust
+#[cfg(feature = "wasm")]
+pub mod wasm_engine;
+```
+
+### Step 3: Update Cargo.toml for Component Model support
+
+Open `crates/amplifier-core/Cargo.toml`. The current wasmtime dependency is:
+
+```toml
+wasmtime = { version = "29", optional = true }
+```
+
+**After PR #35 merges**, this will be version 42. Replace it with:
+
+```toml
+wasmtime = { version = "42", optional = true, features = ["component-model"] }
+```
+
+> **NOTE TO IMPLEMENTER:** If PR #35 hasn't merged yet and you see `version = "29"`, STOP. Wait for PR #35 to merge. The `component-model` feature and `wasmtime::component::*` APIs don't exist in v29.
+
+### Step 4: Run the tests
+
+```bash
+cd amplifier-core
+cargo test -p amplifier-core --features wasm -- wasm_engine --verbose
+```
+
+Expected: 3 tests pass:
+```
+test wasm_engine::tests::engine_creates_successfully ... ok
+test wasm_engine::tests::engine_clone_shares_same_arc ... ok
+test wasm_engine::tests::engine_inner_returns_valid_arc ...
ok +``` + +### Step 5: Run clippy + +```bash +cd amplifier-core +cargo clippy -p amplifier-core --features wasm -- -D warnings +``` + +Expected: No warnings or errors. + +### Step 6: Commit + +```bash +cd amplifier-core +git add crates/amplifier-core/src/wasm_engine.rs crates/amplifier-core/src/lib.rs crates/amplifier-core/Cargo.toml +git commit -m "feat(wasm): add shared WasmEngine wrapper for wasmtime Component Model + +- WasmEngine holds Arc shared across all WASM bridges +- Enables component-model feature in wasmtime config +- Feature-gated behind #[cfg(feature = \"wasm\")] +- 3 unit tests for creation, clone sharing, and Arc validity" +``` + +--- + +## Task 2: Scaffold `amplifier-guest` Crate + +**What:** Create the new `crates/amplifier-guest/` crate. This is the SDK that WASM module authors depend on. It provides familiar Amplifier types and hides all WIT/proto plumbing. + +**Why:** Module authors should never see WIT or proto bytes directly. They `use amplifier_guest::Tool` and implement the same trait they'd use for a native module. + +**Files:** +- Create: `crates/amplifier-guest/Cargo.toml` +- Create: `crates/amplifier-guest/src/lib.rs` +- Create: `crates/amplifier-guest/src/types.rs` +- Create: `crates/amplifier-guest/wit/amplifier-modules.wit` (copy from `wit/`) +- Modify: `Cargo.toml` (workspace root — add member) + +### Step 1: Add workspace member + +Open the workspace root `Cargo.toml` (at `amplifier-core/Cargo.toml`). 
Find: + +```toml +[workspace] +members = [ + "crates/amplifier-core", + "bindings/python", +] +``` + +Add `"crates/amplifier-guest"` to the members list: + +```toml +[workspace] +members = [ + "crates/amplifier-core", + "crates/amplifier-guest", + "bindings/python", +] +``` + +### Step 2: Create Cargo.toml for the guest crate + +Create `crates/amplifier-guest/Cargo.toml`: + +```toml +[package] +name = "amplifier-guest" +version = "0.1.0" +edition = "2021" +description = "Guest SDK for writing Amplifier WASM modules" +license = "MIT" + +[dependencies] +# Proto serialization — same format as gRPC wire +prost = "0.13" + +# JSON handling for ToolSpec parameters, ToolResult output, etc. +serde = { version = "1", features = ["derive"] } +serde_json = "1" + +# WIT binding generation (guest side) +wit-bindgen = "0.41" +``` + +> **NOTE TO IMPLEMENTER:** The `wit-bindgen` version must match the wasmtime 42 ecosystem. If `0.41` doesn't compile, check the [wit-bindgen releases](https://github.com/bytecodealliance/wit-bindgen/releases) for the version compatible with wasmtime 42. + +### Step 3: Copy the WIT file into the guest crate + +```bash +cd amplifier-core +mkdir -p crates/amplifier-guest/wit +cp wit/amplifier-modules.wit crates/amplifier-guest/wit/amplifier-modules.wit +``` + +### Step 4: Create the types module + +Create `crates/amplifier-guest/src/types.rs`: + +```rust +//! Core data types for WASM module authors. +//! +//! These types mirror the native `amplifier_core` types (ToolSpec, ToolResult, +//! HookResult, etc.) but are standalone — the guest crate does NOT depend on +//! amplifier-core at runtime. +//! +//! Module authors work with these typed structs. The `export!` macro handles +//! proto serialization/deserialization behind the scenes. 
+
+use std::collections::HashMap;
+use serde::{Deserialize, Serialize};
+use serde_json::Value;
+
+// ---------------------------------------------------------------------------
+// Tool types
+// ---------------------------------------------------------------------------
+
+/// Tool specification — name, description, JSON Schema parameters.
+///
+/// Mirrors `amplifier_core::messages::ToolSpec`.
+#[derive(Debug, Clone, Serialize, Deserialize)]
+pub struct ToolSpec {
+    pub name: String,
+    pub parameters: HashMap<String, Value>,
+    #[serde(default, skip_serializing_if = "Option::is_none")]
+    pub description: Option<String>,
+}
+
+/// Result from tool execution.
+///
+/// Mirrors `amplifier_core::models::ToolResult`.
+#[derive(Debug, Clone, Serialize, Deserialize)]
+pub struct ToolResult {
+    #[serde(default = "default_true")]
+    pub success: bool,
+    #[serde(default)]
+    pub output: Option<Value>,
+    #[serde(default)]
+    pub error: Option<HashMap<String, Value>>,
+}
+
+fn default_true() -> bool {
+    true
+}
+
+impl Default for ToolResult {
+    fn default() -> Self {
+        Self {
+            success: true,
+            output: None,
+            error: None,
+        }
+    }
+}
+
+// ---------------------------------------------------------------------------
+// Hook types
+// ---------------------------------------------------------------------------
+
+/// Action type for hook results.
+///
+/// Mirrors `amplifier_core::models::HookAction`.
+#[derive(Debug, Clone, Default, PartialEq, Eq, Serialize, Deserialize)]
+#[serde(rename_all = "snake_case")]
+pub enum HookAction {
+    #[default]
+    Continue,
+    Deny,
+    Modify,
+    InjectContext,
+    AskUser,
+}
+
+/// Role for context injection messages.
+///
+/// Mirrors `amplifier_core::models::ContextInjectionRole`.
+#[derive(Debug, Clone, Default, PartialEq, Eq, Serialize, Deserialize)]
+#[serde(rename_all = "snake_case")]
+pub enum ContextInjectionRole {
+    #[default]
+    System,
+    User,
+    Assistant,
+}
+
+/// Default decision on approval timeout.
+///
+/// Mirrors `amplifier_core::models::ApprovalDefault`.
+#[derive(Debug, Clone, Default, PartialEq, Eq, Serialize, Deserialize)] +#[serde(rename_all = "snake_case")] +pub enum ApprovalDefault { + Allow, + #[default] + Deny, +} + +/// Severity level for user messages from hooks. +/// +/// Mirrors `amplifier_core::models::UserMessageLevel`. +#[derive(Debug, Clone, Default, PartialEq, Eq, Serialize, Deserialize)] +#[serde(rename_all = "snake_case")] +pub enum UserMessageLevel { + #[default] + Info, + Warning, + Error, +} + +/// Result from hook execution. +/// +/// Mirrors `amplifier_core::models::HookResult`. +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct HookResult { + #[serde(default)] + pub action: HookAction, + #[serde(default)] + pub data: Option>, + #[serde(default)] + pub reason: Option, + #[serde(default)] + pub context_injection: Option, + #[serde(default)] + pub context_injection_role: ContextInjectionRole, + #[serde(default)] + pub ephemeral: bool, + #[serde(default)] + pub approval_prompt: Option, + #[serde(default)] + pub approval_options: Option>, + #[serde(default = "default_approval_timeout")] + pub approval_timeout: f64, + #[serde(default)] + pub approval_default: ApprovalDefault, + #[serde(default)] + pub suppress_output: bool, + #[serde(default)] + pub user_message: Option, + #[serde(default)] + pub user_message_level: UserMessageLevel, + #[serde(default)] + pub user_message_source: Option, + #[serde(default)] + pub append_to_last_tool_result: bool, +} + +fn default_approval_timeout() -> f64 { + 300.0 +} + +impl Default for HookResult { + fn default() -> Self { + Self { + action: HookAction::default(), + data: None, + reason: None, + context_injection: None, + context_injection_role: ContextInjectionRole::default(), + ephemeral: false, + approval_prompt: None, + approval_options: None, + approval_timeout: default_approval_timeout(), + approval_default: ApprovalDefault::default(), + suppress_output: false, + user_message: None, + user_message_level: UserMessageLevel::default(), + 
user_message_source: None, + append_to_last_tool_result: false, + } + } +} + +// --------------------------------------------------------------------------- +// Approval types +// --------------------------------------------------------------------------- + +/// Request for user approval. +/// +/// Mirrors `amplifier_core::models::ApprovalRequest`. +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct ApprovalRequest { + pub tool_name: String, + pub action: String, + #[serde(default)] + pub details: HashMap, + pub risk_level: String, + #[serde(default)] + pub timeout: Option, +} + +/// Response to an approval request. +/// +/// Mirrors `amplifier_core::models::ApprovalResponse`. +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct ApprovalResponse { + pub approved: bool, + #[serde(default)] + pub reason: Option, + #[serde(default)] + pub remember: bool, +} + +// --------------------------------------------------------------------------- +// Provider types +// --------------------------------------------------------------------------- + +/// Provider metadata. +/// +/// Mirrors `amplifier_core::models::ProviderInfo`. +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct ProviderInfo { + pub id: String, + pub display_name: String, + #[serde(default)] + pub credential_env_vars: Vec, + #[serde(default)] + pub capabilities: Vec, + #[serde(default)] + pub defaults: HashMap, +} + +/// Model metadata. +/// +/// Mirrors `amplifier_core::models::ModelInfo`. 
+#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct ModelInfo { + pub id: String, + pub display_name: String, + pub context_window: i64, + pub max_output_tokens: i64, + #[serde(default)] + pub capabilities: Vec, + #[serde(default)] + pub defaults: HashMap, +} + +// --------------------------------------------------------------------------- +// Chat types (simplified for guest use) +// --------------------------------------------------------------------------- + +/// A chat request (proto-serialized across the boundary). +/// +/// Guest modules receive this as pre-serialized bytes via the `export!` macro. +/// For Provider modules that need to inspect the request, this provides +/// typed access. +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct ChatRequest { + pub messages: Vec, + #[serde(default)] + pub model: Option, + #[serde(default)] + pub temperature: Option, + #[serde(default)] + pub max_output_tokens: Option, + #[serde(flatten)] + pub extra: HashMap, +} + +/// A chat response. +/// +/// Mirrors `amplifier_core::messages::ChatResponse` (simplified). +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct ChatResponse { + pub content: Vec, + #[serde(default)] + pub tool_calls: Option>, + #[serde(default)] + pub finish_reason: Option, + #[serde(flatten)] + pub extra: HashMap, +} +``` + +### Step 5: Create lib.rs with re-exports + +Create `crates/amplifier-guest/src/lib.rs`: + +```rust +//! amplifier-guest: SDK for writing Amplifier WASM modules. +//! +//! This crate provides the types and macros needed to write a WASM module +//! that plugs into the Amplifier kernel. Module authors implement familiar +//! traits (Tool, HookHandler, etc.) and use the `export!` macro to generate +//! the WIT binding glue. +//! +//! # Example +//! +//! ```rust,ignore +//! use amplifier_guest::{Tool, ToolSpec, ToolResult}; +//! use serde_json::Value; +//! use std::collections::HashMap; +//! +//! struct EchoTool; +//! +//! impl Tool for EchoTool { +//! 
fn name(&self) -> &str { "echo" } +//! fn get_spec(&self) -> ToolSpec { +//! ToolSpec { +//! name: "echo".into(), +//! parameters: HashMap::new(), +//! description: Some("Echoes input back".into()), +//! } +//! } +//! fn execute(&self, input: Value) -> Result { +//! Ok(ToolResult { success: true, output: Some(input), error: None }) +//! } +//! } +//! +//! amplifier_guest::export_tool!(EchoTool); +//! ``` + +pub mod types; + +// Re-export all types at the crate root for convenience +pub use types::*; + +// Re-export serde_json::Value so module authors don't need a separate dependency +pub use serde_json::Value; +``` + +### Step 6: Verify the crate compiles + +```bash +cd amplifier-core +cargo check -p amplifier-guest +``` + +Expected: Compiles with no errors. There may be warnings about unused imports — that's fine at this stage. + +### Step 7: Commit + +```bash +cd amplifier-core +git add crates/amplifier-guest/ Cargo.toml +git commit -m "feat(wasm): scaffold amplifier-guest crate with core types + +New crate at crates/amplifier-guest/ for WASM module authors. +- types.rs: ToolSpec, ToolResult, HookResult, ApprovalRequest/Response, + ProviderInfo, ModelInfo, ChatRequest, ChatResponse +- All types mirror amplifier-core types (same field names, same serde) +- WIT file copied from wit/amplifier-modules.wit +- No traits or macros yet (added in Tasks 3-5)" +``` + +--- + +## Task 3: Guest Tool Trait + `export_tool!` Macro + +**What:** Add the `Tool` trait to the guest SDK and create the `export_tool!` macro that generates WIT binding glue. Module authors implement `Tool` and call `export_tool!(MyTool)` — the macro handles proto serialization and WIT export function generation. + +**Why:** This is the core developer experience. Module authors should never see WIT or proto bytes. 
+ +**Files:** +- Modify: `crates/amplifier-guest/src/lib.rs` + +### Step 1: Add the Tool trait and export macro to lib.rs + +Open `crates/amplifier-guest/src/lib.rs` and replace its entire content with: + +```rust +//! amplifier-guest: SDK for writing Amplifier WASM modules. +//! +//! This crate provides the types and macros needed to write a WASM module +//! that plugs into the Amplifier kernel. Module authors implement familiar +//! traits (Tool, HookHandler, etc.) and use the `export_tool!` (etc.) macro +//! to generate the WIT binding glue. +//! +//! # Example: Tool module +//! +//! ```rust,ignore +//! use amplifier_guest::{Tool, ToolSpec, ToolResult}; +//! use serde_json::Value; +//! use std::collections::HashMap; +//! +//! struct EchoTool; +//! +//! impl Tool for EchoTool { +//! fn name(&self) -> &str { "echo" } +//! fn get_spec(&self) -> ToolSpec { +//! ToolSpec { +//! name: "echo".into(), +//! parameters: HashMap::new(), +//! description: Some("Echoes input back".into()), +//! } +//! } +//! fn execute(&self, input: Value) -> Result { +//! Ok(ToolResult { success: true, output: Some(input), error: None }) +//! } +//! } +//! +//! amplifier_guest::export_tool!(EchoTool); +//! ``` + +pub mod types; + +// Re-export all types at the crate root for convenience +pub use types::*; + +// Re-export serde_json::Value so module authors don't need a separate dependency +pub use serde_json::Value; + +// --------------------------------------------------------------------------- +// Guest traits — sync versions of the amplifier-core async traits +// --------------------------------------------------------------------------- + +/// Tool trait for WASM guest modules. +/// +/// Same method names as `amplifier_core::traits::Tool` but synchronous +/// (WASM execution is single-threaded from the guest's perspective). +pub trait Tool { + /// Unique name of this tool. + fn name(&self) -> &str; + + /// Return the tool's specification (name, parameters, description). 
+ fn get_spec(&self) -> ToolSpec; + + /// Execute the tool with JSON input. Returns ToolResult or error string. + fn execute(&self, input: Value) -> Result; +} + +// --------------------------------------------------------------------------- +// export_tool! macro +// --------------------------------------------------------------------------- + +/// Generate WIT export bindings for a Tool implementation. +/// +/// This macro creates the extern functions that wasmtime calls when the host +/// invokes the WASM component's `tool` interface. It handles: +/// - Serializing `ToolSpec` to JSON bytes for `get-spec` +/// - Deserializing JSON bytes to `Value` for `execute` input +/// - Serializing `ToolResult` to JSON bytes for `execute` output +/// +/// # Usage +/// +/// ```rust,ignore +/// struct MyTool; +/// impl amplifier_guest::Tool for MyTool { /* ... */ } +/// amplifier_guest::export_tool!(MyTool); +/// ``` +#[macro_export] +macro_rules! export_tool { + ($tool_type:ty) => { + // Static instance of the tool — created once, reused for all calls. + static TOOL_INSTANCE: std::sync::OnceLock<$tool_type> = std::sync::OnceLock::new(); + + fn get_tool() -> &'static $tool_type { + TOOL_INSTANCE.get_or_init(|| <$tool_type>::default()) + } + + // WIT export: get-spec() -> list + #[no_mangle] + pub extern "C" fn __amplifier_tool_get_spec_len() -> u32 { + let tool = get_tool(); + let spec = <$tool_type as $crate::Tool>::get_spec(tool); + let bytes = serde_json::to_vec(&spec).unwrap_or_default(); + bytes.len() as u32 + } + + // The actual export function that the WIT bindgen on the host side calls. + // For the MVP, we use JSON serialization (not proto) across the boundary + // because both sides already have serde_json. Proto conversion happens + // on the host side where prost + amplifier_module proto types are available. 
+ #[no_mangle] + pub extern "C" fn __amplifier_tool_get_spec(ptr: *mut u8) { + let tool = get_tool(); + let spec = <$tool_type as $crate::Tool>::get_spec(tool); + let bytes = serde_json::to_vec(&spec).unwrap_or_default(); + unsafe { + std::ptr::copy_nonoverlapping(bytes.as_ptr(), ptr, bytes.len()); + } + } + + // NOTE: The actual WIT component model export mechanism uses + // wit-bindgen generated code, not raw #[no_mangle] externs. + // The above is a simplified illustration. The real implementation + // will use wit-bindgen's `generate!` macro once the guest crate + // build pipeline is established. See the build-fixtures.sh script + // in Task 19 for the full compilation flow. + }; +} +``` + +> **IMPORTANT NOTE FOR IMPLEMENTER:** The `export_tool!` macro above is a **simplified scaffold**. The real Component Model export mechanism uses `wit-bindgen::generate!` to create proper component exports. The exact macro internals will need adjustment when you compile the first test fixture (Task 6) and discover the precise wit-bindgen API. The *interface* to the module author (implement `Tool`, call `export_tool!`) will NOT change — only the macro internals. This is why we build test fixtures (Task 6) immediately after the guest SDK — to validate the macro works end-to-end. + +### Step 2: Verify compilation + +```bash +cd amplifier-core +cargo check -p amplifier-guest +``` + +Expected: Compiles with no errors. + +### Step 3: Commit + +```bash +cd amplifier-core +git add crates/amplifier-guest/src/lib.rs +git commit -m "feat(wasm): add guest Tool trait and export_tool! macro + +- Tool trait: sync version of amplifier_core::Tool (name, get_spec, execute) +- export_tool! macro: generates WIT export bindings for Tool implementations +- Module authors implement Tool + call export_tool!(MyTool) — no WIT/proto exposure" +``` + +--- + +## Task 4: Guest Traits for HookHandler, ContextManager, ApprovalProvider + +**What:** Add Tier 1 guest traits and their `export_*!` macros. 
Same pattern as Task 3 but for the three remaining pure-compute module types. + +**Files:** +- Modify: `crates/amplifier-guest/src/lib.rs` + +### Step 1: Add the three Tier 1 traits and macros + +Open `crates/amplifier-guest/src/lib.rs`. Add the following AFTER the `export_tool!` macro definition (before the closing of the file): + +```rust +// --------------------------------------------------------------------------- +// HookHandler trait +// --------------------------------------------------------------------------- + +/// HookHandler trait for WASM guest modules. +/// +/// Same method names as `amplifier_core::traits::HookHandler` but synchronous. +pub trait HookHandler { + /// Handle a lifecycle event. Returns HookResult or error string. + fn handle(&self, event: &str, data: Value) -> Result; +} + +/// Generate WIT export bindings for a HookHandler implementation. +#[macro_export] +macro_rules! export_hook { + ($hook_type:ty) => { + // Placeholder — real implementation uses wit-bindgen generate! + // Same pattern as export_tool! but for the hook-handler interface. + static HOOK_INSTANCE: std::sync::OnceLock<$hook_type> = std::sync::OnceLock::new(); + + fn get_hook() -> &'static $hook_type { + HOOK_INSTANCE.get_or_init(|| <$hook_type>::default()) + } + }; +} + +// --------------------------------------------------------------------------- +// ContextManager trait +// --------------------------------------------------------------------------- + +/// ContextManager trait for WASM guest modules. +/// +/// Same method names as `amplifier_core::traits::ContextManager` but synchronous. +/// Messages are represented as `serde_json::Value` (JSON objects). +pub trait ContextManager { + /// Add a message to context. + fn add_message(&self, message: Value) -> Result<(), String>; + + /// Get all messages (raw, uncompacted). + fn get_messages(&self) -> Result, String>; + + /// Get messages ready for an LLM request (with optional budget). 
+ fn get_messages_for_request(&self, request: Value) -> Result, String>; + + /// Replace all messages. + fn set_messages(&self, messages: Vec) -> Result<(), String>; + + /// Clear all messages. + fn clear(&self) -> Result<(), String>; +} + +/// Generate WIT export bindings for a ContextManager implementation. +#[macro_export] +macro_rules! export_context { + ($ctx_type:ty) => { + // Placeholder — real implementation uses wit-bindgen generate! + static CTX_INSTANCE: std::sync::OnceLock<$ctx_type> = std::sync::OnceLock::new(); + + fn get_context() -> &'static $ctx_type { + CTX_INSTANCE.get_or_init(|| <$ctx_type>::default()) + } + }; +} + +// --------------------------------------------------------------------------- +// ApprovalProvider trait +// --------------------------------------------------------------------------- + +/// ApprovalProvider trait for WASM guest modules. +/// +/// Same method names as `amplifier_core::traits::ApprovalProvider` but synchronous. +pub trait ApprovalProvider { + /// Request user approval. Returns ApprovalResponse or error string. + fn request_approval(&self, request: ApprovalRequest) -> Result; +} + +/// Generate WIT export bindings for an ApprovalProvider implementation. +#[macro_export] +macro_rules! export_approval { + ($approval_type:ty) => { + // Placeholder — real implementation uses wit-bindgen generate! + static APPROVAL_INSTANCE: std::sync::OnceLock<$approval_type> = std::sync::OnceLock::new(); + + fn get_approval() -> &'static $approval_type { + APPROVAL_INSTANCE.get_or_init(|| <$approval_type>::default()) + } + }; +} +``` + +### Step 2: Verify compilation + +```bash +cd amplifier-core +cargo check -p amplifier-guest +``` + +Expected: Compiles with no errors. 
+ +### Step 3: Commit + +```bash +cd amplifier-core +git add crates/amplifier-guest/src/lib.rs +git commit -m "feat(wasm): add guest HookHandler, ContextManager, ApprovalProvider traits + +- HookHandler: handle(event, data) -> HookResult +- ContextManager: add_message, get_messages, get_messages_for_request, set_messages, clear +- ApprovalProvider: request_approval(ApprovalRequest) -> ApprovalResponse +- export_hook!, export_context!, export_approval! macros (scaffolds)" +``` + +--- + +## Task 5: Guest Traits for Provider + Orchestrator (Tier 2) + +**What:** Add Provider and Orchestrator guest traits plus kernel-service import wrappers. These are Tier 2 — they need host capabilities (HTTP for Provider, kernel callbacks for Orchestrator). + +**Files:** +- Modify: `crates/amplifier-guest/src/lib.rs` + +### Step 1: Add Tier 2 traits and kernel module + +Open `crates/amplifier-guest/src/lib.rs`. Add the following AFTER the `export_approval!` macro: + +```rust +// --------------------------------------------------------------------------- +// Provider trait (Tier 2 — needs WASI HTTP) +// --------------------------------------------------------------------------- + +/// Provider trait for WASM guest modules. +/// +/// Same method names as `amplifier_core::traits::Provider` but synchronous. +pub trait Provider { + /// Provider identifier. + fn name(&self) -> &str; + + /// Return provider metadata. + fn get_info(&self) -> ProviderInfo; + + /// List available models. + fn list_models(&self) -> Result, String>; + + /// Generate a completion from a chat request. + fn complete(&self, request: Value) -> Result; + + /// Extract tool calls from a response. + fn parse_tool_calls(&self, response: &ChatResponse) -> Vec; +} + +/// Generate WIT export bindings for a Provider implementation. +#[macro_export] +macro_rules! export_provider { + ($provider_type:ty) => { + // Placeholder — real implementation uses wit-bindgen generate! 
+ static PROVIDER_INSTANCE: std::sync::OnceLock<$provider_type> = std::sync::OnceLock::new(); + + fn get_provider() -> &'static $provider_type { + PROVIDER_INSTANCE.get_or_init(|| <$provider_type>::default()) + } + }; +} + +// --------------------------------------------------------------------------- +// Orchestrator trait (Tier 2 — needs kernel-service imports) +// --------------------------------------------------------------------------- + +/// Orchestrator trait for WASM guest modules. +/// +/// Same method names as `amplifier_core::traits::Orchestrator` but synchronous. +/// The orchestrator calls back to the host via `amplifier_guest::kernel::*` +/// functions (which wrap the kernel-service WIT imports). +pub trait Orchestrator { + /// Execute the agent loop for a single prompt. + fn execute(&self, prompt: String) -> Result; +} + +/// Generate WIT export bindings for an Orchestrator implementation. +#[macro_export] +macro_rules! export_orchestrator { + ($orch_type:ty) => { + // Placeholder — real implementation uses wit-bindgen generate! + static ORCH_INSTANCE: std::sync::OnceLock<$orch_type> = std::sync::OnceLock::new(); + + fn get_orchestrator() -> &'static $orch_type { + ORCH_INSTANCE.get_or_init(|| <$orch_type>::default()) + } + }; +} + +// --------------------------------------------------------------------------- +// Kernel service imports (host callbacks for Tier 2 modules) +// --------------------------------------------------------------------------- + +/// Typed wrappers around the kernel-service WIT host imports. +/// +/// Orchestrator and Provider modules call these functions to access +/// kernel capabilities (execute tools, complete with providers, etc.). 
+/// +/// # Example +/// +/// ```rust,ignore +/// use amplifier_guest::kernel; +/// +/// // Inside an Orchestrator::execute() implementation: +/// let tool_result = kernel::execute_tool("echo", &serde_json::json!({"text": "hello"}))?; +/// let chat_response = kernel::complete_with_provider("anthropic", &chat_request)?; +/// ``` +pub mod kernel { + use serde_json::Value; + use crate::types::{ToolResult, HookResult}; + + /// Execute a tool by name via the kernel. + /// + /// Wraps the `kernel-service.execute-tool` WIT import. + pub fn execute_tool(name: &str, input: &Value) -> Result { + // Placeholder — real implementation calls the WIT import function + // generated by wit-bindgen. The function: + // 1. Serializes `input` to JSON bytes + // 2. Calls the host's execute-tool(name, bytes) + // 3. Deserializes the returned bytes to ToolResult + let _ = (name, input); + Err("kernel::execute_tool not yet wired to WIT imports".into()) + } + + /// Complete a chat request with a named provider via the kernel. + /// + /// Wraps the `kernel-service.complete-with-provider` WIT import. + pub fn complete_with_provider(name: &str, request: &Value) -> Result { + let _ = (name, request); + Err("kernel::complete_with_provider not yet wired to WIT imports".into()) + } + + /// Emit a hook event via the kernel. + /// + /// Wraps the `kernel-service.emit-hook` WIT import. + pub fn emit_hook(event: &str, data: &Value) -> Result { + let _ = (event, data); + Err("kernel::emit_hook not yet wired to WIT imports".into()) + } + + /// Get all messages from the kernel's context manager. + /// + /// Wraps the `kernel-service.get-messages` WIT import. + pub fn get_messages() -> Result, String> { + Err("kernel::get_messages not yet wired to WIT imports".into()) + } + + /// Add a message to the kernel's context manager. + /// + /// Wraps the `kernel-service.add-message` WIT import. 
+ pub fn add_message(message: &Value) -> Result<(), String> { + let _ = message; + Err("kernel::add_message not yet wired to WIT imports".into()) + } + + /// Get a named capability from the kernel. + /// + /// Wraps the `kernel-service.get-capability` WIT import. + pub fn get_capability(name: &str) -> Result { + let _ = name; + Err("kernel::get_capability not yet wired to WIT imports".into()) + } + + /// Register a named capability with the kernel. + /// + /// Wraps the `kernel-service.register-capability` WIT import. + pub fn register_capability(name: &str, value: &Value) -> Result<(), String> { + let _ = (name, value); + Err("kernel::register_capability not yet wired to WIT imports".into()) + } +} +``` + +### Step 2: Verify compilation + +```bash +cd amplifier-core +cargo check -p amplifier-guest +``` + +Expected: Compiles with no errors. + +### Step 3: Commit + +```bash +cd amplifier-core +git add crates/amplifier-guest/src/lib.rs +git commit -m "feat(wasm): add guest Provider, Orchestrator traits + kernel service imports + +- Provider: name, get_info, list_models, complete, parse_tool_calls +- Orchestrator: execute(prompt) -> String +- kernel module: execute_tool, complete_with_provider, emit_hook, + get_messages, add_message, get_capability, register_capability +- export_provider!, export_orchestrator! macros (scaffolds) +- Kernel functions are placeholders until WIT import wiring (Task 15)" +``` + +--- + +## Task 6: Echo-Tool Test Fixture + +**What:** Create a minimal Rust crate that implements `amplifier_guest::Tool`, compile it to `.wasm`, and commit the binary as a test fixture. This is the first end-to-end validation of the guest SDK. + +**Why:** We need real `.wasm` files to test the bridges in Tasks 10-13. This fixture echoes its input back as output — the simplest possible tool. 
+ +**Files:** +- Create: `tests/fixtures/wasm/src/echo-tool/Cargo.toml` +- Create: `tests/fixtures/wasm/src/echo-tool/src/lib.rs` +- Create: `tests/fixtures/wasm/echo-tool.wasm` (compiled binary) + +### Step 1: Install cargo-component (if not already installed) + +```bash +cargo install cargo-component +``` + +> **NOTE:** If this fails, check [cargo-component releases](https://github.com/bytecodealliance/cargo-component/releases) for version compatibility with wasmtime 42. + +### Step 2: Create the echo-tool Cargo.toml + +Create `tests/fixtures/wasm/src/echo-tool/Cargo.toml`: + +```toml +[package] +name = "echo-tool" +version = "0.1.0" +edition = "2021" + +[lib] +crate-type = ["cdylib"] + +[dependencies] +amplifier-guest = { path = "../../../../crates/amplifier-guest" } +serde_json = "1" + +[package.metadata.component] +package = "amplifier:echo-tool" + +[package.metadata.component.target] +world = "tool-module" +path = "../../../../wit/amplifier-modules.wit" +``` + +### Step 3: Create the echo-tool implementation + +Create `tests/fixtures/wasm/src/echo-tool/src/lib.rs`: + +```rust +//! Echo tool — returns input as output. +//! +//! Minimal test fixture for validating the WASM Tool bridge. 
+ +use amplifier_guest::{Tool, ToolSpec, ToolResult, Value}; +use std::collections::HashMap; + +#[derive(Default)] +struct EchoTool; + +impl Tool for EchoTool { + fn name(&self) -> &str { + "echo-tool" + } + + fn get_spec(&self) -> ToolSpec { + ToolSpec { + name: "echo-tool".into(), + parameters: { + let mut params = HashMap::new(); + params.insert("type".into(), serde_json::json!("object")); + params.insert( + "properties".into(), + serde_json::json!({"input": {"type": "string"}}), + ); + params + }, + description: Some("Echoes input back as output".into()), + } + } + + fn execute(&self, input: Value) -> Result { + Ok(ToolResult { + success: true, + output: Some(input), + error: None, + }) + } +} + +amplifier_guest::export_tool!(EchoTool); +``` + +### Step 4: Compile to WASM + +```bash +cd amplifier-core/tests/fixtures/wasm/src/echo-tool +cargo component build --release +``` + +Expected: Produces `target/wasm32-wasip2/release/echo_tool.wasm` + +> **TROUBLESHOOTING:** If `cargo component` fails because the `export_tool!` macro doesn't match the wit-bindgen component model expectations, you'll need to update the macro internals in `crates/amplifier-guest/src/lib.rs`. The macro's *internal implementation* may need to use `wit_bindgen::generate!` instead of raw `#[no_mangle]` externs. Adjust until the `.wasm` file compiles. The module author interface (implement `Tool`, call `export_tool!`) should NOT change. + +### Step 5: Copy the .wasm binary to fixtures + +```bash +cp target/wasm32-wasip2/release/echo_tool.wasm ../../../../tests/fixtures/wasm/echo-tool.wasm +``` + +> **NOTE:** The exact output path depends on `cargo component`'s target directory. Adjust as needed. Use `find target -name "*.wasm"` to locate it. + +### Step 6: Verify the .wasm file exists and has non-zero size + +```bash +cd amplifier-core +ls -la tests/fixtures/wasm/echo-tool.wasm +``` + +Expected: File exists, size > 1000 bytes. 
+ +### Step 7: Commit + +```bash +cd amplifier-core +git add tests/fixtures/wasm/src/echo-tool/ tests/fixtures/wasm/echo-tool.wasm +git commit -m "test(wasm): add echo-tool test fixture + +Minimal WASM Tool module compiled from amplifier-guest SDK. +- Implements Tool trait (name, get_spec, execute) +- execute() echoes input back as output +- Pre-compiled .wasm binary committed for E2E tests" +``` + +--- + +## Task 7: Deny-Hook Test Fixture + +**What:** Create a HookHandler fixture that always returns `HookAction::Deny`. Compile to `.wasm`. + +**Files:** +- Create: `tests/fixtures/wasm/src/deny-hook/Cargo.toml` +- Create: `tests/fixtures/wasm/src/deny-hook/src/lib.rs` +- Create: `tests/fixtures/wasm/deny-hook.wasm` + +### Step 1: Create Cargo.toml + +Create `tests/fixtures/wasm/src/deny-hook/Cargo.toml`: + +```toml +[package] +name = "deny-hook" +version = "0.1.0" +edition = "2021" + +[lib] +crate-type = ["cdylib"] + +[dependencies] +amplifier-guest = { path = "../../../../crates/amplifier-guest" } +serde_json = "1" + +[package.metadata.component] +package = "amplifier:deny-hook" + +[package.metadata.component.target] +world = "hook-module" +path = "../../../../wit/amplifier-modules.wit" +``` + +### Step 2: Create the implementation + +Create `tests/fixtures/wasm/src/deny-hook/src/lib.rs`: + +```rust +//! Deny hook — always returns HookAction::Deny. +//! +//! Test fixture for validating the WASM HookHandler bridge. 
+ +use amplifier_guest::{HookHandler, HookResult, HookAction, Value}; + +#[derive(Default)] +struct DenyHook; + +impl HookHandler for DenyHook { + fn handle(&self, _event: &str, _data: Value) -> Result { + Ok(HookResult { + action: HookAction::Deny, + reason: Some("Denied by WASM hook".into()), + ..Default::default() + }) + } +} + +amplifier_guest::export_hook!(DenyHook); +``` + +### Step 3: Compile and copy + +```bash +cd amplifier-core/tests/fixtures/wasm/src/deny-hook +cargo component build --release +# Copy the .wasm to the fixtures directory (adjust path as needed) +cp target/wasm32-wasip2/release/deny_hook.wasm ../../deny-hook.wasm +``` + +### Step 4: Commit + +```bash +cd amplifier-core +git add tests/fixtures/wasm/src/deny-hook/ tests/fixtures/wasm/deny-hook.wasm +git commit -m "test(wasm): add deny-hook test fixture + +WASM HookHandler that always returns HookAction::Deny. +Pre-compiled .wasm binary committed for E2E tests." +``` + +--- + +## Task 8: Memory-Context Test Fixture + +**What:** Create a ContextManager fixture with an in-memory `Vec` message store. Tests stateful multi-call WASM modules. + +**Files:** +- Create: `tests/fixtures/wasm/src/memory-context/Cargo.toml` +- Create: `tests/fixtures/wasm/src/memory-context/src/lib.rs` +- Create: `tests/fixtures/wasm/memory-context.wasm` + +### Step 1: Create Cargo.toml + +Create `tests/fixtures/wasm/src/memory-context/Cargo.toml`: + +```toml +[package] +name = "memory-context" +version = "0.1.0" +edition = "2021" + +[lib] +crate-type = ["cdylib"] + +[dependencies] +amplifier-guest = { path = "../../../../crates/amplifier-guest" } +serde_json = "1" + +[package.metadata.component] +package = "amplifier:memory-context" + +[package.metadata.component.target] +world = "context-module" +path = "../../../../wit/amplifier-modules.wit" +``` + +### Step 2: Create the implementation + +Create `tests/fixtures/wasm/src/memory-context/src/lib.rs`: + +```rust +//! Memory context — in-memory message store. +//! +//! 
Test fixture for validating the WASM ContextManager bridge. +//! Uses a static Vec to persist messages across calls +//! (WASM module instances are persistent within a Store). + +use amplifier_guest::{ContextManager, Value}; +use std::sync::Mutex; + +static MESSAGES: Mutex> = Mutex::new(Vec::new()); + +#[derive(Default)] +struct MemoryContext; + +impl ContextManager for MemoryContext { + fn add_message(&self, message: Value) -> Result<(), String> { + MESSAGES + .lock() + .map_err(|e| format!("lock poisoned: {e}"))? + .push(message); + Ok(()) + } + + fn get_messages(&self) -> Result, String> { + Ok(MESSAGES + .lock() + .map_err(|e| format!("lock poisoned: {e}"))? + .clone()) + } + + fn get_messages_for_request(&self, _request: Value) -> Result, String> { + // Simple implementation: return all messages (no budget trimming) + self.get_messages() + } + + fn set_messages(&self, messages: Vec) -> Result<(), String> { + *MESSAGES + .lock() + .map_err(|e| format!("lock poisoned: {e}"))? = messages; + Ok(()) + } + + fn clear(&self) -> Result<(), String> { + MESSAGES + .lock() + .map_err(|e| format!("lock poisoned: {e}"))? + .clear(); + Ok(()) + } +} + +amplifier_guest::export_context!(MemoryContext); +``` + +### Step 3: Compile and copy + +```bash +cd amplifier-core/tests/fixtures/wasm/src/memory-context +cargo component build --release +cp target/wasm32-wasip2/release/memory_context.wasm ../../memory-context.wasm +``` + +### Step 4: Commit + +```bash +cd amplifier-core +git add tests/fixtures/wasm/src/memory-context/ tests/fixtures/wasm/memory-context.wasm +git commit -m "test(wasm): add memory-context test fixture + +WASM ContextManager with in-memory Vec store. +Tests stateful multi-call WASM modules (add, get, set, clear). +Pre-compiled .wasm binary committed for E2E tests." +``` + +--- + +## Task 9: Auto-Approve Test Fixture + +**What:** Create an ApprovalProvider fixture that always approves. Compile to `.wasm`. 
+ +**Files:** +- Create: `tests/fixtures/wasm/src/auto-approve/Cargo.toml` +- Create: `tests/fixtures/wasm/src/auto-approve/src/lib.rs` +- Create: `tests/fixtures/wasm/auto-approve.wasm` + +### Step 1: Create Cargo.toml + +Create `tests/fixtures/wasm/src/auto-approve/Cargo.toml`: + +```toml +[package] +name = "auto-approve" +version = "0.1.0" +edition = "2021" + +[lib] +crate-type = ["cdylib"] + +[dependencies] +amplifier-guest = { path = "../../../../crates/amplifier-guest" } +serde_json = "1" + +[package.metadata.component] +package = "amplifier:auto-approve" + +[package.metadata.component.target] +world = "approval-module" +path = "../../../../wit/amplifier-modules.wit" +``` + +### Step 2: Create the implementation + +Create `tests/fixtures/wasm/src/auto-approve/src/lib.rs`: + +```rust +//! Auto-approve — always approves requests. +//! +//! Test fixture for validating the WASM ApprovalProvider bridge. + +use amplifier_guest::{ApprovalProvider, ApprovalRequest, ApprovalResponse}; + +#[derive(Default)] +struct AutoApprove; + +impl ApprovalProvider for AutoApprove { + fn request_approval(&self, _request: ApprovalRequest) -> Result { + Ok(ApprovalResponse { + approved: true, + reason: Some("Auto-approved by WASM module".into()), + remember: false, + }) + } +} + +amplifier_guest::export_approval!(AutoApprove); +``` + +### Step 3: Compile and copy + +```bash +cd amplifier-core/tests/fixtures/wasm/src/auto-approve +cargo component build --release +cp target/wasm32-wasip2/release/auto_approve.wasm ../../auto-approve.wasm +``` + +### Step 4: Commit + +```bash +cd amplifier-core +git add tests/fixtures/wasm/src/auto-approve/ tests/fixtures/wasm/auto-approve.wasm +git commit -m "test(wasm): add auto-approve test fixture + +WASM ApprovalProvider that always returns approved=true. +Pre-compiled .wasm binary committed for E2E tests." 
+```
+
+---
+
+## Task 10: Rewrite `WasmToolBridge`
+
+**What:** Replace the stub `WasmToolBridge` (98 lines, `execute()` returns hard error) with a real Component Model implementation that loads the `echo-tool.wasm` fixture and passes end-to-end tests.
+
+**Why:** This is the first real bridge. It sets the pattern that Tasks 11-13 and 16-17 will follow.
+
+**Files:**
+- Rewrite: `crates/amplifier-core/src/bridges/wasm_tool.rs`
+- Modify: `crates/amplifier-core/src/bridges/mod.rs` (already has `wasm_tool` — no change needed)
+
+### Step 1: Write the failing E2E test first
+
+Before rewriting the bridge, add a test that loads the echo-tool.wasm fixture and calls `execute()`. This test will FAIL with the current stub.
+
+Create or update the test at the BOTTOM of `crates/amplifier-core/src/bridges/wasm_tool.rs`, replacing the existing `#[cfg(test)] mod tests` block:
+
+```rust
+#[cfg(test)]
+mod tests {
+    use super::*;
+    use std::sync::Arc;
+
+    #[allow(dead_code)]
+    fn assert_tool_trait_object(_: Arc<dyn Tool>) {}
+
+    /// Compile-time check: WasmToolBridge satisfies Arc<dyn Tool>.
+ #[allow(dead_code)] + fn wasm_tool_bridge_is_tool() { + fn _check(bridge: WasmToolBridge) { + assert_tool_trait_object(Arc::new(bridge)); + } + } + + #[test] + fn load_echo_tool_from_bytes() { + let wasm_bytes = std::fs::read("tests/fixtures/wasm/echo-tool.wasm") + .expect("echo-tool.wasm fixture not found — run Task 6 first"); + + let engine = crate::wasm_engine::WasmEngine::new().unwrap(); + let bridge = WasmToolBridge::from_bytes(&wasm_bytes, engine.inner()) + .expect("should load echo-tool.wasm"); + + assert_eq!(bridge.name(), "echo-tool"); + let spec = bridge.get_spec(); + assert_eq!(spec.name, "echo-tool"); + assert!(spec.description.is_some()); + } + + #[tokio::test] + async fn echo_tool_execute_roundtrip() { + let wasm_bytes = std::fs::read("tests/fixtures/wasm/echo-tool.wasm") + .expect("echo-tool.wasm fixture not found — run Task 6 first"); + + let engine = crate::wasm_engine::WasmEngine::new().unwrap(); + let bridge = WasmToolBridge::from_bytes(&wasm_bytes, engine.inner()).unwrap(); + + let input = serde_json::json!({"message": "hello from test"}); + let result = bridge.execute(input.clone()).await; + let result = result.expect("execute should succeed"); + + assert!(result.success); + assert_eq!(result.output, Some(input)); + } +} +``` + +### Step 2: Run the tests to see them fail + +```bash +cd amplifier-core +cargo test -p amplifier-core --features wasm -- wasm_tool --verbose +``` + +Expected: Tests FAIL because `from_bytes` only accepts 1 argument in the current stub, and `execute()` returns a hard error. + +### Step 3: Rewrite the bridge implementation + +Replace the ENTIRE content of `crates/amplifier-core/src/bridges/wasm_tool.rs` with: + +```rust +//! WASM bridge for sandboxed tool modules. +//! +//! [`WasmToolBridge`] loads a compiled WASM component via wasmtime's +//! Component Model and implements the [`Tool`] trait, enabling sandboxed +//! in-process tool execution. +//! +//! Uses JSON serialization across the WASM boundary (list in WIT). 
+//! The host serializes inputs to JSON bytes, calls the WASM export, +//! and deserializes the JSON bytes result. +//! +//! Gated behind the `wasm` feature flag. + +use std::collections::HashMap; +use std::future::Future; +use std::pin::Pin; +use std::sync::Arc; + +use serde_json::Value; +use wasmtime::component::{Component, Linker, Val}; +use wasmtime::{Engine, Store}; + +use crate::errors::ToolError; +use crate::messages::ToolSpec; +use crate::models::ToolResult; +use crate::traits::Tool; + +/// WASM state stored in the wasmtime Store. +/// +/// Currently empty — Tier 1 modules don't need host state. +/// Tier 2 modules (Provider, Orchestrator) will add fields here. +pub(crate) struct WasmState; + +/// A bridge that loads a WASM component and exposes it as a native [`Tool`]. +/// +/// The WASM component is compiled once via wasmtime and instantiated +/// per call. Uses JSON serialization across the WASM boundary (same +/// data format as the guest SDK types). +/// +/// # Pattern +/// +/// Follows the same pattern as [`GrpcToolBridge`](super::grpc_tool::GrpcToolBridge): +/// hold client/instance, serialize inputs, call export, deserialize result, +/// implement the `Tool` trait. The key difference: no network, no process +/// management. The `.wasm` binary is loaded in-process. +pub struct WasmToolBridge { + engine: Arc, + component: Component, + name: String, + spec: ToolSpec, +} + +impl WasmToolBridge { + /// Load a WASM tool from raw bytes. + /// + /// Compiles the WASM component, instantiates it once to call `get-spec`, + /// caches the name and spec, then stores the component for future + /// `execute()` calls. + /// + /// # Arguments + /// + /// * `wasm_bytes` — Raw `.wasm` component binary. + /// * `engine` — Shared wasmtime engine (from `WasmEngine::inner()`). 
+ pub fn from_bytes( + wasm_bytes: &[u8], + engine: Arc, + ) -> Result> { + let component = Component::new(&engine, wasm_bytes)?; + + // Create a linker and store to call get-spec once at load time + let linker: Linker = Linker::new(&engine); + let mut store = Store::new(&engine, WasmState); + let instance = linker.instantiate(&mut store, &component)?; + + // Call the get-spec export to cache the tool's name and spec + let get_spec_fn = instance + .get_typed_func::<(), (Vec,)>(&mut store, "get-spec") + .or_else(|_| { + // Try the interface-qualified name + instance.get_typed_func::<(), (Vec,)>( + &mut store, + "amplifier:modules/tool#get-spec", + ) + })?; + + let (spec_bytes,) = get_spec_fn.call(&mut store, ())?; + + let spec: ToolSpec = serde_json::from_slice(&spec_bytes).map_err(|e| { + format!("Failed to deserialize ToolSpec from WASM module: {e}") + })?; + + let name = spec.name.clone(); + + Ok(Self { + engine, + component, + name, + spec, + }) + } + + /// Load a WASM tool from a file path. 
+ pub fn from_file( + path: &std::path::Path, + engine: Arc, + ) -> Result> { + let wasm_bytes = std::fs::read(path)?; + Self::from_bytes(&wasm_bytes, engine) + } +} + +impl Tool for WasmToolBridge { + fn name(&self) -> &str { + &self.name + } + + fn description(&self) -> &str { + self.spec.description.as_deref().unwrap_or("WASM tool module") + } + + fn get_spec(&self) -> ToolSpec { + self.spec.clone() + } + + fn execute( + &self, + input: Value, + ) -> Pin> + Send + '_>> { + Box::pin(async move { + // Clone what we need for spawn_blocking (WASM is sync CPU work) + let engine = Arc::clone(&self.engine); + let component = self.component.clone(); + let input_bytes = serde_json::to_vec(&input).map_err(|e| ToolError::Other { + message: format!("Failed to serialize input: {e}"), + })?; + + let result = tokio::task::spawn_blocking(move || -> Result { + let linker: Linker = Linker::new(&engine); + let mut store = Store::new(&engine, WasmState); + let instance = linker + .instantiate(&mut store, &component) + .map_err(|e| ToolError::Other { + message: format!("WASM instantiation failed: {e}"), + })?; + + // Call the execute export + let execute_fn = instance + .get_typed_func::<(Vec,), (Result, String>,)>( + &mut store, + "execute", + ) + .or_else(|_| { + instance.get_typed_func::<(Vec,), (Result, String>,)>( + &mut store, + "amplifier:modules/tool#execute", + ) + }) + .map_err(|e| ToolError::Other { + message: format!("WASM export 'execute' not found: {e}"), + })?; + + let (result,) = execute_fn + .call(&mut store, (input_bytes,)) + .map_err(|e| ToolError::Other { + message: format!("WASM execute call failed: {e}"), + })?; + + match result { + Ok(output_bytes) => { + let tool_result: ToolResult = + serde_json::from_slice(&output_bytes).map_err(|e| ToolError::Other { + message: format!("Failed to deserialize ToolResult: {e}"), + })?; + Ok(tool_result) + } + Err(error_string) => Err(ToolError::Other { + message: format!("WASM tool returned error: {error_string}"), + }), + } 
+ }) + .await + .map_err(|e| ToolError::Other { + message: format!("WASM task panicked: {e}"), + })??; + + Ok(result) + }) + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[allow(dead_code)] + fn assert_tool_trait_object(_: Arc) {} + + /// Compile-time check: WasmToolBridge satisfies Arc. + #[allow(dead_code)] + fn wasm_tool_bridge_is_tool() { + fn _check(bridge: WasmToolBridge) { + assert_tool_trait_object(Arc::new(bridge)); + } + } + + #[test] + fn load_echo_tool_from_bytes() { + let wasm_bytes = std::fs::read("tests/fixtures/wasm/echo-tool.wasm") + .expect("echo-tool.wasm fixture not found — run Task 6 first"); + + let engine = crate::wasm_engine::WasmEngine::new().unwrap(); + let bridge = WasmToolBridge::from_bytes(&wasm_bytes, engine.inner()) + .expect("should load echo-tool.wasm"); + + assert_eq!(bridge.name(), "echo-tool"); + let spec = bridge.get_spec(); + assert_eq!(spec.name, "echo-tool"); + assert!(spec.description.is_some()); + } + + #[tokio::test] + async fn echo_tool_execute_roundtrip() { + let wasm_bytes = std::fs::read("tests/fixtures/wasm/echo-tool.wasm") + .expect("echo-tool.wasm fixture not found — run Task 6 first"); + + let engine = crate::wasm_engine::WasmEngine::new().unwrap(); + let bridge = WasmToolBridge::from_bytes(&wasm_bytes, engine.inner()).unwrap(); + + let input = serde_json::json!({"message": "hello from test"}); + let result = bridge.execute(input.clone()).await; + let result = result.expect("execute should succeed"); + + assert!(result.success); + assert_eq!(result.output, Some(input)); + } +} +``` + +> **IMPORTANT NOTE FOR IMPLEMENTER:** The wasmtime Component Model API for typed function calls (`get_typed_func`) may have a different signature than shown above. The exact API depends on wasmtime 42's component model implementation. 
Common variations: +> - Function names may be interface-qualified: `"amplifier:modules/tool#get-spec"` instead of `"get-spec"` +> - The `Component` type may need `Component::from_binary()` instead of `Component::new()` +> - You may need to use `wasmtime::component::bindgen!` to generate strongly-typed bindings from the WIT file instead of manually looking up exports +> +> The TEST is the source of truth: if `load_echo_tool_from_bytes` and `echo_tool_execute_roundtrip` pass, the bridge works correctly. Adjust the implementation until the tests pass. + +### Step 4: Run the tests + +```bash +cd amplifier-core +cargo test -p amplifier-core --features wasm -- wasm_tool --verbose +``` + +Expected: All tests pass: +``` +test bridges::wasm_tool::tests::load_echo_tool_from_bytes ... ok +test bridges::wasm_tool::tests::echo_tool_execute_roundtrip ... ok +``` + +### Step 5: Run clippy + +```bash +cd amplifier-core +cargo clippy -p amplifier-core --features wasm -- -D warnings +``` + +Expected: Clean. + +### Step 6: Commit + +```bash +cd amplifier-core +git add crates/amplifier-core/src/bridges/wasm_tool.rs +git commit -m "feat(wasm): rewrite WasmToolBridge with Component Model support + +Replaces the stub (execute() returned hard error) with full implementation: +- from_bytes(wasm, engine): compile component, call get-spec, cache name/spec +- from_file(path, engine): convenience loader +- execute(): spawn_blocking, instantiate, call WASM export, deserialize result +- Uses shared Arc from WasmEngine +- JSON serialization across WASM boundary (list in WIT) +- E2E tests with echo-tool.wasm fixture: load + execute roundtrip" +``` + +--- + +## Task 11: `WasmHookBridge` + +**What:** Create `bridges/wasm_hook.rs` implementing `HookHandler` trait. Same pattern as `WasmToolBridge`. 
+ +**Files:** +- Create: `crates/amplifier-core/src/bridges/wasm_hook.rs` +- Modify: `crates/amplifier-core/src/bridges/mod.rs` + +### Step 1: Add module to bridges/mod.rs + +Open `crates/amplifier-core/src/bridges/mod.rs`. Add after the `wasm_tool` line: + +```rust +#[cfg(feature = "wasm")] +pub mod wasm_hook; +``` + +### Step 2: Create the bridge with tests + +Create `crates/amplifier-core/src/bridges/wasm_hook.rs`: + +```rust +//! WASM bridge for sandboxed hook handler modules. +//! +//! [`WasmHookBridge`] loads a compiled WASM component via wasmtime's +//! Component Model and implements the [`HookHandler`] trait. +//! +//! Gated behind the `wasm` feature flag. + +use std::future::Future; +use std::pin::Pin; +use std::sync::Arc; + +use serde_json::Value; +use wasmtime::component::{Component, Linker}; +use wasmtime::{Engine, Store}; + +use super::wasm_tool::WasmState; +use crate::errors::HookError; +use crate::models::HookResult; +use crate::traits::HookHandler; + +/// A bridge that loads a WASM component and exposes it as a native [`HookHandler`]. +pub struct WasmHookBridge { + engine: Arc, + component: Component, +} + +impl WasmHookBridge { + /// Load a WASM hook handler from raw bytes. + pub fn from_bytes( + wasm_bytes: &[u8], + engine: Arc, + ) -> Result> { + let component = Component::new(&engine, wasm_bytes)?; + Ok(Self { engine, component }) + } + + /// Load a WASM hook handler from a file path. 
+ pub fn from_file( + path: &std::path::Path, + engine: Arc, + ) -> Result> { + let wasm_bytes = std::fs::read(path)?; + Self::from_bytes(&wasm_bytes, engine) + } +} + +impl HookHandler for WasmHookBridge { + fn handle( + &self, + event: &str, + data: Value, + ) -> Pin> + Send + '_>> { + let event = event.to_string(); + Box::pin(async move { + let engine = Arc::clone(&self.engine); + let component = self.component.clone(); + let data_bytes = serde_json::to_vec(&data).map_err(|e| HookError::Other { + message: format!("Failed to serialize hook data: {e}"), + })?; + + let result = + tokio::task::spawn_blocking(move || -> Result { + let linker: Linker = Linker::new(&engine); + let mut store = Store::new(&engine, WasmState); + let instance = linker + .instantiate(&mut store, &component) + .map_err(|e| HookError::Other { + message: format!("WASM instantiation failed: {e}"), + })?; + + let handle_fn = instance + .get_typed_func::<(String, Vec), (Result, String>,)>( + &mut store, + "handle", + ) + .or_else(|_| { + instance.get_typed_func::<(String, Vec), (Result, String>,)>( + &mut store, + "amplifier:modules/hook-handler#handle", + ) + }) + .map_err(|e| HookError::Other { + message: format!("WASM export 'handle' not found: {e}"), + })?; + + let (result,) = handle_fn + .call(&mut store, (event, data_bytes)) + .map_err(|e| HookError::Other { + message: format!("WASM handle call failed: {e}"), + })?; + + match result { + Ok(output_bytes) => { + let hook_result: HookResult = + serde_json::from_slice(&output_bytes).map_err(|e| { + HookError::Other { + message: format!("Failed to deserialize HookResult: {e}"), + } + })?; + Ok(hook_result) + } + Err(error_string) => Err(HookError::Other { + message: format!("WASM hook returned error: {error_string}"), + }), + } + }) + .await + .map_err(|e| HookError::Other { + message: format!("WASM task panicked: {e}"), + })??; + + Ok(result) + }) + } +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::models::HookAction; + + 
#[allow(dead_code)] + fn assert_hook_trait_object(_: Arc) {} + + #[allow(dead_code)] + fn wasm_hook_bridge_is_hook_handler() { + fn _check(bridge: WasmHookBridge) { + assert_hook_trait_object(Arc::new(bridge)); + } + } + + #[tokio::test] + async fn deny_hook_returns_deny_action() { + let wasm_bytes = std::fs::read("tests/fixtures/wasm/deny-hook.wasm") + .expect("deny-hook.wasm fixture not found — run Task 7 first"); + + let engine = crate::wasm_engine::WasmEngine::new().unwrap(); + let bridge = WasmHookBridge::from_bytes(&wasm_bytes, engine.inner()).unwrap(); + + let result = bridge + .handle("test:event", serde_json::json!({"key": "value"})) + .await + .expect("handle should succeed"); + + assert_eq!(result.action, HookAction::Deny); + assert!(result.reason.is_some()); + assert!(result.reason.unwrap().contains("Denied")); + } +} +``` + +### Step 3: Run tests + +```bash +cd amplifier-core +cargo test -p amplifier-core --features wasm -- wasm_hook --verbose +``` + +Expected: `deny_hook_returns_deny_action` passes. + +### Step 4: Commit + +```bash +cd amplifier-core +git add crates/amplifier-core/src/bridges/wasm_hook.rs crates/amplifier-core/src/bridges/mod.rs +git commit -m "feat(wasm): add WasmHookBridge with Component Model support + +Implements HookHandler trait for WASM hook modules. +Same pattern as WasmToolBridge: spawn_blocking, instantiate, call export. +E2E test with deny-hook.wasm fixture verifies Deny action roundtrip." +``` + +--- + +## Task 12: `WasmContextBridge` + +**What:** Create `bridges/wasm_context.rs` implementing `ContextManager` trait. This tests stateful multi-call WASM modules (add → get → clear → get). 
+ +**Files:** +- Create: `crates/amplifier-core/src/bridges/wasm_context.rs` +- Modify: `crates/amplifier-core/src/bridges/mod.rs` + +### Step 1: Add module to bridges/mod.rs + +Add after the `wasm_hook` line: + +```rust +#[cfg(feature = "wasm")] +pub mod wasm_context; +``` + +### Step 2: Create the bridge with tests + +Create `crates/amplifier-core/src/bridges/wasm_context.rs`. Follow the exact same pattern as `WasmHookBridge` but: + +- Implement `ContextManager` trait (5 methods: `add_message`, `get_messages`, `get_messages_for_request`, `set_messages`, `clear`) +- Hold a persistent `Store` behind a `tokio::sync::Mutex` (NOT a new store per call — the context module is stateful and needs the same WASM instance across calls) +- E2E test with `memory-context.wasm`: + 1. `add_message(json!({"role": "user", "content": "hello"}))` + 2. `get_messages()` → verify 1 message + 3. `add_message(json!({"role": "assistant", "content": "hi"}))` + 4. `get_messages()` → verify 2 messages + 5. `clear()` + 6. `get_messages()` → verify 0 messages + +> **KEY DIFFERENCE from other bridges:** Context is STATEFUL. The same WASM instance must persist across calls. Use a `Mutex>` + pre-created `Instance` stored on the bridge struct, NOT a new store/instance per call. + +### Step 3: Run tests + +```bash +cd amplifier-core +cargo test -p amplifier-core --features wasm -- wasm_context --verbose +``` + +Expected: The stateful roundtrip test passes. + +### Step 4: Commit + +```bash +cd amplifier-core +git add crates/amplifier-core/src/bridges/wasm_context.rs crates/amplifier-core/src/bridges/mod.rs +git commit -m "feat(wasm): add WasmContextBridge with persistent WASM state + +Implements ContextManager trait for WASM context modules. +Key difference from other bridges: uses persistent Store + Instance +(behind Mutex) since context is stateful across calls. +E2E test with memory-context.wasm: add/get/clear roundtrip." 
+``` + +--- + +## Task 13: `WasmApprovalBridge` + +**What:** Create `bridges/wasm_approval.rs` implementing `ApprovalProvider` trait. Same pattern as `WasmHookBridge` (stateless, new instance per call). + +**Files:** +- Create: `crates/amplifier-core/src/bridges/wasm_approval.rs` +- Modify: `crates/amplifier-core/src/bridges/mod.rs` + +### Step 1: Add module to bridges/mod.rs + +Add after the `wasm_context` line: + +```rust +#[cfg(feature = "wasm")] +pub mod wasm_approval; +``` + +### Step 2: Create the bridge with tests + +Create `crates/amplifier-core/src/bridges/wasm_approval.rs`. Follow `WasmHookBridge` pattern: + +- Implement `ApprovalProvider` trait (`request_approval`) +- Serialize `ApprovalRequest` to JSON bytes, call WASM `request-approval` export, deserialize `ApprovalResponse` +- E2E test with `auto-approve.wasm`: call `request_approval`, verify `approved == true` + +### Step 3: Run tests + +```bash +cd amplifier-core +cargo test -p amplifier-core --features wasm -- wasm_approval --verbose +``` + +### Step 4: Commit + +```bash +cd amplifier-core +git add crates/amplifier-core/src/bridges/wasm_approval.rs crates/amplifier-core/src/bridges/mod.rs +git commit -m "feat(wasm): add WasmApprovalBridge with Component Model support + +Implements ApprovalProvider trait for WASM approval modules. +E2E test with auto-approve.wasm verifies approval roundtrip." +``` + +--- + +## Task 14: Echo-Provider Test Fixture + +**What:** Create a Provider fixture that returns a canned `ChatResponse`. No real HTTP — just proves the Provider interface works across the WASM boundary. 
+
+**Files:**
+- Create: `tests/fixtures/wasm/src/echo-provider/Cargo.toml`
+- Create: `tests/fixtures/wasm/src/echo-provider/src/lib.rs`
+- Create: `tests/fixtures/wasm/echo-provider.wasm`
+
+### Step 1: Create Cargo.toml
+
+Create `tests/fixtures/wasm/src/echo-provider/Cargo.toml`:
+
+```toml
+[package]
+name = "echo-provider"
+version = "0.1.0"
+edition = "2021"
+
+[lib]
+crate-type = ["cdylib"]
+
+[dependencies]
+amplifier-guest = { path = "../../../../crates/amplifier-guest" }
+serde_json = "1"
+
+[package.metadata.component]
+package = "amplifier:echo-provider"
+
+[package.metadata.component.target]
+world = "provider-module"
+path = "../../../../wit/amplifier-modules.wit"
+```
+
+### Step 2: Create the implementation
+
+Create `tests/fixtures/wasm/src/echo-provider/src/lib.rs`:
+
+```rust
+//! Echo provider — returns a canned ChatResponse.
+//!
+//! Test fixture for validating the WASM Provider bridge.
+//! Does not make real HTTP calls — just proves the interface roundtrip.
+
+use amplifier_guest::{Provider, ProviderInfo, ModelInfo, ChatResponse, Value};
+use std::collections::HashMap;
+
+#[derive(Default)]
+struct EchoProvider;
+
+impl Provider for EchoProvider {
+    fn name(&self) -> &str {
+        "echo-provider"
+    }
+
+    fn get_info(&self) -> ProviderInfo {
+        ProviderInfo {
+            id: "echo-provider".into(),
+            display_name: "Echo Provider".into(),
+            credential_env_vars: vec![],
+            capabilities: vec!["tools".into()],
+            defaults: HashMap::new(),
+        }
+    }
+
+    fn list_models(&self) -> Result<Vec<ModelInfo>, String> {
+        Ok(vec![ModelInfo {
+            id: "echo-model".into(),
+            display_name: "Echo Model".into(),
+            context_window: 4096,
+            max_output_tokens: 1024,
+            capabilities: vec![],
+            defaults: HashMap::new(),
+        }])
+    }
+
+    fn complete(&self, _request: Value) -> Result<ChatResponse, String> {
+        Ok(ChatResponse {
+            content: vec![serde_json::json!({"type": "text", "text": "Echo response from WASM provider"})],
+            tool_calls: None,
+            finish_reason: Some("stop".into()),
+            extra: HashMap::new(),
+        })
+    }
+
+    fn parse_tool_calls(&self, _response: &ChatResponse) -> Vec<Value> {
+        vec![]
+    }
+}
+
+amplifier_guest::export_provider!(EchoProvider);
+```
+
+### Step 3: Compile and copy
+
+```bash
+cd amplifier-core/tests/fixtures/wasm/src/echo-provider
+cargo component build --release
+cp target/wasm32-wasip2/release/echo_provider.wasm ../../echo-provider.wasm
+```
+
+### Step 4: Commit
+
+```bash
+cd amplifier-core
+git add tests/fixtures/wasm/src/echo-provider/ tests/fixtures/wasm/echo-provider.wasm
+git commit -m "test(wasm): add echo-provider test fixture
+
+WASM Provider that returns a canned ChatResponse.
+No real HTTP — validates Provider interface roundtrip.
+Pre-compiled .wasm binary committed for E2E tests."
+```
+
+---
+
+## Task 15: Passthrough-Orchestrator Test Fixture
+
+**What:** Create an Orchestrator fixture that calls `kernel::execute_tool("echo-tool", input)` via the kernel-service host import and returns the result. This proves host imports work.
+
+**Files:**
+- Create: `tests/fixtures/wasm/src/passthrough-orchestrator/Cargo.toml`
+- Create: `tests/fixtures/wasm/src/passthrough-orchestrator/src/lib.rs`
+- Create: `tests/fixtures/wasm/passthrough-orchestrator.wasm`
+
+### Step 1: Create Cargo.toml
+
+Create `tests/fixtures/wasm/src/passthrough-orchestrator/Cargo.toml`:
+
+```toml
+[package]
+name = "passthrough-orchestrator"
+version = "0.1.0"
+edition = "2021"
+
+[lib]
+crate-type = ["cdylib"]
+
+[dependencies]
+amplifier-guest = { path = "../../../../crates/amplifier-guest" }
+serde_json = "1"
+
+[package.metadata.component]
+package = "amplifier:passthrough-orchestrator"
+
+[package.metadata.component.target]
+world = "orchestrator-module"
+path = "../../../../wit/amplifier-modules.wit"
+```
+
+### Step 2: Create the implementation
+
+Create `tests/fixtures/wasm/src/passthrough-orchestrator/src/lib.rs`:
+
+```rust
+//! Passthrough orchestrator — calls echo-tool via kernel-service import.
+//!
+//! 
Test fixture for validating WASM Orchestrator bridge + host imports.
+//! Calls kernel::execute_tool("echo-tool", prompt) and returns the result.
+
+use amplifier_guest::{Orchestrator, Value};
+use amplifier_guest::kernel;
+
+#[derive(Default)]
+struct PassthroughOrchestrator;
+
+impl Orchestrator for PassthroughOrchestrator {
+    fn execute(&self, prompt: String) -> Result<String, String> {
+        // Call the echo-tool via kernel-service host import
+        let input = serde_json::json!({"prompt": prompt});
+        let result = kernel::execute_tool("echo-tool", &input)?;
+
+        // Return the tool's output as a string
+        match result.output {
+            Some(output) => Ok(output.to_string()),
+            None => Ok("no output".into()),
+        }
+    }
+}
+
+amplifier_guest::export_orchestrator!(PassthroughOrchestrator);
+```
+
+### Step 3: Compile and copy
+
+```bash
+cd amplifier-core/tests/fixtures/wasm/src/passthrough-orchestrator
+cargo component build --release
+cp target/wasm32-wasip2/release/passthrough_orchestrator.wasm ../../passthrough-orchestrator.wasm
+```
+
+### Step 4: Commit
+
+```bash
+cd amplifier-core
+git add tests/fixtures/wasm/src/passthrough-orchestrator/ tests/fixtures/wasm/passthrough-orchestrator.wasm
+git commit -m "test(wasm): add passthrough-orchestrator test fixture
+
+WASM Orchestrator that calls echo-tool via kernel-service host import.
+Validates Tier 2 host callback mechanism.
+Pre-compiled .wasm binary committed for E2E tests."
+```
+
+---
+
+## Task 16: `WasmProviderBridge`
+
+**What:** Create `bridges/wasm_provider.rs` implementing `Provider` trait. Configures WASI HTTP imports in the Linker (for Provider modules that make real HTTP calls — though our test fixture doesn't).
+ +**Files:** +- Create: `crates/amplifier-core/src/bridges/wasm_provider.rs` +- Modify: `crates/amplifier-core/src/bridges/mod.rs` + +### Step 1: Add module to bridges/mod.rs + +Add after `wasm_approval`: + +```rust +#[cfg(feature = "wasm")] +pub mod wasm_provider; +``` + +### Step 2: Create the bridge + +Create `crates/amplifier-core/src/bridges/wasm_provider.rs`. Follow the `WasmToolBridge` pattern but: + +- Implement `Provider` trait (5 methods: `name`, `get_info`, `list_models`, `complete`, `parse_tool_calls`) +- Call `get-info` at load time to cache provider name and info (same as `get-spec` for tools) +- For WASI HTTP: configure `wasmtime_wasi_http::add_to_linker()` on the Linker (or stub it for now if the echo-provider doesn't use real HTTP). Add `wasmtime-wasi-http` to `Cargo.toml` dependencies if needed. +- E2E tests with `echo-provider.wasm`: + - Load and verify `name()` returns `"echo-provider"` + - Call `list_models()`, verify one model returned + - Call `complete()` with a dummy request, verify `ChatResponse` has content + +### Step 3: Run tests + +```bash +cd amplifier-core +cargo test -p amplifier-core --features wasm -- wasm_provider --verbose +``` + +### Step 4: Commit + +```bash +cd amplifier-core +git add crates/amplifier-core/src/bridges/wasm_provider.rs crates/amplifier-core/src/bridges/mod.rs crates/amplifier-core/Cargo.toml +git commit -m "feat(wasm): add WasmProviderBridge with WASI HTTP support + +Implements Provider trait for WASM provider modules. +Configures WASI HTTP imports in the Linker for real HTTP calls. +E2E test with echo-provider.wasm: get_info, list_models, complete." +``` + +--- + +## Task 17: `WasmOrchestratorBridge` + +**What:** Create `bridges/wasm_orchestrator.rs` implementing `Orchestrator` trait. Configures kernel-service host import functions in the Linker — these call back into the Coordinator, same pattern as `KernelServiceImpl` in `grpc_server.rs`. 
+ +**Files:** +- Create: `crates/amplifier-core/src/bridges/wasm_orchestrator.rs` +- Modify: `crates/amplifier-core/src/bridges/mod.rs` + +### Step 1: Add module to bridges/mod.rs + +Add after `wasm_provider`: + +```rust +#[cfg(feature = "wasm")] +pub mod wasm_orchestrator; +``` + +### Step 2: Create the bridge + +Create `crates/amplifier-core/src/bridges/wasm_orchestrator.rs`. This is the most complex bridge because it must: + +1. Implement `Orchestrator` trait +2. Configure `kernel-service` host imports in the Linker before instantiation. Each import function (execute-tool, complete-with-provider, emit-hook, get-messages, add-message, get-capability, register-capability) must call back into the Coordinator. +3. The bridge struct holds an `Arc` (available after PR #36 merges) for routing kernel-service callbacks. + +**Reference pattern:** Look at `crates/amplifier-core/src/grpc_server.rs` (`KernelServiceImpl`). The WASM host imports do the same thing — receive a request, look up a tool/provider on the Coordinator, call it, return the result. The difference: gRPC server handles network requests, WASM host imports handle in-process function calls. + +**E2E test with `passthrough-orchestrator.wasm`:** +1. Create a `WasmEngine` +2. Create a `Coordinator` with an echo-tool mounted (use `FakeTool` or load `echo-tool.wasm` as a `WasmToolBridge`) +3. Create the `WasmOrchestratorBridge` with the coordinator +4. Call `execute("hello")` +5. Verify the orchestrator called the tool (via kernel-service import) and returned the result + +### Step 3: Run tests + +```bash +cd amplifier-core +cargo test -p amplifier-core --features wasm -- wasm_orchestrator --verbose +``` + +### Step 4: Commit + +```bash +cd amplifier-core +git add crates/amplifier-core/src/bridges/wasm_orchestrator.rs crates/amplifier-core/src/bridges/mod.rs +git commit -m "feat(wasm): add WasmOrchestratorBridge with kernel-service host imports + +Implements Orchestrator trait for WASM orchestrator modules. 
+Configures kernel-service host imports in Linker: +- execute-tool → Coordinator::get_tool() + tool.execute() +- complete-with-provider → Coordinator::get_provider() + provider.complete() +- emit-hook, get-messages, add-message, get/register-capability +E2E test with passthrough-orchestrator.wasm: orchestrator → host → tool." +``` + +--- + +## Task 18: Transport Dispatch + +**What:** Add `load_wasm_*` functions for all 6 module types to `transport.rs`. Currently only `load_wasm_tool` exists. Add hook, context, approval, provider, orchestrator. + +**Files:** +- Modify: `crates/amplifier-core/src/transport.rs` + +### Step 1: Add the new load functions + +Open `crates/amplifier-core/src/transport.rs`. After the existing `load_wasm_tool` function, add: + +```rust +/// Load a WASM hook handler from raw bytes (requires `wasm` feature). +#[cfg(feature = "wasm")] +pub fn load_wasm_hook( + wasm_bytes: &[u8], + engine: std::sync::Arc, +) -> Result, Box> +{ + let bridge = crate::bridges::wasm_hook::WasmHookBridge::from_bytes(wasm_bytes, engine)?; + Ok(std::sync::Arc::new(bridge)) +} + +/// Load a WASM context manager from raw bytes (requires `wasm` feature). +#[cfg(feature = "wasm")] +pub fn load_wasm_context( + wasm_bytes: &[u8], + engine: std::sync::Arc, +) -> Result< + std::sync::Arc, + Box, +> { + let bridge = crate::bridges::wasm_context::WasmContextBridge::from_bytes(wasm_bytes, engine)?; + Ok(std::sync::Arc::new(bridge)) +} + +/// Load a WASM approval provider from raw bytes (requires `wasm` feature). +#[cfg(feature = "wasm")] +pub fn load_wasm_approval( + wasm_bytes: &[u8], + engine: std::sync::Arc, +) -> Result< + std::sync::Arc, + Box, +> { + let bridge = + crate::bridges::wasm_approval::WasmApprovalBridge::from_bytes(wasm_bytes, engine)?; + Ok(std::sync::Arc::new(bridge)) +} + +/// Load a WASM provider from raw bytes (requires `wasm` feature). 
+#[cfg(feature = "wasm")] +pub fn load_wasm_provider( + wasm_bytes: &[u8], + engine: std::sync::Arc, +) -> Result, Box> +{ + let bridge = + crate::bridges::wasm_provider::WasmProviderBridge::from_bytes(wasm_bytes, engine)?; + Ok(std::sync::Arc::new(bridge)) +} + +/// Load a WASM orchestrator from raw bytes (requires `wasm` feature). +/// +/// The orchestrator bridge needs a Coordinator for kernel-service host imports. +#[cfg(feature = "wasm")] +pub fn load_wasm_orchestrator( + wasm_bytes: &[u8], + engine: std::sync::Arc, + coordinator: std::sync::Arc, +) -> Result< + std::sync::Arc, + Box, +> { + let bridge = crate::bridges::wasm_orchestrator::WasmOrchestratorBridge::from_bytes( + wasm_bytes, + engine, + coordinator, + )?; + Ok(std::sync::Arc::new(bridge)) +} +``` + +### Step 2: Update the existing `load_wasm_tool` to accept an engine parameter + +The current `load_wasm_tool` creates its own bridge without a shared engine. Update it to match the new pattern: + +Replace the existing `load_wasm_tool`: + +```rust +/// Load a WASM tool module from raw bytes (requires `wasm` feature). 
+#[cfg(feature = "wasm")] +pub fn load_wasm_tool( + wasm_bytes: &[u8], + engine: std::sync::Arc, +) -> Result, Box> { + let bridge = crate::bridges::wasm_tool::WasmToolBridge::from_bytes(wasm_bytes, engine)?; + Ok(Arc::new(bridge)) +} +``` + +### Step 3: Add transport tests + +Add to the `#[cfg(test)] mod tests` section in `transport.rs`: + +```rust + #[cfg(feature = "wasm")] + #[test] + fn load_wasm_tool_returns_arc_dyn_tool() { + let wasm_bytes = std::fs::read("tests/fixtures/wasm/echo-tool.wasm") + .expect("echo-tool.wasm fixture not found"); + let engine = crate::wasm_engine::WasmEngine::new().unwrap(); + let tool = super::load_wasm_tool(&wasm_bytes, engine.inner()); + assert!(tool.is_ok()); + assert_eq!(tool.unwrap().name(), "echo-tool"); + } +``` + +### Step 4: Run tests + +```bash +cd amplifier-core +cargo test -p amplifier-core --features wasm -- transport --verbose +``` + +### Step 5: Commit + +```bash +cd amplifier-core +git add crates/amplifier-core/src/transport.rs +git commit -m "feat(wasm): add load_wasm_* transport functions for all 6 module types + +- load_wasm_tool (updated to accept shared engine) +- load_wasm_hook, load_wasm_context, load_wasm_approval +- load_wasm_provider, load_wasm_orchestrator (accepts Coordinator) +- All return Arc, feature-gated behind wasm" +``` + +--- + +## Task 19: E2E Test Suite + Build Script + +**What:** Replace the 2-test stub in `wasm_tool_e2e.rs` with a comprehensive E2E suite covering all 6 module types. Also create a `build-fixtures.sh` script that recompiles all fixture `.wasm` files from source. + +**Files:** +- Rewrite: `crates/amplifier-core/tests/wasm_tool_e2e.rs` → rename to `wasm_e2e.rs` +- Create: `tests/fixtures/wasm/build-fixtures.sh` + +### Step 1: Create the build-fixtures script + +Create `tests/fixtures/wasm/build-fixtures.sh`: + +```bash +#!/usr/bin/env bash +# Recompile all WASM test fixtures from source. 
+# +# Run from the amplifier-core root: +# bash tests/fixtures/wasm/build-fixtures.sh + +set -euo pipefail + +SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" +FIXTURES_DIR="$SCRIPT_DIR" +SRC_DIR="$FIXTURES_DIR/src" + +echo "=== Building WASM test fixtures ===" + +for module_dir in "$SRC_DIR"/*/; do + module_name=$(basename "$module_dir") + echo "--- Building $module_name ---" + (cd "$module_dir" && cargo component build --release) + + # Find the .wasm output + wasm_file=$(find "$module_dir/target" -name "*.wasm" -path "*/release/*" | head -1) + if [ -z "$wasm_file" ]; then + echo "ERROR: No .wasm file found for $module_name" + exit 1 + fi + + # Copy to fixtures directory with kebab-case name + cp "$wasm_file" "$FIXTURES_DIR/$module_name.wasm" + echo " -> $FIXTURES_DIR/$module_name.wasm ($(wc -c < "$FIXTURES_DIR/$module_name.wasm") bytes)" +done + +echo "=== All fixtures built successfully ===" +``` + +Make it executable: +```bash +chmod +x tests/fixtures/wasm/build-fixtures.sh +``` + +### Step 2: Create the comprehensive E2E test file + +Delete the old file and create `crates/amplifier-core/tests/wasm_e2e.rs`: + +```rust +//! WASM E2E integration tests. +//! +//! Tests all 6 WASM module types end-to-end using pre-compiled .wasm fixtures. +//! Each test loads a fixture, creates a bridge, and calls trait methods. +//! +//! 
Run with: cargo test -p amplifier-core --features wasm --test wasm_e2e + +#![cfg(feature = "wasm")] + +use amplifier_core::wasm_engine::WasmEngine; +use amplifier_core::traits::Tool; + +// --------------------------------------------------------------------------- +// Tool +// --------------------------------------------------------------------------- + +#[test] +fn tool_load_from_bytes() { + let wasm_bytes = std::fs::read("tests/fixtures/wasm/echo-tool.wasm") + .expect("echo-tool.wasm not found"); + let engine = WasmEngine::new().unwrap(); + let tool = amplifier_core::transport::load_wasm_tool(&wasm_bytes, engine.inner()) + .expect("should load echo-tool"); + assert_eq!(tool.name(), "echo-tool"); + let spec = tool.get_spec(); + assert_eq!(spec.name, "echo-tool"); + assert!(spec.description.is_some()); +} + +#[tokio::test] +async fn tool_execute_roundtrip() { + let wasm_bytes = std::fs::read("tests/fixtures/wasm/echo-tool.wasm") + .expect("echo-tool.wasm not found"); + let engine = WasmEngine::new().unwrap(); + let tool = amplifier_core::transport::load_wasm_tool(&wasm_bytes, engine.inner()).unwrap(); + + let input = serde_json::json!({"message": "hello from E2E test"}); + let result = tool.execute(input.clone()).await.expect("execute should succeed"); + assert!(result.success); + assert_eq!(result.output, Some(input)); +} + +// --------------------------------------------------------------------------- +// HookHandler +// --------------------------------------------------------------------------- + +#[tokio::test] +async fn hook_handler_deny() { + let wasm_bytes = std::fs::read("tests/fixtures/wasm/deny-hook.wasm") + .expect("deny-hook.wasm not found"); + let engine = WasmEngine::new().unwrap(); + let hook = amplifier_core::transport::load_wasm_hook(&wasm_bytes, engine.inner()).unwrap(); + + let result = hook + .handle("tool:before_execute", serde_json::json!({"tool": "bash"})) + .await + .expect("handle should succeed"); + + assert_eq!(result.action, 
amplifier_core::models::HookAction::Deny); + assert!(result.reason.is_some()); +} + +// --------------------------------------------------------------------------- +// ContextManager +// --------------------------------------------------------------------------- + +#[tokio::test] +async fn context_manager_roundtrip() { + let wasm_bytes = std::fs::read("tests/fixtures/wasm/memory-context.wasm") + .expect("memory-context.wasm not found"); + let engine = WasmEngine::new().unwrap(); + let ctx = amplifier_core::transport::load_wasm_context(&wasm_bytes, engine.inner()).unwrap(); + + // Start empty + let messages = ctx.get_messages().await.expect("get_messages"); + assert!(messages.is_empty(), "should start empty"); + + // Add messages + ctx.add_message(serde_json::json!({"role": "user", "content": "hello"})) + .await + .expect("add_message"); + ctx.add_message(serde_json::json!({"role": "assistant", "content": "hi"})) + .await + .expect("add_message"); + + let messages = ctx.get_messages().await.expect("get_messages"); + assert_eq!(messages.len(), 2); + + // Clear + ctx.clear().await.expect("clear"); + let messages = ctx.get_messages().await.expect("get_messages"); + assert!(messages.is_empty(), "should be empty after clear"); +} + +// --------------------------------------------------------------------------- +// ApprovalProvider +// --------------------------------------------------------------------------- + +#[tokio::test] +async fn approval_auto_approve() { + let wasm_bytes = std::fs::read("tests/fixtures/wasm/auto-approve.wasm") + .expect("auto-approve.wasm not found"); + let engine = WasmEngine::new().unwrap(); + let approval = amplifier_core::transport::load_wasm_approval(&wasm_bytes, engine.inner()).unwrap(); + + let request = amplifier_core::models::ApprovalRequest { + tool_name: "bash".into(), + action: "rm -rf /tmp/test".into(), + details: Default::default(), + risk_level: "high".into(), + timeout: Some(30.0), + }; + + let response = approval + 
.request_approval(request) + .await + .expect("request_approval should succeed"); + + assert!(response.approved); + assert!(response.reason.is_some()); +} + +// --------------------------------------------------------------------------- +// Provider +// --------------------------------------------------------------------------- + +#[tokio::test] +async fn provider_complete() { + let wasm_bytes = std::fs::read("tests/fixtures/wasm/echo-provider.wasm") + .expect("echo-provider.wasm not found"); + let engine = WasmEngine::new().unwrap(); + let provider = amplifier_core::transport::load_wasm_provider(&wasm_bytes, engine.inner()).unwrap(); + + assert_eq!(provider.name(), "echo-provider"); + + let info = provider.get_info(); + assert_eq!(info.id, "echo-provider"); + + let models = provider.list_models().await.expect("list_models"); + assert!(!models.is_empty()); + + // complete() with a minimal request + let request = amplifier_core::messages::ChatRequest { + messages: vec![amplifier_core::messages::Message { + role: amplifier_core::messages::Role::User, + content: amplifier_core::messages::MessageContent::Text("hello".into()), + name: None, + tool_call_id: None, + metadata: None, + extensions: Default::default(), + }], + tools: None, + response_format: None, + temperature: None, + top_p: None, + max_output_tokens: None, + conversation_id: None, + stream: None, + metadata: None, + model: None, + tool_choice: None, + stop: None, + reasoning_effort: None, + timeout: None, + extensions: Default::default(), + }; + + let response = provider.complete(request).await.expect("complete"); + assert!(!response.content.is_empty()); +} + +// --------------------------------------------------------------------------- +// Orchestrator (requires kernel-service host imports) +// --------------------------------------------------------------------------- + +#[tokio::test] +async fn orchestrator_calls_kernel() { + use amplifier_core::testing::FakeTool; + use std::sync::Arc; + + let 
wasm_bytes = std::fs::read("tests/fixtures/wasm/passthrough-orchestrator.wasm") + .expect("passthrough-orchestrator.wasm not found"); + + let engine = WasmEngine::new().unwrap(); + + // Set up a coordinator with an echo tool + let coordinator = Arc::new(amplifier_core::coordinator::Coordinator::new(Default::default())); + coordinator.mount_tool("echo-tool", Arc::new(FakeTool::new("echo-tool", "echoes input"))); + + let orchestrator = amplifier_core::transport::load_wasm_orchestrator( + &wasm_bytes, + engine.inner(), + coordinator.clone(), + ) + .unwrap(); + + // The passthrough orchestrator calls kernel::execute_tool("echo-tool", ...) + let context = Arc::new(amplifier_core::testing::FakeContextManager::new()); + let providers = std::collections::HashMap::new(); + let tools = std::collections::HashMap::new(); + + let result = orchestrator + .execute( + "test prompt".into(), + context, + providers, + tools, + serde_json::json!({}), + serde_json::json!({}), + ) + .await; + + assert!(result.is_ok(), "orchestrator.execute should succeed: {result:?}"); +} +``` + +### Step 3: Remove the old test file + +```bash +cd amplifier-core +rm crates/amplifier-core/tests/wasm_tool_e2e.rs +``` + +### Step 4: Run the full E2E suite + +```bash +cd amplifier-core +cargo test -p amplifier-core --features wasm --test wasm_e2e --verbose +``` + +Expected: All 7 tests pass: +``` +test tool_load_from_bytes ... ok +test tool_execute_roundtrip ... ok +test hook_handler_deny ... ok +test context_manager_roundtrip ... ok +test approval_auto_approve ... ok +test provider_complete ... ok +test orchestrator_calls_kernel ... ok +``` + +### Step 5: Run the full test suite (including non-WASM tests) + +```bash +cd amplifier-core +cargo test -p amplifier-core --features wasm --verbose +``` + +Expected: ALL tests pass (WASM + non-WASM). + +### Step 6: Run clippy on everything + +```bash +cd amplifier-core +cargo clippy -p amplifier-core --features wasm -- -D warnings +``` + +Expected: Clean. 
+ +### Step 7: Commit + +```bash +cd amplifier-core +git add tests/fixtures/wasm/build-fixtures.sh crates/amplifier-core/tests/wasm_e2e.rs +git rm crates/amplifier-core/tests/wasm_tool_e2e.rs +git commit -m "test(wasm): comprehensive E2E suite for all 6 WASM module types + +Replaces the 2-test stub with 7 E2E tests: +- tool_load_from_bytes, tool_execute_roundtrip +- hook_handler_deny +- context_manager_roundtrip (stateful multi-call) +- approval_auto_approve +- provider_complete (get_info, list_models, complete) +- orchestrator_calls_kernel (host imports → Coordinator → tool) + +Also adds build-fixtures.sh script to recompile all .wasm fixtures." +``` + +--- + +## Final Verification Checklist + +After completing all 20 tasks, run these commands to verify everything works: + +```bash +cd amplifier-core + +# 1. Full build with WASM feature +cargo build -p amplifier-core --features wasm + +# 2. Full test suite +cargo test -p amplifier-core --features wasm --verbose + +# 3. Full clippy +cargo clippy -p amplifier-core --features wasm -- -D warnings + +# 4. Guest crate builds +cargo check -p amplifier-guest + +# 5. Build without WASM feature (non-WASM tests still pass) +cargo test -p amplifier-core --verbose + +# 6. Recompile all fixtures from source +bash tests/fixtures/wasm/build-fixtures.sh +``` + +All 6 commands should pass cleanly. 
+ +--- + +## Task Dependency Graph + +``` +Task 0 (WIT) ─────────────────────────────────────┐ +Task 1 (Engine) ───────────────────────────────────┤ + │ +Task 2 (Guest scaffold) ── Task 3 (Tool trait) ──┤─── Task 6 (echo-tool.wasm) ──── Task 10 (WasmToolBridge) + Task 4 (Tier 1 traits) ┤── Task 7 (deny-hook.wasm) ──── Task 11 (WasmHookBridge) + ├── Task 8 (memory-ctx.wasm) ─── Task 12 (WasmContextBridge) + ├── Task 9 (auto-approve.wasm) ─ Task 13 (WasmApprovalBridge) + Task 5 (Tier 2 traits) ┤── Task 14 (echo-provider.wasm) ─ Task 16 (WasmProviderBridge) + └── Task 15 (passthrough.wasm) ── Task 17 (WasmOrchestratorBridge) + │ + Task 18 (Transport) ── Task 19 (E2E suite) +``` + +**Parallelizable groups:** +- Tasks 0 + 1 can run in parallel (no dependency on each other) +- Tasks 6-9 can run in parallel (independent fixture compilations) +- Tasks 10-13 can run in parallel (independent Tier 1 bridges, each only needs its own fixture) +- Tasks 14-15 can run in parallel +- Tasks 16-17 can run in parallel diff --git a/docs/plans/2026-03-05-phase4-module-resolver-design.md b/docs/plans/2026-03-05-phase4-module-resolver-design.md new file mode 100644 index 0000000..06c747f --- /dev/null +++ b/docs/plans/2026-03-05-phase4-module-resolver-design.md @@ -0,0 +1,293 @@ +# Phase 4: Cross-Language Module Resolver Design + +> Automatic transport detection and module loading — developers write `{"module": "tool-slack"}` and the framework handles everything. + +**Status:** Approved +**Date:** 2026-03-05 +**Phase:** 4 of 5 (Cross-Language SDK) +**Parent design:** `docs/plans/2026-03-02-cross-language-session-sdk-design.md` +**Prerequisites:** PR #35 (Phase 2 — Napi-RS/TypeScript bindings + wasmtime 42), PR #36 (gRPC v2 debt fix), PR #38 (Phase 3 — WASM module loading) + +--- + +## 1. Goal + +Implement the cross-language module resolver that makes transport invisible to developers. 
Given a resolved filesystem path to a module, automatically detect the transport (Python, WASM, gRPC) and module type (Tool, Provider, Orchestrator, etc.), then load it through the correct bridge. Developers write `{"module": "tool-slack"}` in bundle YAML and the framework handles everything. + +--- + +## 2. Background + +This is Phase 4 of the 5-phase Cross-Language SDK plan. Phase 4 is the **glue layer** — it connects two systems that currently exist side by side: + +- **Python side:** `loader.py` → `loader_dispatch.py` → `importlib` (resolves module IDs to Python packages) +- **Rust side:** `transport.rs` with `load_wasm_*` and `load_grpc_*` functions (loads modules from bytes/endpoints into `Arc`) + +Phase 4 connects them: given a resolved module path, auto-detect the language/transport and route to the correct Rust loader. + +**Dependencies (all complete):** + +- **Phase 1 (complete):** Python/PyO3 bridge +- **PR #35 / Phase 2 (merged to dev):** TypeScript/Napi-RS bindings + wasmtime 42 +- **PR #36 (merged to dev):** Full bridge fidelity + all 9 KernelService RPCs +- **PR #38 / Phase 3 (on dev):** WASM module loading — all 6 module types via Component Model + +**Current state:** The `loader_dispatch.py` has the routing skeleton (reads `amplifier.toml`, checks transport), but the WASM and native branches raise `NotImplementedError`. The `ModuleSource` protocol returns a `Path`, which works for Python but needs extending for WASM/gRPC. All work happens on the `dev/cross-language-sdk` branch. + +--- + +## 3. Key Design Decisions + +1. **Split architecture** — Rust does transport detection (pure logic), Python/TS foundation resolves URIs to paths (I/O, unchanged). Clear ownership at the boundary: foundation returns a `Path`, kernel takes it from there. This follows CORE_DEVELOPMENT_PRINCIPLES §5: "logic goes in Rust, not in bindings." + +2. **Parse WASM component WIT metadata** for module type detection — the Component Model embeds interface names in the binary. 
Self-describing, zero configuration. No naming conventions or extra manifest files needed for WASM modules. + +3. **Three runtime transport paths:** + - **Python** → `importlib` (existing behavior, backward compatible) + - **WASM** → wasmtime `load_wasm_*` functions (from Phase 3) + - **gRPC** → `load_grpc_*` functions (explicit opt-in via `amplifier.toml`) + + No runtime "native Rust" path (that's compile-time linking, not discovery). No auto-compilation of source code — the resolver discovers pre-built artifacts. + +4. **Serves both Python and TypeScript hosts** — Rust resolver exposed via PyO3 AND Napi-RS. TypeScript host apps get the same auto-detection for free. This was reinforced by the existing TypeScript/Node bindings from Phase 2 being a concrete second consumer. + +5. **`amplifier.toml` remains optional** — auto-detection is the primary path; explicit declaration is an override/escape hatch (especially for gRPC endpoints). + +6. **Foundation layer unchanged** — `ModuleSourceResolver` protocol, `SimpleSourceResolver`, bundle YAML format all stay the same. The resolver operates on the output of foundation resolution (a filesystem path), not the input. + +--- + +## 4. Resolver Pipeline + +Three stages with clear ownership: + +``` +Bundle YAML Foundation Rust Kernel +{"module": "tool-slack", resolve URI → inspect path → + "source": "git+..."} filesystem Path detect transport → + load module → + Arc +``` + +### Stage 1: URI Resolution (Foundation — Python/TS, unchanged) + +`source_hint` → filesystem `Path`. Git clone, local path resolution, package lookup. Already works. No changes needed. + +### Stage 2: Transport Detection (Rust kernel — new `module_resolver.rs`) + +Given a `Path`, inspect its contents and determine: + +- What transport to use (Python, WASM, gRPC) +- What module type it is (Tool, Provider, Orchestrator, etc.) 
+- Where the loadable artifact is (`.wasm` file path, gRPC endpoint, Python package name) + +### Stage 3: Module Loading (Rust kernel — existing `transport.rs`) + +Call the appropriate `load_wasm_*` / `load_grpc_*` function with the detected parameters. Returns `Arc`. + +### ModuleManifest — The Resolver's Output + +```rust +pub struct ModuleManifest { + pub transport: Transport, // Python | Wasm | Grpc + pub module_type: ModuleType, // Tool | Provider | Orchestrator | etc. + pub artifact: ModuleArtifact, // WasmBytes(Vec) | GrpcEndpoint(String) | PythonModule(String) +} +``` + +The Python `loader_dispatch.py` calls into Rust via PyO3 to get a `ModuleManifest`, then either loads the WASM/gRPC module directly in Rust or falls through to the existing Python importlib path. + +--- + +## 5. Transport Detection Logic + +**New file:** `crates/amplifier-core/src/module_resolver.rs` + +The resolver takes a filesystem path and returns a `ModuleManifest`. Detection is ordered — first match wins: + +### Step 1: Check for `amplifier.toml` (explicit override) + +- If present, read `transport` and `type` fields +- For gRPC: read `[grpc] endpoint` +- Always honored when present — this is the escape hatch + +### Step 2: Check for `.wasm` files + +- Scan the directory for `*.wasm` files +- If found, parse the WASM component's embedded WIT metadata using `wasmtime::component::Component::new()` + inspect exports +- Match exported interface names against known Amplifier interfaces to determine module type +- Return `Transport::Wasm` with the artifact bytes + +### Step 3: Check for Python package + +- Look for `__init__.py` or a `mount()` function pattern +- Return `Transport::Python` with the package name +- Backward-compatible fallback for the existing ecosystem + +### Step 4: No match → error + +- Clear error: "Could not detect module transport at path X. Expected: .wasm file, amplifier.toml, or Python package." 
+ +### Source code files are not loadable artifacts + +`Cargo.toml`, `package.json`, `go.mod` indicate source code, not loadable artifacts. The resolver doesn't compile — it discovers pre-built artifacts. A Rust module author runs `cargo component build` before publishing; the resolver finds the resulting `.wasm`. If they haven't built, the error message guides them. + +--- + +## 6. WASM Component Metadata Parsing + +How the resolver determines module type from a `.wasm` file: + +1. **Load the component** using `wasmtime::component::Component::new(&engine, &bytes)` (reuses shared `WasmEngine` from Phase 3) +2. **Inspect the component's exports** — component type metadata reveals which interfaces are exported +3. **Match against known Amplifier interface names:** + +| Exported interface | Module type detected | +|---|---| +| `amplifier:modules/tool` | `ModuleType::Tool` | +| `amplifier:modules/hook-handler` | `ModuleType::Hook` | +| `amplifier:modules/context-manager` | `ModuleType::Context` | +| `amplifier:modules/approval-provider` | `ModuleType::Approval` | +| `amplifier:modules/provider` | `ModuleType::Provider` | +| `amplifier:modules/orchestrator` | `ModuleType::Orchestrator` | + +4. **If no match** → error: "WASM component does not export any known Amplifier module interface" +5. **If multiple matches** → error (a component should implement exactly one module type) + +Module authors compile with `amplifier_guest::export_tool!(MyTool)` → the macro exports the `amplifier:modules/tool` interface → the resolver reads it back. Self-describing, zero configuration. + +--- + +## 7. PyO3 + Napi-RS Bindings + +The resolver is Rust code, exposed to both host languages. 
+ +### PyO3 Binding (Python hosts) + +```python +from amplifier_core._engine import resolve_module + +manifest = resolve_module("/path/to/resolved/module") +# Returns: {"transport": "wasm", "module_type": "tool", "artifact_path": "/path/to/tool.wasm"} +``` + +`loader_dispatch.py` becomes a thin wrapper: + +1. Foundation resolves source URI → filesystem path (unchanged) +2. Call `resolve_module(path)` → get `ModuleManifest` from Rust +3. If `transport == "python"` → existing `importlib` path (unchanged) +4. If `transport == "wasm"` → call `load_wasm_module(manifest)` in Rust via PyO3 → `Arc` mounted on coordinator +5. If `transport == "grpc"` → call `load_grpc_module(manifest)` in Rust via PyO3 + +### Napi-RS Binding (TypeScript hosts) + +```typescript +import { resolveModule, loadModule } from '@amplifier/core'; + +const manifest = resolveModule('/path/to/module'); +if (manifest.transport === 'wasm' || manifest.transport === 'grpc') { + loadModule(coordinator, manifest); +} +``` + +### Cross-host constraint + +The TypeScript host can't load Python modules (no `importlib`). If the resolver detects a Python module from a TS host, it returns an error with guidance: "Python module detected — compile to WASM or run as gRPC sidecar." This is a natural consequence of the three-path model. + +--- + +## 8. Integration with Existing Loader Chain + +Minimal changes to wire everything together. 
+ +### Python side — `loader_dispatch.py` changes + +**Today's flow:** + +``` +_session_init.py → loader.load(module_id, config, source_hint) + → loader_dispatch.py._detect_transport(path) → reads amplifier.toml + → if python: importlib path + → if grpc: loader_grpc.py + → if wasm: NotImplementedError ❌ +``` + +**Phase 4 flow:** + +``` +_session_init.py → loader.load(module_id, config, source_hint) + → Foundation resolves source_hint → filesystem Path (unchanged) + → Call Rust: resolve_module(path) → ModuleManifest + → if python: importlib path (unchanged) + → if wasm: Call Rust: load_wasm_module(manifest) → Arc on coordinator + → if grpc: Call Rust: load_grpc_module(manifest) → Arc on coordinator +``` + +### TypeScript side — new `resolveAndLoadModule()` in Napi-RS + +```typescript +const manifest = resolveModule('/path/to/module'); +if (manifest.transport === 'wasm' || manifest.transport === 'grpc') { + loadModule(coordinator, manifest); +} +``` + +### What stays unchanged + +- Bundle YAML format — zero config changes +- Foundation source URI resolution — still resolves `git+https://...` to paths +- `ModuleSourceResolver` protocol — still returns paths +- Python module loading via `importlib` — the Python path is untouched +- All existing Python modules work exactly as before + +### What's new + +- `module_resolver.rs` in Rust kernel — source inspection + transport detection +- PyO3 binding: `resolve_module(path) → ModuleManifest` +- Napi-RS binding: `resolveModule(path) → ModuleManifest` +- `loader_dispatch.py` WASM/gRPC branches wired to Rust instead of `NotImplementedError` +- `load_module(coordinator, manifest)` convenience function dispatching to the correct loader + +--- + +## 9. Deliverables + +1. **`crates/amplifier-core/src/module_resolver.rs`** — Rust module with transport detection: `amplifier.toml` reader, `.wasm` scanner, WASM component metadata parser, Python package detector. Returns `ModuleManifest`. +2. 
**`ModuleManifest` + `ModuleArtifact` types** — the resolver's output struct +3. **`load_module(coordinator, manifest)` convenience function** — dispatches to correct `load_wasm_*` / `load_grpc_*` +4. **PyO3 binding:** `resolve_module(path)` + `load_module(coordinator, manifest)` exposed to Python +5. **Napi-RS binding:** `resolveModule(path)` + `loadModule(coordinator, manifest)` exposed to TypeScript +6. **`loader_dispatch.py` updated** — WASM and gRPC branches call through to Rust +7. **Tests covering all detection paths** + +--- + +## 10. Testing Strategy + +| Test | What it validates | +|---|---| +| `resolve_wasm_tool` | Directory with `echo-tool.wasm` → detects WASM transport + Tool type via component metadata | +| `resolve_wasm_provider` | Directory with `echo-provider.wasm` → detects Provider type | +| `resolve_python_package` | Directory with `__init__.py` → detects Python transport | +| `resolve_amplifier_toml_grpc` | Directory with `amplifier.toml` transport=grpc → detects gRPC + reads endpoint | +| `resolve_amplifier_toml_overrides_auto` | Directory with both `.wasm` and `amplifier.toml` → toml wins | +| `resolve_empty_dir_errors` | Empty directory → clear error message | +| `resolve_no_known_interface_errors` | `.wasm` that doesn't export Amplifier interface → error | +| `load_module_wasm_tool_e2e` | Full pipeline: resolve → load → execute echo-tool → verify roundtrip | +| `load_module_grpc_not_found` | gRPC endpoint that doesn't exist → clean error | +| Python integration: `test_loader_dispatch_wasm` | Python loader resolves path → calls Rust → mounts WASM tool on coordinator | +| Node integration: `test_resolve_and_load_wasm` | TS host resolves path → calls Rust → mounts WASM tool on coordinator | + +Reuses Phase 3 fixtures: existing `tests/fixtures/wasm/*.wasm` files as test inputs. No new fixtures needed. + +--- + +## 11. 
Not in Scope + +- Auto-compilation of source code (Rust → WASM, Go → WASM) +- Module hot-reload +- Module marketplace / registry +- Changes to bundle YAML format +- Changes to foundation source URI resolution +- Go/C#/C++ native host bindings (Phase 5) +- Non-Rust WASM guest SDKs (Phase 5) diff --git a/docs/plans/2026-03-05-phase4-module-resolver-implementation.md b/docs/plans/2026-03-05-phase4-module-resolver-implementation.md new file mode 100644 index 0000000..8d3ae6e --- /dev/null +++ b/docs/plans/2026-03-05-phase4-module-resolver-implementation.md @@ -0,0 +1,2114 @@ +# Phase 4: Cross-Language Module Resolver — Implementation Plan + +> **Execution:** Use the subagent-driven-development workflow to implement this plan. + +**Goal:** Given a resolved filesystem path to a module, automatically detect its transport (Python, WASM, gRPC) and module type (Tool, Provider, Orchestrator, etc.), then load it through the correct bridge — making transport invisible to developers. + +**Architecture:** A new `module_resolver.rs` in the Rust kernel inspects a directory path and returns a `ModuleManifest` describing what was found (transport, module type, artifact). Detection runs in priority order: `amplifier.toml` (explicit override) → `.wasm` files (auto-detect via Component Model metadata) → Python package (`__init__.py` fallback) → error. A convenience `load_module()` function dispatches the manifest to the correct `load_wasm_*` / `load_grpc_*` function from `transport.rs`. Both functions are exposed to Python (PyO3) and TypeScript (Napi-RS), and `loader_dispatch.py` is updated to call them instead of raising `NotImplementedError`. 
+ +**Tech Stack:** Rust (amplifier-core), wasmtime Component Model inspection, TOML parsing (`toml` crate), PyO3 bindings, Napi-RS bindings, Python (loader_dispatch.py) + +**Design doc:** `docs/plans/2026-03-05-phase4-module-resolver-design.md` + +**Branch:** `dev/cross-language-sdk` (Phase 3 already merged — all 6 WASM bridges work, test fixtures exist) + +--- + +## Codebase Orientation (Read This First) + +You are working in `/home/bkrabach/dev/rust-devrust-core/amplifier-core/` on branch `dev/cross-language-sdk`. + +**Key existing files you'll interact with:** + +| File | What's in it | +|---|---| +| `crates/amplifier-core/src/transport.rs` | `Transport` enum (`Python`, `Grpc`, `Native`, `Wasm`), `load_wasm_tool()`, `load_wasm_hook()`, `load_wasm_context()`, `load_wasm_approval()`, `load_wasm_provider()`, `load_wasm_orchestrator()`, `load_grpc_tool()`, `load_grpc_orchestrator()`. Each `load_wasm_*` takes `(&[u8], Arc)` and returns `Result>`. | +| `crates/amplifier-core/src/models.rs` | `ModuleType` enum with variants: `Orchestrator`, `Provider`, `Tool`, `Context`, `Hook`, `Resolver`. Uses `#[serde(rename_all = "snake_case")]`. | +| `crates/amplifier-core/src/lib.rs` | Public module declarations and re-exports. You'll add `pub mod module_resolver;` here. | +| `crates/amplifier-core/src/wasm_engine.rs` | `WasmEngine` wrapper holding `Arc`. Call `WasmEngine::new()?.inner()` to get the engine. | +| `crates/amplifier-core/src/traits.rs` | The 6 module traits: `Tool`, `Provider`, `Orchestrator`, `ContextManager`, `HookHandler`, `ApprovalProvider`. | +| `crates/amplifier-core/src/coordinator.rs` | `Coordinator` struct with `new_for_test()` and typed mount points. `load_wasm_orchestrator()` requires `Arc`. | +| `crates/amplifier-core/Cargo.toml` | Dependencies. `wasmtime` and `wasmtime-wasi` are behind `features = ["wasm"]`. You'll add `toml` crate here. | +| `bindings/python/src/lib.rs` | PyO3 bindings. 
Has `PySession`, `PyCoordinator`, `PyHookRegistry`, `PyCancellationToken`. You'll add `resolve_module()` and `load_module()` functions. | +| `bindings/python/Cargo.toml` | PyO3 crate dependencies. You'll add `amplifier-core` wasm feature here. | +| `bindings/node/src/lib.rs` | Napi-RS bindings. Has `JsCoordinator`, `JsAmplifierSession`, etc. You'll add `resolveModule()` and `loadModule()`. | +| `bindings/node/Cargo.toml` | Napi-RS crate dependencies. You'll add `amplifier-core` wasm feature here. | +| `python/amplifier_core/loader_dispatch.py` | Current Python transport routing. Has `load_module()` with `NotImplementedError` for WASM and native transports. | +| `tests/fixtures/wasm/` | Pre-compiled `.wasm` fixtures: `echo-tool.wasm`, `deny-hook.wasm`, `memory-context.wasm`, `auto-approve.wasm`, `echo-provider.wasm`, `passthrough-orchestrator.wasm`. | +| `wit/amplifier-modules.wit` | WIT definitions. Package `amplifier:modules@1.0.0`. Interface names: `tool`, `hook-handler`, `context-manager`, `approval-provider`, `provider`, `orchestrator`. 
| + +**Test commands:** +```bash +# Unit tests (Rust, with WASM feature) +cargo test -p amplifier-core --features wasm + +# Specific test +cargo test -p amplifier-core --features wasm -- test_name_here + +# Clippy +cargo clippy -p amplifier-core --features wasm -- -D warnings + +# Integration tests only +cargo test -p amplifier-core --features wasm --test wasm_e2e +``` + +**Fixture helper pattern** (copy this for tests): +```rust +fn fixture(name: &str) -> Vec { + let manifest = std::path::Path::new(env!("CARGO_MANIFEST_DIR")); + let path = manifest.join("../../tests/fixtures/wasm").join(name); + std::fs::read(&path) + .unwrap_or_else(|e| panic!("fixture '{}' not found at {}: {}", name, path.display(), e)) +} +``` + +**INTERFACE_NAME constants in existing bridges** (the resolver must match these): +- `wasm_tool.rs`: `"amplifier:modules/tool@1.0.0"` +- `wasm_hook.rs`: `"amplifier:modules/hook-handler@1.0.0"` +- `wasm_context.rs`: `"amplifier:modules/context-manager@1.0.0"` (verify — check the file) +- `wasm_approval.rs`: `"amplifier:modules/approval-provider@1.0.0"` (verify — check the file) +- `wasm_provider.rs`: `"amplifier:modules/provider@1.0.0"` (verify — check the file) +- `wasm_orchestrator.rs`: `"amplifier:modules/orchestrator@1.0.0"` (verify — check the file) + +--- + +## Task 0: Define `ModuleManifest`, `ModuleArtifact` Types and Create `module_resolver.rs` Skeleton + +**Files:** +- Create: `crates/amplifier-core/src/module_resolver.rs` +- Modify: `crates/amplifier-core/src/lib.rs` + +### Step 1: Write the failing test + +Add the file `crates/amplifier-core/src/module_resolver.rs` with **only** the test module at the bottom — no implementation yet. This test verifies the types exist and can be constructed: + +```rust +//! Cross-language module resolver. +//! +//! Given a filesystem path, inspects its contents and determines: +//! - What transport to use (Python, WASM, gRPC) +//! - What module type it is (Tool, Provider, Orchestrator, etc.) +//! 
- Where the loadable artifact is
+//!
+//! Detection order (first match wins):
+//! 1. `amplifier.toml` (explicit override)
+//! 2. `.wasm` files (auto-detect via Component Model metadata)
+//! 3. Python package (`__init__.py` fallback)
+//! 4. Error
+
+use std::path::{Path, PathBuf};
+
+use crate::models::ModuleType;
+use crate::transport::Transport;
+
+/// Describes a resolved module: what transport, what type, and where the artifact is.
+#[derive(Debug, Clone)]
+pub struct ModuleManifest {
+    /// Transport to use for loading (Python, WASM, gRPC).
+    pub transport: Transport,
+    /// Module type (Tool, Provider, Orchestrator, etc.).
+    pub module_type: ModuleType,
+    /// Where the loadable artifact lives.
+    pub artifact: ModuleArtifact,
+}
+
+/// The loadable artifact for a resolved module.
+#[derive(Debug, Clone)]
+pub enum ModuleArtifact {
+    /// Raw WASM component bytes, plus the path they were read from.
+    WasmBytes { bytes: Vec<u8>, path: PathBuf },
+    /// A gRPC endpoint URL (e.g., "http://localhost:50051").
+    GrpcEndpoint(String),
+    /// A Python package name (e.g., "amplifier_module_tool_bash").
+    PythonModule(String),
+}
+
+/// Resolve a module from a filesystem path.
+///
+/// Inspects the directory at `path` and returns a `ModuleManifest`
+/// describing the transport, module type, and artifact location.
+pub fn resolve_module(path: &Path) -> Result<ModuleManifest, ModuleResolverError> {
+    todo!("Task 5 implements this")
+}
+
+/// Errors from module resolution.
+#[derive(Debug, thiserror::Error)]
+pub enum ModuleResolverError {
+    /// The path does not exist or is not a directory.
+    #[error("module path does not exist: {path}")]
+    PathNotFound { path: PathBuf },
+
+    /// No loadable artifact found at the path.
+    #[error("could not detect module transport at {path}. Expected: .wasm file, amplifier.toml, or Python package (__init__.py).")]
+    NoArtifactFound { path: PathBuf },
+
+    /// WASM component does not export any known Amplifier module interface. 
+ #[error("WASM component at {path} does not export any known Amplifier module interface. Known interfaces: amplifier:modules/tool, amplifier:modules/hook-handler, amplifier:modules/context-manager, amplifier:modules/approval-provider, amplifier:modules/provider, amplifier:modules/orchestrator")] + UnknownWasmInterface { path: PathBuf }, + + /// WASM component exports multiple Amplifier interfaces (ambiguous). + #[error("WASM component at {path} exports multiple Amplifier module interfaces ({found:?}). A component should implement exactly one module type.")] + AmbiguousWasmInterface { path: PathBuf, found: Vec }, + + /// Failed to parse `amplifier.toml`. + #[error("failed to parse amplifier.toml at {path}: {reason}")] + TomlParseError { path: PathBuf, reason: String }, + + /// Failed to read or compile a WASM file. + #[error("failed to load WASM component at {path}: {reason}")] + WasmLoadError { path: PathBuf, reason: String }, + + /// I/O error reading files. + #[error("I/O error at {path}: {source}")] + Io { + path: PathBuf, + source: std::io::Error, + }, +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn module_manifest_can_be_constructed() { + let manifest = ModuleManifest { + transport: Transport::Wasm, + module_type: ModuleType::Tool, + artifact: ModuleArtifact::WasmBytes { + bytes: vec![0, 1, 2], + path: PathBuf::from("/tmp/echo-tool.wasm"), + }, + }; + assert_eq!(manifest.transport, Transport::Wasm); + assert_eq!(manifest.module_type, ModuleType::Tool); + } + + #[test] + fn module_artifact_grpc_variant() { + let artifact = ModuleArtifact::GrpcEndpoint("http://localhost:50051".into()); + match artifact { + ModuleArtifact::GrpcEndpoint(endpoint) => { + assert_eq!(endpoint, "http://localhost:50051"); + } + _ => panic!("expected GrpcEndpoint variant"), + } + } + + #[test] + fn module_artifact_python_variant() { + let artifact = ModuleArtifact::PythonModule("amplifier_module_tool_bash".into()); + match artifact { + 
ModuleArtifact::PythonModule(name) => { + assert_eq!(name, "amplifier_module_tool_bash"); + } + _ => panic!("expected PythonModule variant"), + } + } + + #[test] + fn module_resolver_error_displays_correctly() { + let err = ModuleResolverError::NoArtifactFound { + path: PathBuf::from("/tmp/empty"), + }; + let msg = format!("{err}"); + assert!(msg.contains("/tmp/empty")); + assert!(msg.contains(".wasm")); + assert!(msg.contains("amplifier.toml")); + assert!(msg.contains("__init__.py")); + } +} +``` + +### Step 2: Register the module in `lib.rs` + +Open `crates/amplifier-core/src/lib.rs`. Add `pub mod module_resolver;` after the `transport` line. The module list currently looks like: + +```rust +pub mod transport; +#[cfg(feature = "wasm")] +pub mod wasm_engine; +``` + +Add the new line so it becomes: + +```rust +pub mod module_resolver; +pub mod transport; +#[cfg(feature = "wasm")] +pub mod wasm_engine; +``` + +(Keep alphabetical order with other modules.) + +### Step 3: Run test to verify it passes + +```bash +cd /home/bkrabach/dev/rust-devrust-core/amplifier-core +cargo test -p amplifier-core --features wasm -- module_resolver -v +``` + +**Expected:** 4 tests pass: `module_manifest_can_be_constructed`, `module_artifact_grpc_variant`, `module_artifact_python_variant`, `module_resolver_error_displays_correctly`. + +### Step 4: Run clippy + +```bash +cargo clippy -p amplifier-core --features wasm -- -D warnings +``` + +**Expected:** No errors. (The `todo!()` macro in `resolve_module` is fine — clippy doesn't flag it.) 
+ +### Step 5: Commit + +```bash +git add crates/amplifier-core/src/module_resolver.rs crates/amplifier-core/src/lib.rs +git commit -m "feat(resolver): add ModuleManifest, ModuleArtifact types and module_resolver skeleton" +``` + +--- + +## Task 1: `amplifier.toml` Reader + +**Files:** +- Modify: `crates/amplifier-core/Cargo.toml` (add `toml` dependency) +- Modify: `crates/amplifier-core/src/module_resolver.rs` (add TOML parsing function) + +### Step 1: Add the `toml` crate dependency + +Open `crates/amplifier-core/Cargo.toml`. In the `[dependencies]` section, add: + +```toml +toml = "0.8" +``` + +Add it alphabetically — after the `tokio-stream` line and before the `wasmtime` line. + +### Step 2: Write the failing test + +Add these tests to the `mod tests` block in `module_resolver.rs`: + +```rust + #[test] + fn parse_toml_grpc_transport() { + let toml_content = r#" +[module] +transport = "grpc" +type = "tool" + +[grpc] +endpoint = "http://localhost:50051" +"#; + let manifest = parse_amplifier_toml(toml_content, Path::new("/tmp/my-module")) + .expect("should parse valid TOML"); + assert_eq!(manifest.transport, Transport::Grpc); + assert_eq!(manifest.module_type, ModuleType::Tool); + match manifest.artifact { + ModuleArtifact::GrpcEndpoint(ref ep) => assert_eq!(ep, "http://localhost:50051"), + _ => panic!("expected GrpcEndpoint"), + } + } + + #[test] + fn parse_toml_wasm_transport() { + let toml_content = r#" +[module] +transport = "wasm" +type = "provider" +artifact = "my-provider.wasm" +"#; + let manifest = parse_amplifier_toml(toml_content, Path::new("/tmp/my-module")) + .expect("should parse valid TOML"); + assert_eq!(manifest.transport, Transport::Wasm); + assert_eq!(manifest.module_type, ModuleType::Provider); + } + + #[test] + fn parse_toml_python_transport() { + let toml_content = r#" +[module] +transport = "python" +type = "hook" +"#; + let manifest = parse_amplifier_toml(toml_content, Path::new("/tmp/my-module")) + .expect("should parse valid TOML"); + 
assert_eq!(manifest.transport, Transport::Python); + assert_eq!(manifest.module_type, ModuleType::Hook); + } + + #[test] + fn parse_toml_grpc_missing_endpoint_errors() { + let toml_content = r#" +[module] +transport = "grpc" +type = "tool" +"#; + let result = parse_amplifier_toml(toml_content, Path::new("/tmp/my-module")); + assert!(result.is_err()); + let err_msg = format!("{}", result.unwrap_err()); + assert!(err_msg.contains("endpoint")); + } + + #[test] + fn parse_toml_missing_type_errors() { + let toml_content = r#" +[module] +transport = "grpc" +"#; + let result = parse_amplifier_toml(toml_content, Path::new("/tmp/my-module")); + assert!(result.is_err()); + } + + #[test] + fn parse_toml_missing_module_section_errors() { + let toml_content = r#" +name = "something" +"#; + let result = parse_amplifier_toml(toml_content, Path::new("/tmp/my-module")); + assert!(result.is_err()); + } +``` + +### Step 3: Run test to verify they fail + +```bash +cargo test -p amplifier-core --features wasm -- parse_toml -v +``` + +**Expected:** FAIL — `parse_amplifier_toml` doesn't exist yet. + +### Step 4: Write the implementation + +Add this function above the `resolve_module` function in `module_resolver.rs` (after the `ModuleResolverError` enum): + +```rust +/// Parse an `amplifier.toml` content string into a `ModuleManifest`. +/// +/// The TOML must have a `[module]` section with `transport` and `type` fields. +/// For gRPC transport, a `[grpc]` section with `endpoint` is required. 
+pub(crate) fn parse_amplifier_toml(
+    content: &str,
+    module_path: &Path,
+) -> Result<ModuleManifest, ModuleResolverError> {
+    let table: toml::Table = content.parse().map_err(|e: toml::de::Error| {
+        ModuleResolverError::TomlParseError {
+            path: module_path.to_path_buf(),
+            reason: e.to_string(),
+        }
+    })?;
+
+    let module_section = table.get("module").and_then(|v| v.as_table()).ok_or_else(|| {
+        ModuleResolverError::TomlParseError {
+            path: module_path.to_path_buf(),
+            reason: "missing [module] section".into(),
+        }
+    })?;
+
+    // Parse transport
+    let transport_str = module_section
+        .get("transport")
+        .and_then(|v| v.as_str())
+        .unwrap_or("python");
+    let transport = Transport::from_str(transport_str);
+
+    // Parse module type (required)
+    let type_str = module_section
+        .get("type")
+        .and_then(|v| v.as_str())
+        .ok_or_else(|| ModuleResolverError::TomlParseError {
+            path: module_path.to_path_buf(),
+            reason: "missing 'type' field in [module] section".into(),
+        })?;
+    let module_type = parse_module_type(type_str).ok_or_else(|| {
+        ModuleResolverError::TomlParseError {
+            path: module_path.to_path_buf(),
+            reason: format!(
+                "unknown module type '{}'. 
Valid types: tool, hook, context, approval, provider, orchestrator, resolver",
+                type_str
+            ),
+        }
+    })?;
+
+    // Build artifact based on transport
+    let artifact = match transport {
+        Transport::Grpc => {
+            let endpoint = table
+                .get("grpc")
+                .and_then(|v| v.as_table())
+                .and_then(|t| t.get("endpoint"))
+                .and_then(|v| v.as_str())
+                .ok_or_else(|| ModuleResolverError::TomlParseError {
+                    path: module_path.to_path_buf(),
+                    reason: "gRPC transport requires [grpc] section with 'endpoint' field".into(),
+                })?;
+            ModuleArtifact::GrpcEndpoint(endpoint.to_string())
+        }
+        Transport::Wasm => {
+            // If artifact path specified in TOML, use it; otherwise will be detected later
+            let wasm_filename = module_section
+                .get("artifact")
+                .and_then(|v| v.as_str())
+                .unwrap_or("module.wasm");
+            let wasm_path = module_path.join(wasm_filename);
+            // Don't read bytes here — the caller will read them if the file exists
+            ModuleArtifact::WasmBytes {
+                bytes: Vec::new(),
+                path: wasm_path,
+            }
+        }
+        Transport::Python | Transport::Native => {
+            // Derive Python module name from directory name
+            let dir_name = module_path
+                .file_name()
+                .and_then(|n| n.to_str())
+                .unwrap_or("unknown");
+            let python_name = dir_name.replace('-', "_");
+            ModuleArtifact::PythonModule(python_name)
+        }
+    };
+
+    Ok(ModuleManifest {
+        transport,
+        module_type,
+        artifact,
+    })
+}
+
+/// Convert a type string to a `ModuleType`.
+fn parse_module_type(s: &str) -> Option<ModuleType> {
+    match s {
+        "tool" => Some(ModuleType::Tool),
+        "hook" => Some(ModuleType::Hook),
+        "context" => Some(ModuleType::Context),
+        "approval" => Some(ModuleType::Approval),
+        "provider" => Some(ModuleType::Provider),
+        "orchestrator" => Some(ModuleType::Orchestrator),
+        "resolver" => Some(ModuleType::Resolver),
+        _ => None,
+    }
+}
+```
+
+**Important:** You need to add an `Approval` variant to the `ModuleType` enum in `models.rs`. Currently it has `Orchestrator, Provider, Tool, Context, Hook, Resolver`. 
The design requires `Approval` too (for `amplifier:modules/approval-provider`). Open `crates/amplifier-core/src/models.rs` and add `Approval` to the `ModuleType` enum: + +```rust +#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] +#[serde(rename_all = "snake_case")] +pub enum ModuleType { + Orchestrator, + Provider, + Tool, + Context, + Hook, + Approval, + Resolver, +} +``` + +Also add `use toml;` is not needed since we use the fully-qualified `toml::Table` in the code — the `toml` crate is used directly via `content.parse()`. + +### Step 5: Run tests to verify they pass + +```bash +cargo test -p amplifier-core --features wasm -- parse_toml -v +``` + +**Expected:** All 6 `parse_toml_*` tests pass. + +### Step 6: Run clippy + +```bash +cargo clippy -p amplifier-core --features wasm -- -D warnings +``` + +**Expected:** Clean. + +### Step 7: Commit + +```bash +git add crates/amplifier-core/Cargo.toml crates/amplifier-core/src/module_resolver.rs crates/amplifier-core/src/models.rs +git commit -m "feat(resolver): add amplifier.toml reader with TOML parsing" +``` + +--- + +## Task 2: `.wasm` File Scanner + +**Files:** +- Modify: `crates/amplifier-core/src/module_resolver.rs` + +### Step 1: Write the failing test + +Add to `mod tests`: + +```rust + #[test] + fn scan_wasm_finds_wasm_file() { + // Create a temp dir with a .wasm file + let dir = tempfile::tempdir().expect("create temp dir"); + let wasm_path = dir.path().join("echo-tool.wasm"); + std::fs::write(&wasm_path, b"fake wasm bytes").expect("write wasm file"); + + let found = scan_for_wasm_file(dir.path()).expect("should find wasm file"); + assert_eq!(found.file_name().unwrap(), "echo-tool.wasm"); + } + + #[test] + fn scan_wasm_returns_none_for_empty_dir() { + let dir = tempfile::tempdir().expect("create temp dir"); + let result = scan_for_wasm_file(dir.path()); + assert!(result.is_none()); + } + + #[test] + fn scan_wasm_ignores_non_wasm_files() { + let dir = tempfile::tempdir().expect("create temp 
dir");
+        std::fs::write(dir.path().join("README.md"), b"hello").expect("write");
+        std::fs::write(dir.path().join("lib.py"), b"pass").expect("write");
+
+        let result = scan_for_wasm_file(dir.path());
+        assert!(result.is_none());
+    }
+```
+
+You'll need the `tempfile` crate for tests. Add it to `Cargo.toml` under `[dev-dependencies]`:
+
+```toml
+[dev-dependencies]
+tempfile = "3"
+tokio = { version = "1", features = ["rt-multi-thread", "macros"] }
+```
+
+(Check if `[dev-dependencies]` already exists — if not, add the section. The `tokio` dev-dependency is needed for async tests in Task 10.)
+
+### Step 2: Run test to verify it fails
+
+```bash
+cargo test -p amplifier-core --features wasm -- scan_wasm -v
+```
+
+**Expected:** FAIL — `scan_for_wasm_file` doesn't exist yet.
+
+### Step 3: Write the implementation
+
+Add this function in `module_resolver.rs` (above `resolve_module`):
+
+```rust
+/// Scan a directory for `.wasm` files. Returns the path to the first one found, or None.
+pub(crate) fn scan_for_wasm_file(dir: &Path) -> Option<PathBuf> {
+    let entries = std::fs::read_dir(dir).ok()?;
+    for entry in entries.flatten() {
+        let path = entry.path();
+        if path.is_file() {
+            if let Some(ext) = path.extension() {
+                if ext == "wasm" {
+                    return Some(path);
+                }
+            }
+        }
+    }
+    None
+}
+```
+
+### Step 4: Run test to verify it passes
+
+```bash
+cargo test -p amplifier-core --features wasm -- scan_wasm -v
+```
+
+**Expected:** All 3 `scan_wasm_*` tests pass.
+
+### Step 5: Commit
+
+```bash
+git add crates/amplifier-core/src/module_resolver.rs crates/amplifier-core/Cargo.toml
+git commit -m "feat(resolver): add .wasm file scanner"
+```
+
+---
+
+## Task 3: WASM Component Metadata Parser
+
+**Files:**
+- Modify: `crates/amplifier-core/src/module_resolver.rs`
+
+This is the most important detection step. Given `.wasm` bytes, load the component and inspect its exports to determine the `ModuleType`.
+
+### Step 1: Write the failing test
+
+Add to `mod tests`. 
These tests use the real `.wasm` fixtures: + +```rust + #[cfg(feature = "wasm")] + fn fixture(name: &str) -> Vec { + let manifest = std::path::Path::new(env!("CARGO_MANIFEST_DIR")); + let path = manifest.join("../../tests/fixtures/wasm").join(name); + std::fs::read(&path) + .unwrap_or_else(|e| panic!("fixture '{}' not found at {}: {}", name, path.display(), e)) + } + + #[cfg(feature = "wasm")] + #[test] + fn detect_wasm_module_type_tool() { + let bytes = fixture("echo-tool.wasm"); + let engine = crate::wasm_engine::WasmEngine::new().unwrap(); + let module_type = detect_wasm_module_type(&bytes, engine.inner(), Path::new("echo-tool.wasm")) + .expect("should detect tool"); + assert_eq!(module_type, ModuleType::Tool); + } + + #[cfg(feature = "wasm")] + #[test] + fn detect_wasm_module_type_hook() { + let bytes = fixture("deny-hook.wasm"); + let engine = crate::wasm_engine::WasmEngine::new().unwrap(); + let module_type = detect_wasm_module_type(&bytes, engine.inner(), Path::new("deny-hook.wasm")) + .expect("should detect hook"); + assert_eq!(module_type, ModuleType::Hook); + } + + #[cfg(feature = "wasm")] + #[test] + fn detect_wasm_module_type_context() { + let bytes = fixture("memory-context.wasm"); + let engine = crate::wasm_engine::WasmEngine::new().unwrap(); + let module_type = detect_wasm_module_type(&bytes, engine.inner(), Path::new("memory-context.wasm")) + .expect("should detect context"); + assert_eq!(module_type, ModuleType::Context); + } + + #[cfg(feature = "wasm")] + #[test] + fn detect_wasm_module_type_approval() { + let bytes = fixture("auto-approve.wasm"); + let engine = crate::wasm_engine::WasmEngine::new().unwrap(); + let module_type = detect_wasm_module_type(&bytes, engine.inner(), Path::new("auto-approve.wasm")) + .expect("should detect approval"); + assert_eq!(module_type, ModuleType::Approval); + } + + #[cfg(feature = "wasm")] + #[test] + fn detect_wasm_module_type_provider() { + let bytes = fixture("echo-provider.wasm"); + let engine = 
crate::wasm_engine::WasmEngine::new().unwrap(); + let module_type = detect_wasm_module_type(&bytes, engine.inner(), Path::new("echo-provider.wasm")) + .expect("should detect provider"); + assert_eq!(module_type, ModuleType::Provider); + } + + #[cfg(feature = "wasm")] + #[test] + fn detect_wasm_module_type_orchestrator() { + let bytes = fixture("passthrough-orchestrator.wasm"); + let engine = crate::wasm_engine::WasmEngine::new().unwrap(); + let module_type = detect_wasm_module_type(&bytes, engine.inner(), Path::new("passthrough-orchestrator.wasm")) + .expect("should detect orchestrator"); + assert_eq!(module_type, ModuleType::Orchestrator); + } +``` + +### Step 2: Run test to verify they fail + +```bash +cargo test -p amplifier-core --features wasm -- detect_wasm_module_type -v +``` + +**Expected:** FAIL — `detect_wasm_module_type` doesn't exist. + +### Step 3: Write the implementation + +Add this to `module_resolver.rs`. Put it above `resolve_module`, gated behind `#[cfg(feature = "wasm")]`: + +```rust +/// Known Amplifier WIT interface names and their corresponding module types. +/// +/// These are the versioned interface names embedded in WASM components by +/// `cargo component` when building against `wit/amplifier-modules.wit`. +#[cfg(feature = "wasm")] +const KNOWN_INTERFACES: &[(&str, ModuleType)] = &[ + ("amplifier:modules/tool", ModuleType::Tool), + ("amplifier:modules/hook-handler", ModuleType::Hook), + ("amplifier:modules/context-manager", ModuleType::Context), + ("amplifier:modules/approval-provider", ModuleType::Approval), + ("amplifier:modules/provider", ModuleType::Provider), + ("amplifier:modules/orchestrator", ModuleType::Orchestrator), +]; + +/// Inspect a WASM component's exports to determine which Amplifier module type it implements. +/// +/// Loads the component using the provided wasmtime engine, then iterates over +/// its exported interface names looking for matches against `KNOWN_INTERFACES`. 
+/// +/// Returns `Ok(ModuleType)` if exactly one known interface is found. +/// Returns `Err` if zero or more than one known interface is exported. +#[cfg(feature = "wasm")] +pub(crate) fn detect_wasm_module_type( + wasm_bytes: &[u8], + engine: std::sync::Arc, + wasm_path: &Path, +) -> Result { + let component = wasmtime::component::Component::new(&engine, wasm_bytes).map_err(|e| { + ModuleResolverError::WasmLoadError { + path: wasm_path.to_path_buf(), + reason: format!("failed to compile WASM component: {e}"), + } + })?; + + // Get the component type and inspect exports + let component_type = component.component_type(); + let mut found: Vec<(String, ModuleType)> = Vec::new(); + + for (name, _export) in component_type.exports(&engine) { + for (interface_prefix, module_type) in KNOWN_INTERFACES { + // Export names may include the version suffix (e.g., "amplifier:modules/tool@1.0.0") + // so we use starts_with to match the base name. + if name.starts_with(interface_prefix) { + found.push((name.to_string(), module_type.clone())); + } + } + } + + match found.len() { + 0 => Err(ModuleResolverError::UnknownWasmInterface { + path: wasm_path.to_path_buf(), + }), + 1 => Ok(found.into_iter().next().unwrap().1), + _ => Err(ModuleResolverError::AmbiguousWasmInterface { + path: wasm_path.to_path_buf(), + found: found.into_iter().map(|(name, _)| name).collect(), + }), + } +} +``` + +**Important:** You need to add `use std::sync::Arc;` at the top of the file if not already present. Also, the `ModuleType` enum needs `Clone` — check that it already has `#[derive(Debug, Clone, ...)]` in `models.rs`. (It does: `#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]`.) + +### Step 4: Run tests to verify they pass + +```bash +cargo test -p amplifier-core --features wasm -- detect_wasm_module_type -v +``` + +**Expected:** All 6 `detect_wasm_module_type_*` tests pass. 
+ +**NOTE:** If any test fails because the export name format doesn't match (e.g., it uses a different versioning scheme), read the actual error message. You may need to adjust the `starts_with` matching or add debug logging to see what the actual export names are. A quick debugging approach: + +```rust +// Temporary debug: print all exports +for (name, _) in component_type.exports(&engine) { + eprintln!("EXPORT: {name}"); +} +``` + +Run with `cargo test ... -- --nocapture` to see the output. + +### Step 5: Run clippy + +```bash +cargo clippy -p amplifier-core --features wasm -- -D warnings +``` + +### Step 6: Commit + +```bash +git add crates/amplifier-core/src/module_resolver.rs +git commit -m "feat(resolver): add WASM component metadata parser for module type detection" +``` + +--- + +## Task 4: Python Package Detector + +**Files:** +- Modify: `crates/amplifier-core/src/module_resolver.rs` + +### Step 1: Write the failing test + +Add to `mod tests`: + +```rust + #[test] + fn detect_python_package_with_init_py() { + let dir = tempfile::tempdir().expect("create temp dir"); + std::fs::write(dir.path().join("__init__.py"), b"# Python package").expect("write"); + + let result = detect_python_package(dir.path()); + assert!(result.is_some()); + let name = result.unwrap(); + // Package name is derived from directory name + assert!(!name.is_empty()); + } + + #[test] + fn detect_python_package_with_nested_package() { + let dir = tempfile::tempdir().expect("create temp dir"); + let pkg_dir = dir.path().join("amplifier_module_tool_bash"); + std::fs::create_dir(&pkg_dir).expect("create pkg dir"); + std::fs::write(pkg_dir.join("__init__.py"), b"# Package").expect("write"); + + let result = detect_python_package(dir.path()); + assert!(result.is_some()); + } + + #[test] + fn detect_python_package_empty_dir() { + let dir = tempfile::tempdir().expect("create temp dir"); + let result = detect_python_package(dir.path()); + assert!(result.is_none()); + } + + #[test] + fn 
detect_python_package_no_init_py() { + let dir = tempfile::tempdir().expect("create temp dir"); + std::fs::write(dir.path().join("README.md"), b"hello").expect("write"); + std::fs::write(dir.path().join("main.py"), b"print('hi')").expect("write"); + + let result = detect_python_package(dir.path()); + assert!(result.is_none()); + } +``` + +### Step 2: Run test to verify they fail + +```bash +cargo test -p amplifier-core --features wasm -- detect_python -v +``` + +**Expected:** FAIL — `detect_python_package` doesn't exist. + +### Step 3: Write the implementation + +Add to `module_resolver.rs`: + +```rust +/// Check if a directory contains a Python package (has `__init__.py`). +/// +/// Checks two locations: +/// 1. `path/__init__.py` (the directory itself is a package) +/// 2. `path//__init__.py` (nested package, e.g., `amplifier_module_*`) +/// +/// Returns the Python package name if found, or None. +pub(crate) fn detect_python_package(dir: &Path) -> Option { + // Check if the directory itself has __init__.py + if dir.join("__init__.py").exists() { + let name = dir + .file_name() + .and_then(|n| n.to_str()) + .unwrap_or("unknown_module"); + return Some(name.replace('-', "_")); + } + + // Check for a subdirectory with __init__.py (nested package) + if let Ok(entries) = std::fs::read_dir(dir) { + for entry in entries.flatten() { + let path = entry.path(); + if path.is_dir() && path.join("__init__.py").exists() { + let name = path + .file_name() + .and_then(|n| n.to_str()) + .unwrap_or("unknown_module"); + return Some(name.replace('-', "_")); + } + } + } + + None +} +``` + +### Step 4: Run tests to verify they pass + +```bash +cargo test -p amplifier-core --features wasm -- detect_python -v +``` + +**Expected:** All 4 `detect_python_*` tests pass. 
+ +### Step 5: Commit + +```bash +git add crates/amplifier-core/src/module_resolver.rs +git commit -m "feat(resolver): add Python package detector" +``` + +--- + +## Task 5: Orchestrate Detection — `resolve_module()` + +**Files:** +- Modify: `crates/amplifier-core/src/module_resolver.rs` + +This ties Tasks 1–4 together into the `resolve_module()` function. + +### Step 1: Write the failing tests + +Add to `mod tests`: + +```rust + #[test] + fn resolve_module_with_amplifier_toml() { + let dir = tempfile::tempdir().expect("create temp dir"); + let toml_content = r#" +[module] +transport = "grpc" +type = "tool" + +[grpc] +endpoint = "http://localhost:9999" +"#; + std::fs::write(dir.path().join("amplifier.toml"), toml_content).expect("write toml"); + // Also add a .wasm file to prove TOML takes priority + std::fs::write(dir.path().join("echo-tool.wasm"), b"fake").expect("write wasm"); + + let manifest = resolve_module(dir.path()).expect("should resolve"); + assert_eq!(manifest.transport, Transport::Grpc); + assert_eq!(manifest.module_type, ModuleType::Tool); + match manifest.artifact { + ModuleArtifact::GrpcEndpoint(ref ep) => assert_eq!(ep, "http://localhost:9999"), + _ => panic!("expected GrpcEndpoint"), + } + } + + #[test] + fn resolve_module_with_python_package() { + let dir = tempfile::tempdir().expect("create temp dir"); + std::fs::write(dir.path().join("__init__.py"), b"# package").expect("write"); + + let manifest = resolve_module(dir.path()).expect("should resolve"); + assert_eq!(manifest.transport, Transport::Python); + } + + #[test] + fn resolve_module_empty_dir_errors() { + let dir = tempfile::tempdir().expect("create temp dir"); + let result = resolve_module(dir.path()); + assert!(result.is_err()); + let err_msg = format!("{}", result.unwrap_err()); + assert!(err_msg.contains("could not detect")); + } + + #[test] + fn resolve_module_nonexistent_path_errors() { + let result = resolve_module(Path::new("/tmp/nonexistent-module-path-xyz")); + 
assert!(result.is_err()); + let err_msg = format!("{}", result.unwrap_err()); + assert!(err_msg.contains("does not exist")); + } + + #[cfg(feature = "wasm")] + #[test] + fn resolve_module_with_real_wasm_fixture() { + // Create a temp dir and copy a real fixture into it + let dir = tempfile::tempdir().expect("create temp dir"); + let wasm_bytes = fixture("echo-tool.wasm"); + std::fs::write(dir.path().join("echo-tool.wasm"), &wasm_bytes).expect("write wasm"); + + let manifest = resolve_module(dir.path()).expect("should resolve"); + assert_eq!(manifest.transport, Transport::Wasm); + assert_eq!(manifest.module_type, ModuleType::Tool); + match &manifest.artifact { + ModuleArtifact::WasmBytes { bytes, path } => { + assert!(!bytes.is_empty()); + assert!(path.to_string_lossy().contains("echo-tool.wasm")); + } + _ => panic!("expected WasmBytes"), + } + } +``` + +### Step 2: Run test to verify they fail + +```bash +cargo test -p amplifier-core --features wasm -- resolve_module -v +``` + +**Expected:** FAIL — `resolve_module` still has `todo!()`. + +### Step 3: Write the implementation + +Replace the `resolve_module` function body (remove the `todo!()`): + +```rust +/// Resolve a module from a filesystem path. +/// +/// Inspects the directory at `path` and returns a `ModuleManifest` +/// describing the transport, module type, and artifact location. +/// +/// Detection order (first match wins): +/// 1. `amplifier.toml` (explicit override — always honored when present) +/// 2. `.wasm` files (auto-detect via Component Model metadata) +/// 3. Python package (`__init__.py` fallback) +/// 4. 
Error (clear guidance message) +pub fn resolve_module(path: &Path) -> Result { + // Validate the path exists + if !path.exists() { + return Err(ModuleResolverError::PathNotFound { + path: path.to_path_buf(), + }); + } + + // Step 1: Check for amplifier.toml (explicit override) + let toml_path = path.join("amplifier.toml"); + if toml_path.exists() { + let content = std::fs::read_to_string(&toml_path).map_err(|e| ModuleResolverError::Io { + path: toml_path.clone(), + source: e, + })?; + return parse_amplifier_toml(&content, path); + } + + // Step 2: Check for .wasm files + if let Some(wasm_path) = scan_for_wasm_file(path) { + let bytes = std::fs::read(&wasm_path).map_err(|e| ModuleResolverError::Io { + path: wasm_path.clone(), + source: e, + })?; + + // Detect module type from WASM component metadata + #[cfg(feature = "wasm")] + { + let engine = crate::wasm_engine::WasmEngine::new().map_err(|e| { + ModuleResolverError::WasmLoadError { + path: wasm_path.clone(), + reason: format!("failed to create WASM engine: {e}"), + } + })?; + let module_type = detect_wasm_module_type(&bytes, engine.inner(), &wasm_path)?; + return Ok(ModuleManifest { + transport: Transport::Wasm, + module_type, + artifact: ModuleArtifact::WasmBytes { + bytes, + path: wasm_path, + }, + }); + } + + #[cfg(not(feature = "wasm"))] + { + return Err(ModuleResolverError::WasmLoadError { + path: wasm_path, + reason: "WASM support not enabled. 
Compile with --features wasm".into(), + }); + } + } + + // Step 3: Check for Python package + if let Some(package_name) = detect_python_package(path) { + return Ok(ModuleManifest { + transport: Transport::Python, + module_type: ModuleType::Tool, // Default; Python side will refine + artifact: ModuleArtifact::PythonModule(package_name), + }); + } + + // Step 4: Nothing found + Err(ModuleResolverError::NoArtifactFound { + path: path.to_path_buf(), + }) +} +``` + +### Step 4: Run tests to verify they pass + +```bash +cargo test -p amplifier-core --features wasm -- resolve_module -v +``` + +**Expected:** All 5 `resolve_module_*` tests pass. + +### Step 5: Run clippy + +```bash +cargo clippy -p amplifier-core --features wasm -- -D warnings +``` + +### Step 6: Commit + +```bash +git add crates/amplifier-core/src/module_resolver.rs +git commit -m "feat(resolver): implement resolve_module() detection pipeline" +``` + +--- + +## Task 6: `load_module()` Dispatch Function + +**Files:** +- Modify: `crates/amplifier-core/src/module_resolver.rs` + +### Step 1: Write the failing test + +Add to `mod tests`: + +```rust + #[cfg(feature = "wasm")] + #[tokio::test] + async fn load_module_wasm_tool() { + // Create a temp dir with a real fixture + let dir = tempfile::tempdir().expect("create temp dir"); + let wasm_bytes = fixture("echo-tool.wasm"); + std::fs::write(dir.path().join("echo-tool.wasm"), &wasm_bytes).expect("write wasm"); + + let manifest = resolve_module(dir.path()).expect("should resolve"); + let engine = crate::wasm_engine::WasmEngine::new().unwrap(); + let coordinator = std::sync::Arc::new(crate::coordinator::Coordinator::new_for_test()); + let result = load_module(&manifest, engine.inner(), Some(coordinator)); + assert!(result.is_ok()); + match result.unwrap() { + LoadedModule::Tool(tool) => assert_eq!(tool.name(), "echo-tool"), + other => panic!("expected Tool, got {:?}", other.variant_name()), + } + } + + #[test] + fn load_module_python_returns_signal() { + let 
manifest = ModuleManifest { + transport: Transport::Python, + module_type: ModuleType::Tool, + artifact: ModuleArtifact::PythonModule("my_tool".into()), + }; + // Python loading should NOT be handled in Rust — return a signal + let engine_placeholder = crate::wasm_engine::WasmEngine::new().unwrap(); + let result = load_module(&manifest, engine_placeholder.inner(), None); + assert!(result.is_ok()); + match result.unwrap() { + LoadedModule::PythonDelegated { package_name } => { + assert_eq!(package_name, "my_tool"); + } + other => panic!("expected PythonDelegated, got {:?}", other.variant_name()), + } + } +``` + +### Step 2: Run test to verify they fail + +```bash +cargo test -p amplifier-core --features wasm -- load_module -v +``` + +**Expected:** FAIL — `load_module` and `LoadedModule` don't exist. + +### Step 3: Write the implementation + +Add to `module_resolver.rs`: + +```rust +/// The result of loading a module through the resolver. +/// +/// For WASM and gRPC, returns a loaded `Arc`. +/// For Python, returns a signal that the caller should use importlib. +#[cfg(feature = "wasm")] +pub enum LoadedModule { + Tool(std::sync::Arc), + Hook(std::sync::Arc), + Context(std::sync::Arc), + Approval(std::sync::Arc), + Provider(std::sync::Arc), + Orchestrator(std::sync::Arc), + /// Python modules can't be loaded in Rust — this signals the caller + /// to use importlib on the Python side. + PythonDelegated { package_name: String }, +} + +#[cfg(feature = "wasm")] +impl LoadedModule { + /// Return a string name for the variant (for debug/error messages). + pub fn variant_name(&self) -> &'static str { + match self { + LoadedModule::Tool(_) => "Tool", + LoadedModule::Hook(_) => "Hook", + LoadedModule::Context(_) => "Context", + LoadedModule::Approval(_) => "Approval", + LoadedModule::Provider(_) => "Provider", + LoadedModule::Orchestrator(_) => "Orchestrator", + LoadedModule::PythonDelegated { .. 
} => "PythonDelegated", + } + } +} + +/// Load a module from a resolved `ModuleManifest`. +/// +/// Dispatches to the correct `load_wasm_*` or `load_grpc_*` function +/// from `transport.rs` based on the manifest's transport and module type. +/// +/// For Python transport, returns `LoadedModule::PythonDelegated` — the +/// Python host should handle loading via importlib. +/// +/// # Arguments +/// +/// * `manifest` — The resolved module manifest from `resolve_module()`. +/// * `engine` — A shared wasmtime Engine (from `WasmEngine::new().inner()`). +/// * `coordinator` — Optional coordinator (required only for orchestrator modules). +#[cfg(feature = "wasm")] +pub fn load_module( + manifest: &ModuleManifest, + engine: std::sync::Arc, + coordinator: Option>, +) -> Result> { + match &manifest.transport { + Transport::Python | Transport::Native => { + // Python modules are loaded by the Python host via importlib + if let ModuleArtifact::PythonModule(name) = &manifest.artifact { + return Ok(LoadedModule::PythonDelegated { + package_name: name.clone(), + }); + } + Err("Python transport but artifact is not PythonModule".into()) + } + Transport::Wasm => { + let bytes = match &manifest.artifact { + ModuleArtifact::WasmBytes { bytes, .. 
} => bytes, + _ => return Err("WASM transport but artifact is not WasmBytes".into()), + }; + match &manifest.module_type { + ModuleType::Tool => { + let loaded = crate::transport::load_wasm_tool(bytes, engine)?; + Ok(LoadedModule::Tool(loaded)) + } + ModuleType::Hook => { + let loaded = crate::transport::load_wasm_hook(bytes, engine)?; + Ok(LoadedModule::Hook(loaded)) + } + ModuleType::Context => { + let loaded = crate::transport::load_wasm_context(bytes, engine)?; + Ok(LoadedModule::Context(loaded)) + } + ModuleType::Approval => { + let loaded = crate::transport::load_wasm_approval(bytes, engine)?; + Ok(LoadedModule::Approval(loaded)) + } + ModuleType::Provider => { + let loaded = crate::transport::load_wasm_provider(bytes, engine)?; + Ok(LoadedModule::Provider(loaded)) + } + ModuleType::Orchestrator => { + let coord = coordinator.ok_or( + "Orchestrator modules require a Coordinator, but none was provided", + )?; + let loaded = crate::transport::load_wasm_orchestrator(bytes, engine, coord)?; + Ok(LoadedModule::Orchestrator(loaded)) + } + ModuleType::Resolver => { + Err("Resolver modules are not loadable via WASM transport".into()) + } + } + } + Transport::Grpc => { + // gRPC loading is async — for now return an error indicating + // the caller should use the async load_grpc_* functions directly. + // A full async load_module is deferred to avoid changing the sync API. + Err("gRPC module loading requires async runtime. Use load_grpc_tool() / load_grpc_orchestrator() directly.".into()) + } + } +} +``` + +### Step 4: Run tests to verify they pass + +```bash +cargo test -p amplifier-core --features wasm -- load_module -v +``` + +**Expected:** Both `load_module_*` tests pass. + +### Step 5: Run full test suite + +```bash +cargo test -p amplifier-core --features wasm +``` + +**Expected:** All existing tests still pass, plus all new module_resolver tests. 
+ +### Step 6: Run clippy + +```bash +cargo clippy -p amplifier-core --features wasm -- -D warnings +``` + +### Step 7: Commit + +```bash +git add crates/amplifier-core/src/module_resolver.rs +git commit -m "feat(resolver): add load_module() dispatch function" +``` + +--- + +## Task 7: PyO3 Bindings — Expose `resolve_module()` and `load_module()` to Python + +**Files:** +- Modify: `bindings/python/Cargo.toml` (add `wasm` feature) +- Modify: `bindings/python/src/lib.rs` (add functions) + +### Step 1: Update `bindings/python/Cargo.toml` + +Change the `amplifier-core` dependency to enable the `wasm` feature: + +```toml +amplifier-core = { path = "../../crates/amplifier-core", features = ["wasm"] } +``` + +### Step 2: Write the Python-facing functions + +Open `bindings/python/src/lib.rs`. At the very bottom of the file, just before the closing `}` of the module (or after the last `#[pymethods]` block), add these standalone `#[pyfunction]` functions: + +```rust +// --------------------------------------------------------------------------- +// Module resolver bindings (Phase 4) +// --------------------------------------------------------------------------- + +/// Resolve a module from a filesystem path. +/// +/// Returns a dict with keys: "transport", "module_type", "artifact_type", +/// and artifact-specific keys ("artifact_path", "endpoint", "package_name"). 
+#[pyfunction] +fn resolve_module(py: Python<'_>, path: String) -> PyResult> { + let manifest = amplifier_core::module_resolver::resolve_module(std::path::Path::new(&path)) + .map_err(|e| PyErr::new::(format!("{e}")))?; + + let dict = PyDict::new(py); + // Transport + let transport_str = match manifest.transport { + amplifier_core::transport::Transport::Python => "python", + amplifier_core::transport::Transport::Wasm => "wasm", + amplifier_core::transport::Transport::Grpc => "grpc", + amplifier_core::transport::Transport::Native => "native", + }; + dict.set_item("transport", transport_str)?; + + // Module type + let type_str = match manifest.module_type { + amplifier_core::ModuleType::Tool => "tool", + amplifier_core::ModuleType::Hook => "hook", + amplifier_core::ModuleType::Context => "context", + amplifier_core::ModuleType::Approval => "approval", + amplifier_core::ModuleType::Provider => "provider", + amplifier_core::ModuleType::Orchestrator => "orchestrator", + amplifier_core::ModuleType::Resolver => "resolver", + }; + dict.set_item("module_type", type_str)?; + + // Artifact + match &manifest.artifact { + amplifier_core::module_resolver::ModuleArtifact::WasmBytes { path, .. } => { + dict.set_item("artifact_type", "wasm")?; + dict.set_item("artifact_path", path.to_string_lossy().as_ref())?; + } + amplifier_core::module_resolver::ModuleArtifact::GrpcEndpoint(endpoint) => { + dict.set_item("artifact_type", "grpc")?; + dict.set_item("endpoint", endpoint.as_str())?; + } + amplifier_core::module_resolver::ModuleArtifact::PythonModule(name) => { + dict.set_item("artifact_type", "python")?; + dict.set_item("package_name", name.as_str())?; + } + } + + Ok(dict.unbind()) +} + +/// Load a WASM module from a resolved manifest path and mount it on the coordinator. +/// +/// This is the Rust-side loading for WASM modules. Python modules are handled +/// by importlib on the Python side — this function returns an error for Python transport. 
+/// +/// Returns a dict with "status" = "loaded" and "module_type" on success. +#[pyfunction] +fn load_wasm_from_path(py: Python<'_>, path: String) -> PyResult> { + let manifest = amplifier_core::module_resolver::resolve_module(std::path::Path::new(&path)) + .map_err(|e| PyErr::new::(format!("{e}")))?; + + if manifest.transport != amplifier_core::transport::Transport::Wasm { + return Err(PyErr::new::(format!( + "load_wasm_from_path only handles WASM modules, got transport '{:?}'", + manifest.transport + ))); + } + + let engine = amplifier_core::wasm_engine::WasmEngine::new() + .map_err(|e| PyErr::new::(format!("WASM engine creation failed: {e}")))?; + + let coordinator = std::sync::Arc::new(amplifier_core::Coordinator::new_for_test()); + let loaded = amplifier_core::module_resolver::load_module(&manifest, engine.inner(), Some(coordinator)) + .map_err(|e| PyErr::new::(format!("Module loading failed: {e}")))?; + + let dict = PyDict::new(py); + dict.set_item("status", "loaded")?; + dict.set_item("module_type", loaded.variant_name())?; + Ok(dict.unbind()) +} +``` + +Then register these functions in the `#[pymodule]` function. Find the `_engine` module function (it should be at the bottom of the file). If it looks like: + +```rust +#[pymodule] +fn _engine(m: &Bound<'_, PyModule>) -> PyResult<()> { + m.add_class::()?; + // ... + Ok(()) +} +``` + +Add: + +```rust + m.add_function(wrap_pyfunction!(resolve_module, m)?)?; + m.add_function(wrap_pyfunction!(load_wasm_from_path, m)?)?; +``` + +**NOTE:** If the `#[pymodule]` function isn't at the bottom of the file, search for `#[pymodule]` to find it. Read the file first to confirm the exact structure before editing. + +### Step 3: Verify it compiles + +```bash +cd /home/bkrabach/dev/rust-devrust-core/amplifier-core +cargo check -p amplifier-core-py +``` + +**Expected:** Compiles without errors. 
+ +### Step 4: Run clippy + +```bash +cargo clippy -p amplifier-core-py -- -D warnings +``` + +### Step 5: Commit + +```bash +git add bindings/python/Cargo.toml bindings/python/src/lib.rs +git commit -m "feat(resolver): expose resolve_module() and load_wasm_from_path() via PyO3" +``` + +--- + +## Task 8: Update `loader_dispatch.py` + +**Files:** +- Modify: `python/amplifier_core/loader_dispatch.py` + +### Step 1: Read the current file + +```bash +cat python/amplifier_core/loader_dispatch.py +``` + +Verify it still has the `NotImplementedError` for WASM and native transports. + +### Step 2: Update the file + +Replace the entire `load_module` function in `loader_dispatch.py` with: + +```python +async def load_module( + module_id: str, + config: dict[str, Any] | None, + source_path: str, + coordinator: Any, +) -> Any: + """Load a module from a resolved source path. + + Uses the Rust module resolver to auto-detect transport type. + Falls back to Python loader for backward compatibility. + + Args: + module_id: Module identifier (e.g., "tool-database") + config: Optional module configuration dict + source_path: Resolved filesystem path to the module + coordinator: The coordinator instance (RustCoordinator or ModuleCoordinator) + + Returns: + Mount function for the module + + Raises: + NotImplementedError: For transport types not yet supported + ValueError: If module cannot be loaded + """ + # Try Rust resolver first for auto-detection + try: + from amplifier_core._engine import resolve_module as rust_resolve + + manifest = rust_resolve(source_path) + transport = manifest.get("transport", "python") + except ImportError: + # Rust engine not available — fall back to TOML-based detection + logger.debug("Rust engine not available, using Python-only transport detection") + transport = _detect_transport(source_path) + except Exception as e: + # Rust resolver failed — fall back to TOML-based detection + logger.debug(f"Rust resolver failed for '{module_id}': {e}, falling back to 
Python detection") + transport = _detect_transport(source_path) + + if transport == "grpc": + from .loader_grpc import load_grpc_module + + meta = _read_module_meta(source_path) + return await load_grpc_module(module_id, config, meta, coordinator) + + if transport == "wasm": + try: + from amplifier_core._engine import load_wasm_from_path + + result = load_wasm_from_path(source_path) + logger.info( + f"[module:mount] {module_id} loaded via WASM resolver: {result}" + ) + # WASM modules are loaded into the Rust coordinator directly. + # Return a no-op mount function since the module is already loaded. + async def _noop_mount(coord: Any) -> None: + pass + return _noop_mount + except ImportError: + raise NotImplementedError( + f"WASM module loading for '{module_id}' requires the Rust engine. " + "Install amplifier-core with Rust extensions enabled." + ) + except Exception as e: + raise ValueError( + f"Failed to load WASM module '{module_id}' from {source_path}: {e}" + ) from e + + if transport == "native": + raise NotImplementedError( + f"Native Rust module loading not yet implemented for '{module_id}'. " + "Use transport = 'grpc' to load Rust modules as gRPC services." + ) + + # Default: existing Python loader (backward compatible) + from .loader import ModuleLoader + + loader = coordinator.loader or ModuleLoader(coordinator=coordinator) + return await loader.load(module_id, config, source_hint=source_path) +``` + +### Step 3: Verify Python syntax + +```bash +python3 -c "import ast; ast.parse(open('python/amplifier_core/loader_dispatch.py').read()); print('OK')" +``` + +**Expected:** Prints `OK`. 
+ +### Step 4: Commit + +```bash +git add python/amplifier_core/loader_dispatch.py +git commit -m "feat(resolver): wire WASM/gRPC branches to Rust resolver in loader_dispatch.py" +``` + +--- + +## Task 9: Napi-RS Bindings — Expose `resolveModule()` and `loadModule()` to TypeScript + +**Files:** +- Modify: `bindings/node/Cargo.toml` (add `wasm` feature) +- Modify: `bindings/node/src/lib.rs` (add functions) + +### Step 1: Update `bindings/node/Cargo.toml` + +Change the `amplifier-core` dependency to enable the `wasm` feature: + +```toml +amplifier-core = { path = "../../crates/amplifier-core", features = ["wasm"] } +``` + +### Step 2: Add the Napi-RS functions + +Open `bindings/node/src/lib.rs`. Add at the bottom, before the closing of the file: + +```rust +// --------------------------------------------------------------------------- +// Module resolver bindings (Phase 4) +// --------------------------------------------------------------------------- + +/// Result from resolving a module path. +#[napi(object)] +pub struct JsModuleManifest { + /// Transport type: "python", "wasm", "grpc", "native" + pub transport: String, + /// Module type: "tool", "hook", "context", "approval", "provider", "orchestrator" + pub module_type: String, + /// Artifact type: "wasm", "grpc", "python" + pub artifact_type: String, + /// Path to WASM artifact (if artifact_type is "wasm") + pub artifact_path: Option, + /// gRPC endpoint (if artifact_type is "grpc") + pub endpoint: Option, + /// Python package name (if artifact_type is "python") + pub package_name: Option, +} + +/// Resolve a module from a filesystem path. +/// +/// Returns a JsModuleManifest describing the transport, module type, and artifact. 
+#[napi] +pub fn resolve_module(path: String) -> Result { + let manifest = amplifier_core::module_resolver::resolve_module(std::path::Path::new(&path)) + .map_err(|e| Error::from_reason(format!("{e}")))?; + + let transport = match manifest.transport { + amplifier_core::transport::Transport::Python => "python", + amplifier_core::transport::Transport::Wasm => "wasm", + amplifier_core::transport::Transport::Grpc => "grpc", + amplifier_core::transport::Transport::Native => "native", + }; + + let module_type = match manifest.module_type { + amplifier_core::ModuleType::Tool => "tool", + amplifier_core::ModuleType::Hook => "hook", + amplifier_core::ModuleType::Context => "context", + amplifier_core::ModuleType::Approval => "approval", + amplifier_core::ModuleType::Provider => "provider", + amplifier_core::ModuleType::Orchestrator => "orchestrator", + amplifier_core::ModuleType::Resolver => "resolver", + }; + + let (artifact_type, artifact_path, endpoint, package_name) = match &manifest.artifact { + amplifier_core::module_resolver::ModuleArtifact::WasmBytes { path, .. } => { + ("wasm", Some(path.to_string_lossy().to_string()), None, None) + } + amplifier_core::module_resolver::ModuleArtifact::GrpcEndpoint(ep) => { + ("grpc", None, Some(ep.clone()), None) + } + amplifier_core::module_resolver::ModuleArtifact::PythonModule(name) => { + ("python", None, None, Some(name.clone())) + } + }; + + Ok(JsModuleManifest { + transport: transport.to_string(), + module_type: module_type.to_string(), + artifact_type: artifact_type.to_string(), + artifact_path, + endpoint, + package_name, + }) +} + +/// Load a WASM module from a path and return status info. +/// +/// For WASM modules: loads the component and returns module type info. +/// For Python modules: returns an error (TS host can't load Python). 
+#[napi] +pub fn load_wasm_from_path(path: String) -> Result { + let manifest = amplifier_core::module_resolver::resolve_module(std::path::Path::new(&path)) + .map_err(|e| Error::from_reason(format!("{e}")))?; + + if manifest.transport == amplifier_core::transport::Transport::Python { + return Err(Error::from_reason( + "Python module detected — compile to WASM or run as gRPC sidecar. \ + TypeScript hosts cannot load Python modules." + )); + } + + if manifest.transport != amplifier_core::transport::Transport::Wasm { + return Err(Error::from_reason(format!( + "load_wasm_from_path only handles WASM modules, got transport '{:?}'", + manifest.transport + ))); + } + + let engine = amplifier_core::wasm_engine::WasmEngine::new() + .map_err(|e| Error::from_reason(format!("WASM engine creation failed: {e}")))?; + + let coordinator = std::sync::Arc::new(amplifier_core::Coordinator::new_for_test()); + let loaded = amplifier_core::module_resolver::load_module( + &manifest, + engine.inner(), + Some(coordinator), + ) + .map_err(|e| Error::from_reason(format!("Module loading failed: {e}")))?; + + Ok(format!("loaded:{}", loaded.variant_name())) +} +``` + +### Step 3: Verify it compiles + +```bash +cargo check -p amplifier-core-node +``` + +**Expected:** Compiles without errors. + +### Step 4: Run clippy + +```bash +cargo clippy -p amplifier-core-node -- -D warnings +``` + +### Step 5: Commit + +```bash +git add bindings/node/Cargo.toml bindings/node/src/lib.rs +git commit -m "feat(resolver): expose resolveModule() and loadWasmFromPath() via Napi-RS" +``` + +--- + +## Task 10: Integration Tests — Full Pipeline E2E + +**Files:** +- Create: `crates/amplifier-core/tests/module_resolver_e2e.rs` + +### Step 1: Create the integration test file + +Create `crates/amplifier-core/tests/module_resolver_e2e.rs`: + +```rust +//! Module resolver E2E integration tests. +//! +//! Tests the full pipeline: resolve_module() → detect type → load → verify. +//! +//! 
Run with: cargo test -p amplifier-core --features wasm --test module_resolver_e2e + +#![cfg(feature = "wasm")] + +use std::path::Path; +use std::sync::Arc; + +use amplifier_core::models::ModuleType; +use amplifier_core::module_resolver::{ + resolve_module, load_module, ModuleArtifact, LoadedModule, ModuleResolverError, +}; +use amplifier_core::transport::Transport; +use amplifier_core::wasm_engine::WasmEngine; + +// --------------------------------------------------------------------------- +// Helpers +// --------------------------------------------------------------------------- + +fn fixture(name: &str) -> Vec<u8> { + let manifest = std::path::Path::new(env!("CARGO_MANIFEST_DIR")); + let path = manifest.join("../../tests/fixtures/wasm").join(name); + std::fs::read(&path) + .unwrap_or_else(|e| panic!("fixture '{}' not found at {}: {}", name, path.display(), e)) +} + +fn make_engine() -> Arc { + WasmEngine::new() + .expect("WasmEngine::new() should succeed") + .inner() +} + +/// Create a temp dir with a single .wasm fixture copied into it. 
+fn dir_with_wasm(fixture_name: &str) -> tempfile::TempDir { + let dir = tempfile::tempdir().expect("create temp dir"); + let bytes = fixture(fixture_name); + std::fs::write(dir.path().join(fixture_name), &bytes).expect("write fixture"); + dir +} + +// --------------------------------------------------------------------------- +// Test: resolve + detect type for each of the 6 WASM module types +// --------------------------------------------------------------------------- + +#[test] +fn resolve_wasm_tool() { + let dir = dir_with_wasm("echo-tool.wasm"); + let manifest = resolve_module(dir.path()).expect("should resolve"); + assert_eq!(manifest.transport, Transport::Wasm); + assert_eq!(manifest.module_type, ModuleType::Tool); +} + +#[test] +fn resolve_wasm_hook() { + let dir = dir_with_wasm("deny-hook.wasm"); + let manifest = resolve_module(dir.path()).expect("should resolve"); + assert_eq!(manifest.transport, Transport::Wasm); + assert_eq!(manifest.module_type, ModuleType::Hook); +} + +#[test] +fn resolve_wasm_context() { + let dir = dir_with_wasm("memory-context.wasm"); + let manifest = resolve_module(dir.path()).expect("should resolve"); + assert_eq!(manifest.transport, Transport::Wasm); + assert_eq!(manifest.module_type, ModuleType::Context); +} + +#[test] +fn resolve_wasm_approval() { + let dir = dir_with_wasm("auto-approve.wasm"); + let manifest = resolve_module(dir.path()).expect("should resolve"); + assert_eq!(manifest.transport, Transport::Wasm); + assert_eq!(manifest.module_type, ModuleType::Approval); +} + +#[test] +fn resolve_wasm_provider() { + let dir = dir_with_wasm("echo-provider.wasm"); + let manifest = resolve_module(dir.path()).expect("should resolve"); + assert_eq!(manifest.transport, Transport::Wasm); + assert_eq!(manifest.module_type, ModuleType::Provider); +} + +#[test] +fn resolve_wasm_orchestrator() { + let dir = dir_with_wasm("passthrough-orchestrator.wasm"); + let manifest = resolve_module(dir.path()).expect("should resolve"); + 
assert_eq!(manifest.transport, Transport::Wasm); + assert_eq!(manifest.module_type, ModuleType::Orchestrator); +} + +// --------------------------------------------------------------------------- +// Test: Python package detection +// --------------------------------------------------------------------------- + +#[test] +fn resolve_python_package() { + let dir = tempfile::tempdir().expect("create temp dir"); + std::fs::write(dir.path().join("__init__.py"), b"# package").expect("write"); + + let manifest = resolve_module(dir.path()).expect("should resolve"); + assert_eq!(manifest.transport, Transport::Python); + assert_eq!(manifest.module_type, ModuleType::Tool); // default for Python + match &manifest.artifact { + ModuleArtifact::PythonModule(_) => {} // OK + other => panic!("expected PythonModule, got {:?}", other), + } +} + +// --------------------------------------------------------------------------- +// Test: amplifier.toml gRPC detection +// --------------------------------------------------------------------------- + +#[test] +fn resolve_amplifier_toml_grpc() { + let dir = tempfile::tempdir().expect("create temp dir"); + let toml = r#" +[module] +transport = "grpc" +type = "tool" + +[grpc] +endpoint = "http://localhost:50051" +"#; + std::fs::write(dir.path().join("amplifier.toml"), toml).expect("write toml"); + + let manifest = resolve_module(dir.path()).expect("should resolve"); + assert_eq!(manifest.transport, Transport::Grpc); + assert_eq!(manifest.module_type, ModuleType::Tool); + match &manifest.artifact { + ModuleArtifact::GrpcEndpoint(ep) => assert_eq!(ep, "http://localhost:50051"), + other => panic!("expected GrpcEndpoint, got {:?}", other), + } +} + +// --------------------------------------------------------------------------- +// Test: amplifier.toml overrides auto-detection +// --------------------------------------------------------------------------- + +#[test] +fn resolve_amplifier_toml_overrides_auto() { + let dir = 
tempfile::tempdir().expect("create temp dir"); + + // Put both a .wasm file AND an amplifier.toml — TOML should win + let bytes = fixture("echo-tool.wasm"); + std::fs::write(dir.path().join("echo-tool.wasm"), &bytes).expect("write wasm"); + + let toml = r#" +[module] +transport = "grpc" +type = "provider" + +[grpc] +endpoint = "http://override:1234" +"#; + std::fs::write(dir.path().join("amplifier.toml"), toml).expect("write toml"); + + let manifest = resolve_module(dir.path()).expect("should resolve"); + // TOML takes priority over .wasm auto-detection + assert_eq!(manifest.transport, Transport::Grpc); + assert_eq!(manifest.module_type, ModuleType::Provider); +} + +// --------------------------------------------------------------------------- +// Test: error cases +// --------------------------------------------------------------------------- + +#[test] +fn resolve_empty_dir_errors() { + let dir = tempfile::tempdir().expect("create temp dir"); + let result = resolve_module(dir.path()); + assert!(result.is_err()); + + let err = result.unwrap_err(); + let msg = format!("{err}"); + assert!(msg.contains("could not detect"), "error: {msg}"); +} + +#[test] +fn resolve_nonexistent_path_errors() { + let result = resolve_module(Path::new("/nonexistent/path/module")); + assert!(result.is_err()); + + let err = result.unwrap_err(); + let msg = format!("{err}"); + assert!(msg.contains("does not exist"), "error: {msg}"); +} + +// --------------------------------------------------------------------------- +// Test: full pipeline — resolve → load → execute (echo-tool roundtrip) +// --------------------------------------------------------------------------- + +#[tokio::test] +async fn load_module_wasm_tool_e2e() { + let dir = dir_with_wasm("echo-tool.wasm"); + let manifest = resolve_module(dir.path()).expect("should resolve"); + + let engine = make_engine(); + let coordinator = Arc::new(amplifier_core::Coordinator::new_for_test()); + let loaded = load_module(&manifest, engine, 
Some(coordinator)) + .expect("should load"); + + match loaded { + LoadedModule::Tool(tool) => { + // Verify the tool loaded correctly + assert_eq!(tool.name(), "echo-tool"); + + // Execute it and verify roundtrip + let input = serde_json::json!({"message": "hello from resolver", "count": 7}); + let result = tool.execute(input.clone()).await.expect("execute should succeed"); + assert!(result.success); + assert_eq!(result.output, Some(input)); + } + other => panic!("expected Tool, got {}", other.variant_name()), + } +} + +#[tokio::test] +async fn load_module_wasm_hook_e2e() { + let dir = dir_with_wasm("deny-hook.wasm"); + let manifest = resolve_module(dir.path()).expect("should resolve"); + + let engine = make_engine(); + let loaded = load_module(&manifest, engine, None).expect("should load"); + + match loaded { + LoadedModule::Hook(_hook) => { + // Hook loaded successfully — that's the assertion + } + other => panic!("expected Hook, got {}", other.variant_name()), + } +} + +#[test] +fn load_module_python_returns_delegated() { + let dir = tempfile::tempdir().expect("create temp dir"); + std::fs::write(dir.path().join("__init__.py"), b"# package").expect("write"); + + let manifest = resolve_module(dir.path()).expect("should resolve"); + let engine = make_engine(); + let loaded = load_module(&manifest, engine, None).expect("should load"); + + match loaded { + LoadedModule::PythonDelegated { package_name } => { + assert!(!package_name.is_empty()); + } + other => panic!("expected PythonDelegated, got {}", other.variant_name()), + } +} +``` + +### Step 2: Run the integration tests + +```bash +cargo test -p amplifier-core --features wasm --test module_resolver_e2e -v +``` + +**Expected:** All tests pass. If any fail, read the error message and fix. + +### Step 3: Run the full test suite to ensure nothing is broken + +```bash +cargo test -p amplifier-core --features wasm +``` + +**Expected:** All tests pass (existing + new). 
+ +### Step 4: Run clippy on everything + +```bash +cargo clippy -p amplifier-core --features wasm -- -D warnings +cargo clippy -p amplifier-core-py -- -D warnings +cargo clippy -p amplifier-core-node -- -D warnings +``` + +**Expected:** Clean across all three crates. + +### Step 5: Commit + +```bash +git add crates/amplifier-core/tests/module_resolver_e2e.rs +git commit -m "test(resolver): add E2E integration tests for full resolve → load → execute pipeline" +``` + +--- + +## Final Checklist + +After all 11 tasks are complete, verify: + +1. **All Rust tests pass:** + ```bash + cargo test -p amplifier-core --features wasm + ``` + +2. **All integration tests pass:** + ```bash + cargo test -p amplifier-core --features wasm --test module_resolver_e2e -v + cargo test -p amplifier-core --features wasm --test wasm_e2e -v + ``` + +3. **Clippy is clean:** + ```bash + cargo clippy -p amplifier-core --features wasm -- -D warnings + cargo clippy -p amplifier-core-py -- -D warnings + cargo clippy -p amplifier-core-node -- -D warnings + ``` + +4. **Python syntax is valid:** + ```bash + python3 -c "import ast; ast.parse(open('python/amplifier_core/loader_dispatch.py').read()); print('OK')" + ``` + +5. 
**Git log shows clean conventional commits:** + ```bash + git log --oneline -11 + ``` + + Expected (newest first): + ``` + test(resolver): add E2E integration tests for full resolve → load → execute pipeline + feat(resolver): expose resolveModule() and loadWasmFromPath() via Napi-RS + feat(resolver): wire WASM/gRPC branches to Rust resolver in loader_dispatch.py + feat(resolver): expose resolve_module() and load_wasm_from_path() via PyO3 + feat(resolver): add load_module() dispatch function + feat(resolver): implement resolve_module() detection pipeline + feat(resolver): add Python package detector + feat(resolver): add WASM component metadata parser for module type detection + feat(resolver): add .wasm file scanner + feat(resolver): add amplifier.toml reader with TOML parsing + feat(resolver): add ModuleManifest, ModuleArtifact types and module_resolver skeleton + ``` + +--- + +## Summary of New/Modified Files + +| Action | File | +|---|---| +| **Create** | `crates/amplifier-core/src/module_resolver.rs` | +| **Create** | `crates/amplifier-core/tests/module_resolver_e2e.rs` | +| Modify | `crates/amplifier-core/src/lib.rs` (add `pub mod module_resolver;`) | +| Modify | `crates/amplifier-core/src/models.rs` (add `Approval` variant to `ModuleType`) | +| Modify | `crates/amplifier-core/Cargo.toml` (add `toml` + `tempfile` deps) | +| Modify | `bindings/python/Cargo.toml` (add `wasm` feature) | +| Modify | `bindings/python/src/lib.rs` (add `resolve_module`, `load_wasm_from_path`) | +| Modify | `bindings/node/Cargo.toml` (add `wasm` feature) | +| Modify | `bindings/node/src/lib.rs` (add `resolve_module`, `load_wasm_from_path`) | +| Modify | `python/amplifier_core/loader_dispatch.py` (wire WASM/gRPC to Rust) | diff --git a/docs/plans/2026-03-07-cross-language-sdk-dogfooding-plan.md b/docs/plans/2026-03-07-cross-language-sdk-dogfooding-plan.md new file mode 100644 index 0000000..b09691b --- /dev/null +++ b/docs/plans/2026-03-07-cross-language-sdk-dogfooding-plan.md @@ -0,0 
+1,942 @@ +# Cross-Language SDK Dogfooding — Integration Testing Plan + +> **Execution:** Use the subagent-driven-development workflow to implement this plan. + +**Goal:** Close the gaps between "unit tests pass" and "real developer workflow works" for the cross-language SDK. Wire the Python and TypeScript hosts to actually load WASM modules in real sessions, fix the integration plumbing, build a real WASM module using the developer authoring workflow, and exercise mixed-transport sessions. + +**Why this matters:** Phases 2–4 delivered ~1,000 passing tests across Rust, Python, and TypeScript. But the pieces have **never been connected end-to-end in a real session.** A developer writing `{"module": "my-wasm-tool"}` in a config today would get a no-op — the WASM module "loads" but never mounts into the coordinator, so it's not callable. This plan fixes that and proves the full pipeline works. + +**Branch:** `dev/cross-language-sdk` (Phases 2–4 already complete, all tests passing) + +**Design doc:** This plan is self-contained — no separate design doc. The gaps were identified by inspecting the actual source code against the developer workflow. + +--- + +## Codebase Orientation (Read This First) + +You are working in `/home/bkrabach/dev/rust-devrust-core/amplifier-core/` on branch `dev/cross-language-sdk`. + +### What's the big picture? + +Amplifier is a modular AI agent framework. A **session** loads **modules** (tools, hooks, providers, orchestrators, context managers) and wires them together via a **coordinator**. Modules can be written in Python (loaded via importlib), Rust (compiled to WASM and loaded via wasmtime), or run as gRPC services. + +The cross-language SDK (Phases 1–4) built the plumbing to load WASM and gRPC modules alongside Python modules. But right now, the session initialization code (`_session_init.py`) still calls the old Python-only loader directly — it never touches the new cross-language dispatch layer. That's what we're fixing. 
+ +### Key files you'll interact with + +| File | What it does | Why you care | +|---|---|---| +| `python/amplifier_core/_session_init.py` | Loads all modules when a session starts. Currently calls `loader.load()` 5 times (orchestrator, context, providers, tools, hooks). | **Task 0** — you'll wire this to `loader_dispatch.load_module()` instead. | +| `python/amplifier_core/loader_dispatch.py` | Routes module loading by transport (Python/WASM/gRPC). WASM branch currently returns `_noop_mount` — loads but doesn't mount. | **Task 2** — you'll replace `_noop_mount` with real WASM mounting. | +| `bindings/python/src/lib.rs` | PyO3 bindings. `load_wasm_from_path()` creates a test coordinator and discards the loaded module. | **Task 1** — you'll fix this to work with a real coordinator. | +| `bindings/node/src/lib.rs` | Napi-RS bindings. `resolveModule()` and `loadWasmFromPath()` for TypeScript. | **Task 6** — you'll use these from a TS script. | +| `crates/amplifier-core/src/module_resolver.rs` | Rust resolver: auto-detects transport and module type from a directory path. Works correctly (989 lines, well-tested). | Read-only reference — don't modify. | +| `crates/amplifier-core/src/transport.rs` | `load_wasm_tool()`, `load_wasm_hook()`, etc. Each takes `(&[u8], Arc)` and returns `Arc`. | You'll call these indirectly via `load_module()`. | +| `crates/amplifier-core/src/coordinator.rs` | `Coordinator` struct with typed mount points (tools, hooks, orchestrator, etc.). | The loaded modules must be mounted here. | +| `crates/amplifier-guest/` | Guest SDK crate for authoring WASM modules in Rust. | **Task 4** — you'll use this to build a real module. | +| `tests/fixtures/wasm/` | 6 pre-compiled `.wasm` test fixtures: `echo-tool.wasm`, `deny-hook.wasm`, `memory-context.wasm`, `auto-approve.wasm`, `echo-provider.wasm`, `passthrough-orchestrator.wasm`. | Used in Tasks 2, 3, 5, 7. | +| `tests/fixtures/wasm/src/` | Source code for each fixture (Rust crates using `amplifier-guest`). 
| Reference for Task 4. | +| `wit/amplifier-modules.wit` | WIT interface definitions. Package `amplifier:modules@1.0.0`. | Reference only — defines the WASM contract. | + +### How `_session_init.py` works today + +When a session starts, `initialize_session()` runs this sequence: + +``` +1. Get or create a ModuleLoader +2. Load orchestrator: await loader.load(orchestrator_id, config, source_hint=...) +3. Load context: await loader.load(context_id, config, source_hint=...) +4. Load providers: for each → await loader.load(module_id, config, source_hint=...) +5. Load tools: for each → await loader.load(module_id, config, source_hint=...) +6. Load hooks: for each → await loader.load(module_id, config, source_hint=...) +``` + +Each `loader.load()` call returns a **mount function**. The mount function is then called with the coordinator: `cleanup = await mount_fn(coordinator)`. This registers the module in the coordinator's mount points (e.g., `coordinator.mount_points["tools"]["my-tool"] = tool_instance`). + +The problem: `loader.load()` only knows about Python modules via importlib. It has no concept of WASM or gRPC. The new `loader_dispatch.load_module()` knows about all transports, but nobody calls it. + +### How `loader_dispatch.py` works today + +```python +async def load_module(module_id, config, source_path, coordinator): + # 1. Try Rust resolver → get transport type + # 2. If "grpc" → call load_grpc_module() + # 3. If "wasm" → call load_wasm_from_path() → return _noop_mount ← BUG + # 4. If "python" → fall through to loader.load() +``` + +The WASM branch calls `load_wasm_from_path()` (Rust via PyO3), which successfully loads the WASM bytes and creates the module instance — but then discards it. The `_noop_mount` function does nothing. The module is never registered in the coordinator. 
 + +### Test commands + +```bash +# Rust unit tests (no WASM) +cargo test -p amplifier-core + +# Rust unit tests (with WASM feature — enables WASM bridges + resolver) +cargo test -p amplifier-core --features wasm + +# WASM lib tests only (resolver + bridges) +cargo test -p amplifier-core --features wasm --lib + +# Module resolver tests specifically +cargo test -p amplifier-core --features wasm -- module_resolver + +# WASM E2E integration tests +cargo test -p amplifier-core --features wasm --test wasm_e2e + +# Module resolver E2E tests +cargo test -p amplifier-core --features wasm --test resolver_e2e + +# Python tests +cd python && python -m pytest tests/ -x -q + +# TypeScript tests (Napi-RS bindings) +cd bindings/node && npm test + +# Clippy (lint) +cargo clippy -p amplifier-core --features wasm -- -D warnings +``` + +**⚠️ Known issue:** `cargo test --features wasm` can hang if another cargo process holds a build lock. If you see a test run stall for more than 30 seconds, kill it, run `cargo clean`, and retry. The tests themselves complete in <5 seconds when run cleanly. + +### Fixture helper pattern (copy this for Rust tests) + +```rust +fn fixture(name: &str) -> Vec<u8> { + let manifest = std::path::Path::new(env!("CARGO_MANIFEST_DIR")); + let path = manifest.join("../../tests/fixtures/wasm").join(name); + std::fs::read(&path) + .unwrap_or_else(|e| panic!("fixture '{}' not found at {}: {}", name, path.display(), e)) +} +``` + +--- + +## Task 0: Wire `_session_init.py` → `loader_dispatch.load_module()` + +**What:** Replace the 5 `loader.load()` calls in `_session_init.py` with calls to `loader_dispatch.load_module()`. This is a wiring change only — Python modules must continue to work identically because `loader_dispatch` falls through to `ModuleLoader` for Python transport. + +**Why:** Without this, WASM and gRPC modules can never load in a real session. The session initialization code doesn't know `loader_dispatch` exists. 
+ +**Files:** +- Modify: `python/amplifier_core/_session_init.py` — replace `loader.load()` calls with `loader_dispatch.load_module()` +- Modify: `python/amplifier_core/loader_dispatch.py` — adjust `load_module()` signature if needed to match what `_session_init.py` passes + +### Step 1: Write the failing test + +Create a test in `python/tests/test_session_init_dispatch.py` that verifies `_session_init.initialize_session()` calls `loader_dispatch.load_module()` instead of `loader.load()`: + +```python +"""Test that _session_init routes through loader_dispatch.""" +import asyncio +from unittest.mock import AsyncMock, patch + + +def test_initialize_session_calls_loader_dispatch(): + """Verify that initialize_session uses loader_dispatch.load_module + instead of directly calling loader.load().""" + config = { + "session": { + "orchestrator": {"module": "loop-basic", "source": "/path/to/orch"}, + "context": {"module": "context-simple", "source": "/path/to/ctx"}, + }, + "tools": [ + {"module": "tool-echo", "source": "/path/to/tool", "config": {}}, + ], + "providers": [], + "hooks": [], + } + + mock_mount = AsyncMock(return_value=None) + mock_load_module = AsyncMock(return_value=mock_mount) + + mock_coordinator = AsyncMock() + mock_coordinator.loader = None + mock_coordinator.register_cleanup = lambda x: None + + with patch( + "amplifier_core._session_init.load_module", mock_load_module + ): + asyncio.get_event_loop().run_until_complete( + __import__( + "amplifier_core._session_init", fromlist=["initialize_session"] + ).initialize_session(config, mock_coordinator, "test-session", None) + ) + + # Should have called load_module 3 times: orchestrator, context, tool + assert mock_load_module.call_count == 3 +``` + +This test will fail because `_session_init.py` currently calls `loader.load()`, not `loader_dispatch.load_module()`. + +### Step 2: Implement + +In `_session_init.py`: +1. Add `from .loader_dispatch import load_module` at the top of `initialize_session()` +2. 
Replace each `await loader.load(module_id, config, source_hint=source)` with `await load_module(module_id, config, source_path=source, coordinator=coordinator)` +3. Keep the `ModuleLoader` creation as a fallback — `loader_dispatch` uses it internally for Python modules + +The key signature difference: +- Old: `loader.load(module_id, config, source_hint=source)` — returns a mount function +- New: `load_module(module_id, config, source_path=source, coordinator=coordinator)` — also returns a mount function + +Both return mount functions with the same `async def mount(coordinator) -> cleanup_fn` contract, so the rest of the code (`cleanup = await mount_fn(coordinator)`) stays the same. + +### Step 3: Verify + +```bash +# The new test passes +cd python && python -m pytest tests/test_session_init_dispatch.py -x -q + +# ALL existing Python tests still pass (zero regressions) +cd python && python -m pytest tests/ -x -q +``` + +**Commit message:** `feat(dogfood): wire _session_init.py → loader_dispatch.load_module()` + +--- + +## Task 1: Fix `load_wasm_from_path` PyO3 Binding + +**What:** The current `load_wasm_from_path` in `bindings/python/src/lib.rs` creates a `Coordinator::new_for_test()`, loads the WASM module into it, and throws both away — returning only `{"status": "loaded"}`. Fix it to accept a real coordinator and return the loaded module so it can be mounted. + +**Why:** Without this, WASM modules load into a throwaway coordinator. Even if `loader_dispatch.py` calls this function, the module ends up in the wrong coordinator — the real session's coordinator never sees it. 
+ +**Files:** +- Modify: `bindings/python/src/lib.rs` — fix `load_wasm_from_path` to work with real coordinators +- May add: a new function `load_and_mount_wasm(coordinator, path, module_type)` that loads the WASM bytes, creates the bridge, and mounts it into the coordinator's mount points + +### Step 1: Write the failing test + +Add a Rust test in `bindings/python/src/lib.rs` (in the `#[cfg(test)] mod tests` block) that documents the expected behavior: + +```rust +#[test] +fn load_wasm_from_path_should_accept_coordinator() { + // This test documents the expected API change. + // The function should accept a coordinator reference and mount + // the WASM module into it, not create a throwaway test coordinator. + // + // Current behavior: creates Coordinator::new_for_test(), discards it. + // Expected behavior: accepts PyCoordinator, mounts into its mount points. + // + // The actual integration test is in Python (Task 2), but this + // documents the Rust-side contract. +} +``` + +The real test happens in Python (Task 2) — but document the contract here. + +### Step 2: Implement + +Two approaches (choose based on complexity): + +**Approach A — New function:** Add `load_and_mount_wasm(coordinator: &PyCoordinator, path: String, module_type: String)` that: +1. Reads the `.wasm` bytes from `path` +2. Creates a `WasmEngine` +3. Calls the appropriate `load_wasm_*` function based on `module_type` (tool, hook, context, etc.) +4. Mounts the resulting `Arc` into the coordinator's Python-visible mount points +5. Returns a status dict + +**Approach B — Fix existing function:** Modify `load_wasm_from_path` to accept an optional coordinator parameter. If provided, mount into it. If not, use test coordinator (backward compat with existing tests). + +The challenge is bridging between the Rust `Arc` (or Hook, etc.) and the Python coordinator's mount points (which are Python dicts). You may need to create a thin Python wrapper that delegates calls to the Rust trait object via PyO3. 
+ +### Step 3: Verify + +```bash +# Rust tests pass +cargo test -p amplifier-python-bindings + +# Clippy clean +cargo clippy -p amplifier-python-bindings -- -D warnings +``` + +**Commit message:** `feat(dogfood): fix load_wasm_from_path to accept real coordinator` + +--- + +## Task 2: Fix `loader_dispatch.py` WASM Mount Bridge + +**What:** Replace the `_noop_mount` function in `loader_dispatch.py` with a real bridge that calls the Rust `load_and_mount_wasm()` via PyO3 and mounts the WASM module into the coordinator's mount points. + +**Why:** This is the critical gap. Today: WASM module "loads" (Rust creates the module object), but `_noop_mount` does nothing — the module is never registered in the coordinator, so it can't be called. After this fix: the module loads AND mounts, making it callable like any Python module. + +**Files:** +- Modify: `python/amplifier_core/loader_dispatch.py` — replace `_noop_mount` with real WASM mounting logic + +### Step 1: Write the failing test + +Create `python/tests/test_loader_dispatch_wasm.py`: + +```python +"""Test that loader_dispatch actually mounts WASM modules.""" +import asyncio +import os + + +def test_wasm_tool_mounts_into_coordinator(): + """Loading a WASM tool via loader_dispatch should mount it + into the coordinator's tools mount point, not just return a no-op.""" + from amplifier_core.loader_dispatch import load_module + + # Use the echo-tool fixture + fixture_dir = os.path.join( + os.path.dirname(__file__), + "..", "..", "tests", "fixtures", "wasm" + ) + + # Create a mock coordinator with real mount points + coordinator = MockCoordinator() + + mount_fn = asyncio.get_event_loop().run_until_complete( + load_module("echo-tool", {}, fixture_dir, coordinator) + ) + + # Mount into the coordinator + asyncio.get_event_loop().run_until_complete(mount_fn(coordinator)) + + # The tool should now be in the coordinator's tools mount point + assert "echo-tool" in coordinator.mount_points["tools"], \ + "WASM tool was not mounted 
into coordinator — _noop_mount is still in place" +``` + +This test will fail because `_noop_mount` doesn't actually mount anything. + +### Step 2: Implement + +In `loader_dispatch.py`, replace the WASM branch: + +```python +if transport == "wasm": + try: + from amplifier_core._engine import load_and_mount_wasm + + # load_and_mount_wasm handles: read .wasm → create engine → + # load bridge → mount into coordinator + result = load_and_mount_wasm(coordinator, source_path, module_id) + logger.info(f"[module:mount] {module_id} mounted via WASM: {result}") + + async def _wasm_mount(coord): + # Already mounted by load_and_mount_wasm above + pass + + return _wasm_mount + except ImportError: + raise NotImplementedError(...) +``` + +Or, if the mounting needs to happen at mount-time (deferred): + +```python +if transport == "wasm": + try: + from amplifier_core._engine import load_and_mount_wasm + + async def _wasm_mount(coord): + result = load_and_mount_wasm(coord, source_path, module_id) + logger.info(f"[module:mount] {module_id} mounted via WASM: {result}") + + return _wasm_mount + except ImportError: + raise NotImplementedError(...) +``` + +### Step 3: Verify + +```bash +# The new test passes +cd python && python -m pytest tests/test_loader_dispatch_wasm.py -x -q + +# All existing tests still pass +cd python && python -m pytest tests/ -x -q +cargo test -p amplifier-core --features wasm +``` + +**Commit message:** `feat(dogfood): replace _noop_mount with real WASM mounting in loader_dispatch` + +--- + +## Task 3: Add `amplifier.toml` to WASM Fixture Directories + +**What:** Create `amplifier.toml` files in each WASM fixture's source directory so the manifest-based detection path works alongside the auto-detection path. + +**Why:** The module resolver has two detection paths: (1) auto-detect by inspecting `.wasm` file Component Model metadata, and (2) read `amplifier.toml` for an explicit declaration. Path 1 already works. Path 2 has no test fixtures. 
Both paths should work so developers can choose either approach. + +**Files (all new):** +- Create: `tests/fixtures/wasm/src/echo-tool/amplifier.toml` +- Create: `tests/fixtures/wasm/src/deny-hook/amplifier.toml` +- Create: `tests/fixtures/wasm/src/memory-context/amplifier.toml` +- Create: `tests/fixtures/wasm/src/auto-approve/amplifier.toml` +- Create: `tests/fixtures/wasm/src/echo-provider/amplifier.toml` +- Create: `tests/fixtures/wasm/src/passthrough-orchestrator/amplifier.toml` + +### Step 1: Write the failing test + +Add a Rust test in `crates/amplifier-core/src/module_resolver.rs` (in the test module): + +```rust +#[test] +fn resolve_fixture_via_amplifier_toml() { + // The echo-tool fixture source directory should have an amplifier.toml + // that declares transport = "wasm" and type = "tool". + let fixture_src = Path::new(env!("CARGO_MANIFEST_DIR")) + .join("../../tests/fixtures/wasm/src/echo-tool"); + + // This should work via the amplifier.toml path (priority 1) + let manifest = resolve_module(&fixture_src).expect("should resolve via amplifier.toml"); + assert_eq!(manifest.transport, Transport::Wasm); + assert_eq!(manifest.module_type, ModuleType::Tool); +} +``` + +This test will fail because the `amplifier.toml` file doesn't exist yet. 
+ +### Step 2: Implement + +Create each `amplifier.toml` with this format: + +```toml +[module] +transport = "wasm" +type = "tool" # varies per fixture +``` + +The `type` value for each fixture: + +| Fixture | `type` value | +|---|---| +| `echo-tool` | `"tool"` | +| `deny-hook` | `"hook"` | +| `memory-context` | `"context"` | +| `auto-approve` | `"approval"` | +| `echo-provider` | `"provider"` | +| `passthrough-orchestrator` | `"orchestrator"` | + +### Step 3: Verify + +```bash +# The new test passes +cargo test -p amplifier-core --features wasm -- resolve_fixture_via_amplifier_toml + +# All existing resolver tests still pass +cargo test -p amplifier-core --features wasm -- module_resolver +``` + +**Commit message:** `feat(dogfood): add amplifier.toml to all WASM fixture source directories` + +--- + +## Task 4: Build a "Real" WASM Tool (Calculator) + +**What:** Create a slightly useful WASM tool — not a test fixture, but something a developer would actually build — to prove the developer authoring workflow. A calculator tool that evaluates simple math expressions. + +**Why:** The test fixtures are minimal echo/passthrough modules. 
Building a real (if simple) module proves: +- The `amplifier-guest` SDK is usable +- `cargo component build` works for a fresh project +- The resulting `.wasm` binary loads via the resolver +- The tool actually does something when called + +**Files:** +- Create: `examples/wasm-modules/calculator-tool/Cargo.toml` +- Create: `examples/wasm-modules/calculator-tool/src/lib.rs` +- Create: `examples/wasm-modules/calculator-tool/amplifier.toml` +- Generated: `examples/wasm-modules/calculator-tool/target/.../calculator_tool.wasm` (build artifact — don't commit the `target/` dir) +- Create: `examples/wasm-modules/calculator-tool.wasm` (committed pre-built binary, like the test fixtures) + +### Step 1: Write the failing test + +Add a Rust integration test that expects the calculator tool to exist and be loadable: + +```rust +#[test] +fn calculator_tool_loads_and_resolves() { + let wasm_path = Path::new(env!("CARGO_MANIFEST_DIR")) + .join("../../examples/wasm-modules/calculator-tool.wasm"); + + assert!(wasm_path.exists(), "calculator-tool.wasm not built yet"); + + let bytes = std::fs::read(&wasm_path).unwrap(); + let engine = WasmEngine::new().unwrap(); + let coordinator = Arc::new(Coordinator::new_for_test()); + let tool = load_wasm_tool(&bytes, engine.inner(), coordinator).unwrap(); + + let spec = futures::executor::block_on(tool.spec()); + assert_eq!(spec.name, "calculator"); +} +``` + +### Step 2: Implement + +Create the crate at `examples/wasm-modules/calculator-tool/`: + +**`Cargo.toml`:** +```toml +[package] +name = "calculator-tool" +version = "0.1.0" +edition = "2021" + +[dependencies] +amplifier-guest = { path = "../../../crates/amplifier-guest" } + +[lib] +crate-type = ["cdylib"] +``` + +**`src/lib.rs`:** +```rust +use amplifier_guest::{Tool, ToolSpec, ToolResult, Value, Param}; + +struct CalculatorTool; + +impl Tool for CalculatorTool { + fn spec(&self) -> ToolSpec { + ToolSpec { + name: "calculator".to_string(), + description: "Evaluates simple math expressions 
(+, -, *, /)".to_string(),
+            parameters: vec![
+                Param {
+                    name: "expression".to_string(),
+                    description: "Math expression to evaluate (e.g., '2 + 3 * 4')".to_string(),
+                    param_type: "string".to_string(),
+                    required: true,
+                },
+            ],
+        }
+    }
+
+    fn execute(&self, args: Value) -> ToolResult {
+        // Extract expression from args
+        let expr = args.get("expression")
+            .and_then(|v| v.as_str())
+            .unwrap_or("0");
+
+        // Simple evaluation (real implementation would use a parser)
+        match eval_simple(expr) {
+            Ok(result) => ToolResult::success(format!("{result}")),
+            Err(e) => ToolResult::error(format!("Calculation error: {e}")),
+        }
+    }
+}
+
+fn eval_simple(expr: &str) -> Result<f64, String> {
+    // Minimal evaluator for "a op b" expressions
+    // A real tool would use a proper math parser
+    let expr = expr.trim();
+    // Try to parse as a plain number first
+    if let Ok(n) = expr.parse::<f64>() {
+        return Ok(n);
+    }
+    // Look for operators
+    for op in ['+', '-', '*', '/'] {
+        if let Some(pos) = expr.rfind(op) {
+            if pos == 0 { continue; } // negative number
+            let left: f64 = expr[..pos].trim().parse()
+                .map_err(|e| format!("Bad left operand: {e}"))?;
+            let right: f64 = expr[pos+1..].trim().parse()
+                .map_err(|e| format!("Bad right operand: {e}"))?;
+            return match op {
+                '+' => Ok(left + right),
+                '-' => Ok(left - right),
+                '*' => Ok(left * right),
+                '/' => {
+                    if right == 0.0 { Err("Division by zero".into()) }
+                    else { Ok(left / right) }
+                }
+                _ => unreachable!()
+            };
+        }
+    }
+    Err(format!("Cannot parse expression: {expr}"))
+}
+
+amplifier_guest::export_tool!(CalculatorTool);
+```
+
+**`amplifier.toml`:**
+```toml
+[module]
+transport = "wasm"
+type = "tool"
+```
+
+Build with:
+```bash
+cd examples/wasm-modules/calculator-tool
+cargo component build --release
+cp target/wasm32-wasip1/release/calculator_tool.wasm ../calculator-tool.wasm
+```
+
+### Step 3: Verify
+
+```bash
+# The calculator tool resolves correctly
+cargo test -p amplifier-core --features wasm -- 
calculator_tool_loads_and_resolves + +# The module resolver auto-detects it +cargo test -p amplifier-core --features wasm -- module_resolver +``` + +**Commit message:** `feat(dogfood): add calculator-tool example WASM module` + +--- + +## Task 5: Load WASM Tool in a Python Session + +**What:** Create `examples/python-wasm-session.py` — a minimal Python script that creates a session with a WASM tool, initializes it, and verifies the tool is mounted and callable. + +**Why:** This is the acid test for the Python host → PyO3 → Rust resolver → wasmtime → WASM tool pipeline. If this works, a real developer can write `{"module": "my-tool", "source": "/path/to/wasm/dir"}` in their config and it Just Works. + +**Files:** +- Create: `examples/python-wasm-session.py` + +### Step 1: Write the failing test + +The example script itself IS the test. Create `examples/python-wasm-session.py`: + +```python +#!/usr/bin/env python3 +"""Dogfood test: Load a WASM tool in a real Python session. + +This script proves the full pipeline: + Python host → PyO3 → Rust resolver → wasmtime → WASM tool + +Run from the amplifier-core directory: + python examples/python-wasm-session.py +""" +import asyncio +import os +import sys + +# Ensure amplifier_core is importable +sys.path.insert(0, os.path.join(os.path.dirname(__file__), "..", "python")) + + +async def main(): + from amplifier_core._engine import RustSession + + fixture_dir = os.path.join( + os.path.dirname(__file__), "..", "tests", "fixtures", "wasm" + ) + + config = { + "session": { + "orchestrator": "loop-basic", + "context": "context-simple", + }, + "tools": [ + { + "module": "echo-tool", + "source": fixture_dir, + "config": {}, + }, + ], + "providers": [], + "hooks": [], + } + + session = RustSession(config) + await session.initialize() + + # Check that the WASM tool is mounted + coord = session.coordinator + tools = coord.mount_points.get("tools", {}) + assert "echo-tool" in tools, f"WASM tool not mounted! 
Tools: {list(tools.keys())}" + print(f"✅ WASM tool 'echo-tool' is mounted in the coordinator") + + # Try calling it + tool = tools["echo-tool"] + spec = await tool.spec() + print(f"✅ Tool spec: name={spec.name}, params={len(spec.parameters)}") + + result = await tool.execute({"input": "hello from Python"}) + print(f"✅ Tool result: {result}") + + await session.cleanup() + print("✅ Session cleaned up successfully") + print("\n🎉 Full Python → WASM pipeline works!") + + +if __name__ == "__main__": + asyncio.run(main()) +``` + +### Step 2: Implement + +This task has no code to write beyond the script itself. The implementation work was done in Tasks 0–2. If the script doesn't work, it means Tasks 0–2 have a bug — go back and fix them. + +Common failure modes to check: +- `_session_init.py` doesn't pass `source` to `loader_dispatch` → fix Task 0 +- `loader_dispatch.py` can't find the `.wasm` file in the fixture dir → check resolver path handling +- `load_and_mount_wasm` doesn't bridge to Python mount points correctly → fix Task 1 +- The tool mounts but isn't callable (wrong Python wrapper) → fix Task 2 + +### Step 3: Verify + +```bash +cd /home/bkrabach/dev/rust-devrust-core/amplifier-core +python examples/python-wasm-session.py +``` + +Expected output: +``` +✅ WASM tool 'echo-tool' is mounted in the coordinator +✅ Tool spec: name=echo-tool, params=1 +✅ Tool result: ... +✅ Session cleaned up successfully + +🎉 Full Python → WASM pipeline works! +``` + +**Commit message:** `feat(dogfood): add Python WASM session example — proves full pipeline` + +--- + +## Task 6: Load WASM Tool from TypeScript Host + +**What:** Create `examples/node-wasm-session.ts` — a minimal TypeScript script that uses the Napi-RS bindings to load a WASM tool and call it. + +**Why:** Proves the TypeScript host → Napi-RS → Rust resolver → wasmtime → WASM tool pipeline. This is the second host language, and it must work independently of the Python host. 
+ +**Files:** +- Create: `examples/node-wasm-session.ts` + +### Step 1: Write the failing test + +Create `examples/node-wasm-session.ts`: + +```typescript +/** + * Dogfood test: Load a WASM tool from the TypeScript/Node.js host. + * + * Proves the full pipeline: + * TypeScript host → Napi-RS → Rust resolver → wasmtime → WASM tool + * + * Run from the amplifier-core directory: + * npx ts-node examples/node-wasm-session.ts + * + * Or compile first: + * npx tsc examples/node-wasm-session.ts && node examples/node-wasm-session.js + */ + +import * as path from 'path'; + +// Import from the built Napi-RS bindings +const { resolveModule, loadWasmFromPath } = require('../bindings/node'); + +async function main() { + const fixtureDir = path.join(__dirname, '..', 'tests', 'fixtures', 'wasm'); + + // Step 1: Resolve the module + console.log('Resolving echo-tool from fixture directory...'); + const manifest = resolveModule(fixtureDir); + console.log(`✅ Resolved: transport=${manifest.transport}, type=${manifest.module_type}`); + + // Step 2: Load the WASM module + console.log('Loading WASM module...'); + const result = loadWasmFromPath(fixtureDir); + console.log(`✅ Loaded: status=${result.status}, type=${result.module_type}`); + + console.log('\n🎉 Full TypeScript → WASM pipeline works!'); +} + +main().catch((err) => { + console.error('❌ Failed:', err); + process.exit(1); +}); +``` + +### Step 2: Implement + +Like Task 5, the implementation was done in prior phases (Napi-RS bindings in Phase 4, Task 9). If this doesn't work, it's a bindings bug — fix in `bindings/node/src/lib.rs`. 
+ +### Step 3: Verify + +```bash +cd /home/bkrabach/dev/rust-devrust-core/amplifier-core +npx ts-node examples/node-wasm-session.ts +``` + +Also run the existing Napi-RS test suite to confirm no regressions: + +```bash +cd bindings/node && npm test +``` + +**Commit message:** `feat(dogfood): add TypeScript WASM session example — proves Node.js pipeline` + +--- + +## Task 7: Mixed-Transport Session Test + +**What:** Create an integration test that runs a session with **both Python and WASM modules** loaded simultaneously — the ultimate dogfood test. + +**Why:** Amplifier sessions will commonly have a mix of Python modules (e.g., orchestrator, provider) and WASM modules (e.g., tools, hooks). This test proves the coordinator can handle modules from different transports in the same session. + +**Files:** +- Create: `tests/mixed_transport_e2e.py` + +### Step 1: Write the failing test + +Create `tests/mixed_transport_e2e.py`: + +```python +#!/usr/bin/env python3 +"""E2E test: Mixed Python + WASM modules in one session. + +This is the ultimate dogfood test. A single session loads: + - Python orchestrator (loop-basic) — loaded via importlib + - Python context manager (context-simple) — loaded via importlib + - WASM tool (echo-tool) — loaded via wasmtime + - WASM hook (deny-hook) — loaded via wasmtime + +All four modules must coexist and be callable in the same session. 
+""" +import asyncio +import os +import sys + +sys.path.insert(0, os.path.join(os.path.dirname(__file__), "..", "python")) + + +async def test_mixed_transport_session(): + from amplifier_core._engine import RustSession + + wasm_fixture_dir = os.path.join( + os.path.dirname(__file__), "fixtures", "wasm" + ) + + config = { + "session": { + "orchestrator": "loop-basic", + "context": "context-simple", + }, + "tools": [ + { + "module": "echo-tool", + "source": wasm_fixture_dir, + "config": {}, + }, + ], + "hooks": [ + { + "module": "deny-hook", + "source": wasm_fixture_dir, + "config": {}, + }, + ], + "providers": [], + } + + session = RustSession(config) + await session.initialize() + coord = session.coordinator + + # ---- Verify all modules are mounted ---- + # Python orchestrator + assert coord.mount_points.get("orchestrator") is not None, \ + "Python orchestrator not mounted" + print("✅ Python orchestrator mounted") + + # Python context manager + assert coord.mount_points.get("context") is not None, \ + "Python context manager not mounted" + print("✅ Python context manager mounted") + + # WASM tool + tools = coord.mount_points.get("tools", {}) + assert "echo-tool" in tools, \ + f"WASM tool not mounted! Tools: {list(tools.keys())}" + print("✅ WASM echo-tool mounted") + + # WASM hook + hooks = coord.mount_points.get("hooks", {}) + assert "deny-hook" in hooks, \ + f"WASM hook not mounted! 
Hooks: {list(hooks.keys())}" + print("✅ WASM deny-hook mounted") + + # ---- Call the WASM tool ---- + tool = tools["echo-tool"] + result = await tool.execute({"input": "mixed-transport test"}) + print(f"✅ WASM tool returned: {result}") + + # ---- Fire a hook event ---- + hook_result = await coord.hooks.emit("tool:pre", {"tool": "echo-tool"}) + print(f"✅ Hook pipeline returned: {hook_result}") + + await session.cleanup() + print("\n🎉 Mixed Python + WASM session works!") + + +if __name__ == "__main__": + asyncio.run(test_mixed_transport_session()) +``` + +### Step 2: Implement + +This task has no new code to write. It exercises everything built in Tasks 0–6. If it fails, the failure will point to which integration seam is broken. + +Possible issues to debug: +- Multiple WASM modules from the same directory: the resolver needs to distinguish `echo-tool.wasm` vs `deny-hook.wasm` in the same `fixtures/wasm/` dir. The `source` path may need to point to a directory containing only one `.wasm` file, or the resolver may need a `module_name` hint. +- Hook mounting: WASM hooks need to register into the `HookRegistry`, not just the mount points dict. Check that the hook bridge registers itself correctly. 
+ +### Step 3: Verify + +```bash +cd /home/bkrabach/dev/rust-devrust-core/amplifier-core +python tests/mixed_transport_e2e.py +``` + +Also run the full test battery to confirm nothing is broken: + +```bash +cargo test -p amplifier-core --features wasm +cd python && python -m pytest tests/ -x -q +``` + +**Commit message:** `test(dogfood): add mixed Python + WASM transport session E2E test` + +--- + +## Success Criteria + +After all 8 tasks are complete: + +| Criterion | How to verify | +|---|---| +| All existing Python tests still pass (zero regressions) | `cd python && python -m pytest tests/ -x -q` | +| All existing Rust tests still pass | `cargo test -p amplifier-core --features wasm` | +| All existing TypeScript tests still pass | `cd bindings/node && npm test` | +| A WASM tool loads and executes in a real Python session | `python examples/python-wasm-session.py` | +| A WASM tool loads and executes from a TypeScript host | `npx ts-node examples/node-wasm-session.ts` | +| Mixed Python + WASM modules work in the same session | `python tests/mixed_transport_e2e.py` | +| A developer can build a new WASM module using `amplifier-guest` + `cargo component build` and it loads automatically | The calculator-tool example proves this | +| `amplifier.toml` manifest path works for all 6 fixture types | `cargo test -- resolve_fixture_via_amplifier_toml` | + +## Current Test Counts (baseline — must not decrease) + +| Suite | Count | +|---|---| +| Rust lib tests (no WASM) | 390 | +| Rust WASM lib tests | 34 | +| Rust WASM E2E tests | 7 | +| Rust module resolver E2E tests | 14 | +| Python tests | ~465 | +| TypeScript Vitest tests | 64 | + +## Dependency Graph + +``` +Task 0 (wire _session_init) ─┐ +Task 1 (fix PyO3 binding) ───┼──→ Task 2 (fix loader_dispatch mount) ──→ Task 5 (Python session) + │ Task 7 (mixed transport) +Task 3 (amplifier.toml) ─────┘ +Task 4 (calculator tool) ──────────────────────────────────────────────→ Task 5 (can use calculator) +Task 6 (TypeScript 
session) — independent after Phase 4 Napi-RS bindings +``` + +Tasks 0, 1, 3, 4, and 6 can be worked on in parallel. +Tasks 2 and 5 depend on Tasks 0 and 1. +Task 7 depends on Task 2 (and transitively on 0 and 1). \ No newline at end of file diff --git a/docs/plans/2026-03-07-pr39-code-review-fixes.md b/docs/plans/2026-03-07-pr39-code-review-fixes.md new file mode 100644 index 0000000..a0472ee --- /dev/null +++ b/docs/plans/2026-03-07-pr39-code-review-fixes.md @@ -0,0 +1,1146 @@ +# PR #39 Code Review Fixes — Implementation Plan + +## Goal + +Address all Critical and High priority findings from the PR #39 code review +before merge (Critical) and before production deployment (High), ensuring the +cross-language SDK is secure, correct, and production-ready. + +## Background + +Four independent review agents (architecture, Rust quality, security audit, +bindings/coverage) converged on 6 Critical and 7 High findings in the ++34,727-line cross-language SDK PR. The code has zero compiler errors, zero +clippy warnings, and strong test coverage — the findings are all design-level +and semantic issues that static analysis cannot catch. + +Critical items are merge-blocking. High items block production deployment. +Medium, Low, and Test Gap items are follow-up work. + +## Approach + +Work through findings in dependency order: + +1. **C-02 + H-01** — WASM resource limits + WASI restriction (shared `create_linker_and_store`) +2. **C-03** — Hook fail-closed (quick, high-impact security fix) +3. **C-05** — Streaming endpoint fix (quick, isolated) +4. **C-06** — Guest SDK compile gate (quick, isolated) +5. **C-01** — gRPC authentication (larger, foundational for H-04) +6. **C-04** — Node.js detached instance fix (interim rename + tests) +7. **H-02** — Sanitize gRPC errors (12 sites, mechanical) +8. **H-03** — Path traversal check +9. **H-04** — Session ID routing (document limitation, depends on C-01) +10. **H-05** — WIT HTTP import removal +11. **H-06** — TypeScript type fixes +12. 
**H-07** — JSON payload size limits + +Each task follows TDD: write the failing test first, then implement the fix, +then verify all tests pass. + +--- + +## Task 1: C-03 — Hook Parse Failure Must Fail Closed (Deny, Not Continue) + +**Priority:** Critical — security controls can be silently bypassed +**Estimated effort:** 30 minutes +**Files:** +- `bindings/node/src/lib.rs` (line 353) +- `bindings/python/src/lib.rs` (line 172) +- `crates/amplifier-core/tests/` (new test) + +### 1a. Write failing test — Node bindings + +Add a test in `bindings/node/__tests__/hooks.test.ts` that registers a hook +returning invalid JSON and asserts the result action is `"deny"`: + +```typescript +it('returns Deny when hook handler returns invalid JSON (fail-closed)', async () => { + const registry = new JsHookRegistry() + registry.register( + 'tool:pre', + (_event: string, _data: string) => 'NOT VALID JSON {{{', + 10, + 'bad-json-hook' + ) + const result = await registry.emit('tool:pre', '{}') + expect(result.action).toBe('deny') + expect(result.reason).toContain('invalid') +}) +``` + +### 1b. Fix Node bindings — fail closed + +In `bindings/node/src/lib.rs`, replace the `unwrap_or_else` at line 353: + +```rust +// BEFORE (line 353-358): +let hook_result: HookResult = serde_json::from_str(&result_str).unwrap_or_else(|e| { + eprintln!( + "amplifier-core-node: failed to parse HookResult from JS handler: {e}. Defaulting to Continue." + ); + HookResult::default() +}); + +// AFTER: +let hook_result: HookResult = serde_json::from_str(&result_str).unwrap_or_else(|e| { + log::error!( + "SECURITY: Hook handler returned unparseable result — failing closed (Deny): {e}" + ); + HookResult { + action: HookAction::Deny, + reason: Some("Hook handler returned invalid response".to_string()), + ..Default::default() + } +}); +``` + +### 1c. 
Fix Python bindings — fail closed + +In `bindings/python/src/lib.rs`, replace the `unwrap_or_else` at line 172: + +```rust +// BEFORE (line 172-177): +let hook_result: HookResult = serde_json::from_str(&result_json).unwrap_or_else(|e| { + log::warn!( + "Failed to parse hook handler result JSON (defaulting to Continue): {e} — json: {result_json}" + ); + HookResult::default() +}); + +// AFTER: +let hook_result: HookResult = serde_json::from_str(&result_json).unwrap_or_else(|e| { + log::error!( + "SECURITY: Hook handler returned unparseable result — failing closed (Deny): {e} — json: {result_json}" + ); + HookResult { + action: HookAction::Deny, + reason: Some("Hook handler returned invalid response".to_string()), + ..Default::default() + } +}); +``` + +### 1d. Verify + +```bash +cd bindings/node && npm test -- --grep "fail-closed" +cd crates/amplifier-core && cargo test +``` + +--- + +## Task 2: C-05 — Fix Fake Streaming Endpoint + +**Priority:** Critical — silent send failure + misleading contract +**Estimated effort:** 30 minutes +**File:** `crates/amplifier-core/src/grpc_server.rs` (lines 99-107) + +### 2a. 
Write failing test + +Add a test in `crates/amplifier-core/src/grpc_server.rs` `mod tests` that +verifies the streaming endpoint logs on send failure (or at minimum, add +a test that exercises the streaming path and asserts it produces exactly +one response): + +```rust +#[tokio::test] +async fn streaming_endpoint_returns_single_response() { + use crate::testing::FakeProvider; + let coord = Arc::new(Coordinator::new(Default::default())); + let provider = Arc::new(FakeProvider::new("test-provider")); + coord.mount_provider(provider); + let service = KernelServiceImpl::new(coord); + + let request = Request::new(amplifier_module::CompleteWithProviderRequest { + provider_name: "test-provider".to_string(), + request: Some(/* minimal ChatRequest */), + }); + + let response = service.complete_with_provider_streaming(request).await.unwrap(); + let mut stream = response.into_inner(); + + let first = stream.next().await; + assert!(first.is_some(), "Should yield exactly one response"); + let second = stream.next().await; + assert!(second.is_none(), "Stream should end after single response"); +} +``` + +### 2b. Fix: log send failure + add doc comment + +In `crates/amplifier-core/src/grpc_server.rs`, replace lines 99-107: + +```rust +// BEFORE: +// Wrap in a one-shot stream: send the single response then drop the sender +// to signal end-of-stream to the client. +let (tx, rx) = tokio::sync::mpsc::channel(1); +let _ = tx.send(Ok(proto_response)).await; +// `tx` is dropped here, closing the channel and ending the stream. + +// AFTER: +// NOTE: This is a one-shot "streaming" endpoint — it awaits the full provider +// response, then sends it as a single stream element. True token-level streaming +// requires provider.complete_stream() → Stream, which is +// not yet implemented. This endpoint exists for proto/gRPC API compatibility +// so clients can use the streaming RPC shape ahead of the real implementation. 
+let (tx, rx) = tokio::sync::mpsc::channel(1); +if tx.send(Ok(proto_response)).await.is_err() { + log::debug!("Streaming client disconnected before response was sent"); +} +// `tx` is dropped here, closing the channel and ending the stream. +``` + +### 2c. Verify + +```bash +cd crates/amplifier-core && cargo test grpc_server +``` + +--- + +## Task 3: C-06 — Guest SDK Kernel Stubs Compile Gate + +**Priority:** Critical — runtime failure instead of compile-time failure +**Estimated effort:** 30 minutes +**Files:** +- `crates/amplifier-guest/src/lib.rs` (line 547-591) +- `crates/amplifier-guest/Cargo.toml` + +### 3a. Add `kernel-stub` feature to Cargo.toml + +In `crates/amplifier-guest/Cargo.toml`: + +```toml +[features] +default = ["kernel-stub"] +kernel-stub = [] +``` + +### 3b. Add compile_error! gate + +In `crates/amplifier-guest/src/lib.rs`, above the `pub mod kernel` block +(before line 553): + +```rust +#[cfg(all(target_arch = "wasm32", not(feature = "kernel-stub")))] +compile_error!( + "kernel:: functions are not yet wired to WIT imports. \ + Set feature = 'kernel-stub' for testing only." +); +``` + +### 3c. Verify + +```bash +# Normal build (with default kernel-stub feature) should pass: +cd crates/amplifier-guest && cargo check + +# Existing tests should still pass: +cargo test -p amplifier-guest +``` + +--- + +## Task 4: C-02 + H-01 — WASM Resource Limits + Restrict WASI Capabilities + +**Priority:** Critical (C-02) + High (H-01) — DoS via infinite loop / OOM + information leakage +**Estimated effort:** 4-6 hours +**Files:** +- `crates/amplifier-core/src/bridges/wasm_tool.rs` (lines 54-64) +- `crates/amplifier-core/src/bridges/mod.rs` +- All `wasm_*.rs` bridge files (they all import `create_linker_and_store`) +- `crates/amplifier-core/Cargo.toml` (no new deps needed) + +### 4a. Write failing test — epoch interruption kills infinite loop + +Create `crates/amplifier-core/tests/wasm_resource_limits.rs`: + +```rust +//! 
Verifies that WASM modules with infinite loops are terminated +//! by epoch interruption and do not hang indefinitely. + +#[cfg(feature = "wasm")] +#[tokio::test] +async fn infinite_loop_wasm_module_is_terminated() { + use std::time::{Duration, Instant}; + // Load the infinite-loop test fixture (a WASM component that runs `loop {}`) + let engine = amplifier_core::bridges::wasm_tool::create_wasm_engine().unwrap(); + let bytes = std::fs::read("tests/fixtures/wasm/infinite-loop/module.wasm").unwrap(); + + let start = Instant::now(); + let result = amplifier_core::bridges::wasm_tool::WasmToolBridge::from_bytes( + &bytes, engine, + ); + let elapsed = start.elapsed(); + + // Should fail (trap), not hang forever + assert!(result.is_err(), "Infinite loop should be trapped"); + assert!(elapsed < Duration::from_secs(60), "Should terminate within timeout"); +} +``` + +### 4b. Create engine factory with epoch interruption + +The shared engine is created implicitly via `Engine::new(&config)` at each +bridge's construction site. Centralize this into `bridges/mod.rs`: + +```rust +// bridges/mod.rs — add: + +#[cfg(feature = "wasm")] +use std::sync::Arc; +#[cfg(feature = "wasm")] +use wasmtime::Engine; + +/// Default WASM execution limits. +#[cfg(feature = "wasm")] +pub struct WasmLimits { + /// Maximum epoch ticks before trap (at ~100 ticks/sec, 3000 = 30 seconds). + pub max_epoch_ticks: u64, + /// Maximum memory in bytes (default: 64 MB). + pub max_memory_bytes: usize, +} + +#[cfg(feature = "wasm")] +impl Default for WasmLimits { + fn default() -> Self { + Self { + max_epoch_ticks: 3000, // ~30 seconds at 100Hz + max_memory_bytes: 64 << 20, // 64 MB + } + } +} + +/// Create a wasmtime Engine with epoch interruption enabled and a background +/// ticker thread that increments the epoch every 10ms (~100Hz). 
+#[cfg(feature = "wasm")]
+pub fn create_wasm_engine() -> Result<Arc<Engine>, Box<dyn std::error::Error>> {
+    let mut config = wasmtime::Config::new();
+    config.wasm_component_model(true);
+    config.epoch_interruption(true);
+    let engine = Arc::new(Engine::new(&config)?);
+
+    // Background ticker — increments epoch every 10ms
+    let engine_clone = Arc::clone(&engine);
+    std::thread::spawn(move || {
+        loop {
+            std::thread::sleep(std::time::Duration::from_millis(10));
+            engine_clone.increment_epoch();
+        }
+    });
+
+    Ok(engine)
+}
+```
+
+### 4c. Update `create_linker_and_store` — add limits + null I/O
+
+In `crates/amplifier-core/src/bridges/wasm_tool.rs`, update the shared function:
+
+```rust
+// BEFORE (lines 54-64):
+pub(crate) fn create_linker_and_store(
+    engine: &Engine,
+) -> Result<(Linker<WasmState>, Store<WasmState>), Box<dyn std::error::Error>> {
+    let mut linker = Linker::<WasmState>::new(engine);
+    wasmtime_wasi::p2::add_to_linker_sync(&mut linker)?;
+    let wasi = wasmtime_wasi::WasiCtxBuilder::new().build();
+    let table = wasmtime::component::ResourceTable::new();
+    let store = Store::new(engine, WasmState { wasi, table });
+    Ok((linker, store))
+}
+
+// AFTER:
+pub(crate) fn create_linker_and_store(
+    engine: &Engine,
+    limits: &super::WasmLimits,
+) -> Result<(Linker<WasmState>, Store<WasmState>), Box<dyn std::error::Error>> {
+    let mut linker = Linker::<WasmState>::new(engine);
+    wasmtime_wasi::p2::add_to_linker_sync(&mut linker)?;
+
+    // H-01: Restrict WASI capabilities — null I/O, no inherited env/args
+    let wasi = wasmtime_wasi::WasiCtxBuilder::new()
+        .stdin(wasmtime_wasi::pipe::ClosedInputStream)
+        .stdout(wasmtime_wasi::pipe::SinkOutputStream)
+        .stderr(wasmtime_wasi::pipe::SinkOutputStream)
+        .build();
+
+    let table = wasmtime::component::ResourceTable::new();
+    let mut store = Store::new(engine, WasmState { wasi, table });
+
+    // C-02: CPU time limit via epoch interruption
+    store.set_epoch_deadline(limits.max_epoch_ticks);
+
+    // C-02: Memory limit via StoreLimitsBuilder
+    store.limiter(|state| &mut state.limiter);
+
+    Ok((linker, store))
+}
+```
+
+Update `WasmState` to include the 
limiter: + +```rust +pub(crate) struct WasmState { + wasi: wasmtime_wasi::WasiCtx, + table: wasmtime::component::ResourceTable, + limiter: wasmtime::StoreLimits, +} +``` + +### 4d. Update all call sites + +Every file that calls `create_linker_and_store(engine)` must pass the limits +parameter. These files import from `super::wasm_tool::create_linker_and_store`: + +- `wasm_hook.rs:70` → `create_linker_and_store(engine, &WasmLimits::default())` +- `wasm_context.rs:95` → same +- `wasm_approval.rs:72` → same +- `wasm_orchestrator.rs:372` → same +- `wasm_provider.rs:81,93,114,136` → same +- `wasm_tool.rs:104,118` → same + +### 4e. Verify + +```bash +cd crates/amplifier-core && cargo test --features wasm +cargo clippy --features wasm +``` + +--- + +## Task 5: C-01 — Add Authentication to KernelService gRPC Server + +**Priority:** Critical — all 9 RPCs completely unauthenticated +**Estimated effort:** 4-6 hours +**Files:** +- `crates/amplifier-core/src/grpc_server.rs` +- `crates/amplifier-core/src/lib.rs` (re-export) + +### 5a. 
Write failing test — unauthenticated request is rejected

+In `crates/amplifier-core/src/grpc_server.rs` `mod tests`:
+
+```rust
+#[tokio::test]
+async fn unauthenticated_request_is_rejected() {
+    let coord = Arc::new(Coordinator::new(Default::default()));
+    let service = KernelServiceImpl::new(coord);
+    let interceptor = auth_interceptor("test-secret-token");
+
+    // Request WITHOUT the auth token
+    let request = Request::new(amplifier_module::EmitHookRequest {
+        event: "test:event".to_string(),
+        data_json: "{}".to_string(),
+    });
+
+    let result = interceptor(request);
+    assert!(result.is_err());
+    assert_eq!(result.unwrap_err().code(), tonic::Code::Unauthenticated);
+}
+
+#[tokio::test]
+async fn authenticated_request_is_accepted() {
+    let token = "test-secret-token";
+    let mut request = Request::new(());
+    request.metadata_mut().insert(
+        "x-amplifier-token",
+        token.parse().unwrap(),
+    );
+
+    let interceptor = auth_interceptor(token);
+    let result = interceptor(request);
+    assert!(result.is_ok());
+}
+```
+
+### 5b. Implement shared-secret interceptor
+
+Add to `crates/amplifier-core/src/grpc_server.rs`:
+
+```rust
+use tonic::service::Interceptor;
+
+/// Creates a tonic interceptor that validates a shared secret token.
+///
+/// The token must be passed via the `x-amplifier-token` metadata header.
+/// If the token is missing or does not match, the request is rejected with
+/// `UNAUTHENTICATED`.
+pub fn auth_interceptor(
+    expected_token: impl Into<String>,
+) -> impl Fn(Request<()>) -> Result<Request<()>, Status> + Clone {
+    let expected = expected_token.into();
+    move |req: Request<()>| {
+        let token = req
+            .metadata()
+            .get("x-amplifier-token")
+            .and_then(|v| v.to_str().ok());
+        match token {
+            Some(t) if t == expected => Ok(req),
+            _ => Err(Status::unauthenticated("missing or invalid token")),
+        }
+    }
+}
+```
+
+### 5c. 
Wire interceptor into server construction + +Update the public API for constructing the gRPC server (wherever +`KernelServiceServer::new(service)` is called) to use +`KernelServiceServer::with_interceptor(service, auth_interceptor(token))`. + +Add a builder method to `KernelServiceImpl`: + +```rust +impl KernelServiceImpl { + /// Build a tonic Router with authentication enabled. + /// + /// `token` is the shared secret that out-of-process modules must include + /// in the `x-amplifier-token` gRPC metadata header. + pub fn into_router( + self, + token: &str, + ) -> tonic::transport::server::Router { + let svc = amplifier_module::kernel_service_server::KernelServiceServer::with_interceptor( + self, + auth_interceptor(token), + ); + tonic::transport::Server::builder().add_service(svc) + } +} +``` + +### 5d. Generate token and pass to child modules + +Token generation: use `uuid::Uuid::new_v4().to_string()` as the per-session +shared secret. The token is generated when the gRPC server starts and passed +to child module processes via the `AMPLIFIER_TOKEN` environment variable. + +### 5e. Verify + +```bash +cd crates/amplifier-core && cargo test grpc_server +``` + +--- + +## Task 6: C-04 — Node.js Detached Instance Fix (Interim) + +**Priority:** Critical — entire Node.js hook system is non-functional +**Estimated effort:** 1-2 hours (interim rename + warnings + tests) +**Files:** +- `bindings/node/src/lib.rs` (lines 535-541, 648-661) +- `bindings/node/index.d.ts` (lines 157, 190) +- `bindings/node/__tests__/` (new tests) + +### 6a. 
Write tests pinning detached-instance behavior + +In `bindings/node/__tests__/coordinator.test.ts`: + +```typescript +it('createHookRegistry() creates a new instance each call', () => { + const coord = new JsCoordinator('{}') + const h1 = coord.createHookRegistry() + const h2 = coord.createHookRegistry() + expect(h1).not.toBe(h2) // pins known detached behavior +}) +``` + +In `bindings/node/__tests__/session.test.ts`: + +```typescript +it('createCoordinator() creates a new instance each call', () => { + const session = new JsAmplifierSession('{}') + const c1 = session.createCoordinator() + const c2 = session.createCoordinator() + expect(c1).not.toBe(c2) // pins known detached behavior +}) +``` + +### 6b. Rename getters to factory methods + +In `bindings/node/src/lib.rs`: + +**JsCoordinator — rename `hooks` getter to `createHookRegistry` method:** + +```rust +// BEFORE (line 538-540): +#[napi(getter)] +pub fn hooks(&self) -> JsHookRegistry { + JsHookRegistry::new_detached() +} + +// AFTER: +/// Creates a new, detached HookRegistry instance. +/// +/// WARNING: Each call returns a separate, empty registry — handlers registered +/// on one instance are NOT visible to another. Cache the result if you need +/// a shared registry. Structural fix tracked as Future TODO #1. +#[napi] +pub fn create_hook_registry(&self) -> JsHookRegistry { + log::warn!( + "JsCoordinator.createHookRegistry(): returns a detached instance. \ + Cache the result — each call creates a new empty registry." + ); + JsHookRegistry::new_detached() +} +``` + +**JsAmplifierSession — rename `coordinator` getter to `createCoordinator` method:** + +```rust +// BEFORE (line 656-661): +#[napi(getter)] +pub fn coordinator(&self) -> JsCoordinator { + JsCoordinator { + inner: Arc::new(amplifier_core::Coordinator::new(self.cached_config.clone())), + } +} + +// AFTER: +/// Creates a new Coordinator instance from cached config. 
+/// +/// WARNING: Each call allocates a fresh Coordinator — state is NOT shared +/// between instances. Cache the result. Structural fix tracked as Future TODO #1. +#[napi] +pub fn create_coordinator(&self) -> JsCoordinator { + log::warn!( + "JsAmplifierSession.createCoordinator(): returns a new instance. \ + Cache the result — state is not shared between instances." + ); + JsCoordinator { + inner: Arc::new(amplifier_core::Coordinator::new(self.cached_config.clone())), + } +} +``` + +### 6c. Update TypeScript type definitions + +In `bindings/node/index.d.ts`, replace the getter declarations: + +```typescript +// In JsCoordinator (was: get hooks(): JsHookRegistry): +/** + * Creates a new, detached HookRegistry instance. + * + * WARNING: Each call returns a separate empty registry. Cache the result. + */ +createHookRegistry(): JsHookRegistry + +// In JsAmplifierSession (was: get coordinator(): JsCoordinator): +/** + * Creates a new Coordinator from cached config. + * + * WARNING: Each call creates a fresh instance. Cache the result. + */ +createCoordinator(): JsCoordinator +``` + +### 6d. Update any existing tests/examples that use the old getter names + +Search for `coord.hooks`, `session.coordinator`, `.hooks.` in test files +and update to the new method names. + +### 6e. Verify + +```bash +cd bindings/node && npm test +``` + +--- + +## Task 7: H-02 — Sanitize gRPC Error Messages (12 Sites) + +**Priority:** High — internal error details exposed to callers +**Estimated effort:** 1-2 hours +**File:** `crates/amplifier-core/src/grpc_server.rs` — 12 sites + +### 7a. 
Write test — error messages do not contain internal details + +```rust +#[tokio::test] +async fn tool_execution_error_does_not_leak_internals() { + use crate::testing::FailingTool; + let coord = Arc::new(Coordinator::new(Default::default())); + let tool = Arc::new(FailingTool::new("secret-tool", "internal DB error at /var/lib/data.db")); + coord.mount_tool(tool); + let service = KernelServiceImpl::new(coord); + + let request = Request::new(amplifier_module::ExecuteToolRequest { + tool_name: "secret-tool".to_string(), + input_json: "{}".to_string(), + }); + + let err = service.execute_tool(request).await.unwrap_err(); + let msg = err.message(); + assert!(!msg.contains("/var/lib"), "Error should not contain file paths"); + assert!(!msg.contains("DB error"), "Error should not contain internal details"); +} +``` + +### 7b. Fix all 12 sites + +Pattern: log the full error server-side, return a generic message to the caller. + +| Line | Current | Fixed | +|------|---------|-------| +| 47 | `Status::not_found(format!("Provider not mounted: {provider_name}"))` | `log::debug!("Provider lookup failed: {provider_name}"); Status::not_found("Provider not available")` | +| 63 | `Status::internal(format!("Provider completion failed: {e}"))` | `log::error!("Provider completion failed for {provider_name}: {e}"); Status::internal("Provider completion failed")` | +| 95 | `Status::internal(format!("Provider completion failed: {e}"))` | Same pattern as line 63 | +| 121 | `Status::not_found(format!("Tool not found: {tool_name}"))` | `log::debug!("Tool lookup failed: {tool_name}"); Status::not_found("Tool not available")` | +| 125 | `Status::invalid_argument(format!("Invalid input JSON: {e}"))` | `Status::invalid_argument("Invalid input JSON")` (safe — no internals) | +| 154 | `Status::internal(format!("Tool execution failed: {e}"))` | `log::error!("Tool execution failed for {tool_name}: {e}"); Status::internal("Tool execution failed")` | +| 168 | 
`Status::invalid_argument(format!("Invalid data_json: {e}"))` | `Status::invalid_argument("Invalid data_json")` | +| 186 | Same pattern | Same fix | +| 228 | `Status::internal(format!("Failed to get messages: {e}"))` | `log::error!("Failed to get messages: {e}"); Status::internal("Failed to get messages")` | +| 259 | `Status::invalid_argument(format!("Invalid message: {e}"))` | `Status::invalid_argument("Invalid message format")` | +| 262 | `Status::internal(format!("Failed to serialize message: {e}"))` | `log::error!("Failed to serialize message: {e}"); Status::internal("Failed to process message")` | +| 272 | `Status::internal(format!("Failed to add message: {e}"))` | `log::error!("Failed to add message: {e}"); Status::internal("Failed to add message")` | + +### 7c. Verify + +```bash +cd crates/amplifier-core && cargo test grpc_server +``` + +--- + +## Task 8: H-03 — Path Traversal Check in Module Resolver + +**Priority:** High — arbitrary filesystem read via malicious amplifier.toml +**Estimated effort:** 1 hour +**File:** `crates/amplifier-core/src/module_resolver.rs` (lines 158-167) + +### 8a. 
Write failing test + +In `crates/amplifier-core/tests/module_resolver_e2e.rs`: + +```rust +#[test] +fn artifact_path_traversal_is_rejected() { + let dir = tempfile::tempdir().unwrap(); + let toml_content = r#" +[module] +transport = "wasm" +type = "tool" +artifact = "../../../etc/passwd" +"#; + std::fs::write(dir.path().join("amplifier.toml"), toml_content).unwrap(); + + let result = amplifier_core::module_resolver::resolve(dir.path()); + assert!(result.is_err(), "Path traversal should be rejected"); + let err_msg = format!("{}", result.unwrap_err()); + assert!( + err_msg.contains("simple filename") || err_msg.contains("escapes"), + "Error should mention path restriction: {err_msg}" + ); +} + +#[test] +fn artifact_with_dot_prefix_is_rejected() { + let dir = tempfile::tempdir().unwrap(); + let toml_content = r#" +[module] +transport = "wasm" +type = "tool" +artifact = ".hidden-module.wasm" +"#; + std::fs::write(dir.path().join("amplifier.toml"), toml_content).unwrap(); + + let result = amplifier_core::module_resolver::resolve(dir.path()); + assert!(result.is_err(), "Dot-prefixed artifact should be rejected"); +} +``` + +### 8b. Fix: validate artifact is a simple filename + +In `crates/amplifier-core/src/module_resolver.rs`, after extracting +`wasm_filename` (line 162), add validation: + +```rust +Transport::Wasm => { + let wasm_filename = module_section + .get("artifact") + .and_then(|v| v.as_str()) + .unwrap_or("module.wasm"); + + // Reject path separators and dot-prefixed names — artifact must be + // a simple filename within the module directory. + if wasm_filename.contains('/') + || wasm_filename.contains('\\') + || wasm_filename.starts_with('.') + { + return Err(ModuleResolverError::TomlParseError { + path: module_path.to_path_buf(), + reason: format!( + "artifact must be a simple filename (got '{wasm_filename}'). \ + Path separators and dot-prefixed names are not allowed." 
+ ), + }); + } + + let wasm_path = module_path.join(wasm_filename); + + // Defense in depth: verify resolved path stays inside module directory + // (only possible when the file exists on disk; skip for deferred-load) + if wasm_path.exists() { + let canonical = wasm_path.canonicalize().map_err(|e| { + ModuleResolverError::TomlParseError { + path: module_path.to_path_buf(), + reason: format!("Failed to canonicalize artifact path: {e}"), + } + })?; + let canonical_base = module_path.canonicalize().map_err(|e| { + ModuleResolverError::TomlParseError { + path: module_path.to_path_buf(), + reason: format!("Failed to canonicalize module path: {e}"), + } + })?; + if !canonical.starts_with(&canonical_base) { + return Err(ModuleResolverError::TomlParseError { + path: module_path.to_path_buf(), + reason: "artifact path escapes module directory".to_string(), + }); + } + } + + ModuleArtifact::WasmBytes { + bytes: Vec::new(), + path: wasm_path, + } +} +``` + +### 8c. Verify + +```bash +cd crates/amplifier-core && cargo test module_resolver +``` + +--- + +## Task 9: H-04 — Document Session ID Routing Limitation + +**Priority:** High — cross-session access possible (depends on C-01 for full fix) +**Estimated effort:** 30 minutes +**File:** `crates/amplifier-core/src/grpc_server.rs` (lines 216-218, 248-252) + +### 9a. Add log warning + doc comments + +At the `get_messages` method: + +```rust +async fn get_messages( + &self, + _request: Request, +) -> Result, Status> { + // TODO(security): session_id in GetMessagesRequest is currently ignored. + // All connected modules share a single context (the coordinator's). + // Per-session routing requires caller identity from authentication (C-01) + // and a session-to-coordinator mapping. Tracked as post-merge follow-up. + log::debug!( + "get_messages: session_id routing not yet implemented — \ + returning shared coordinator context" + ); + // ...existing code... 
+``` + +Same pattern for `add_message`: + +```rust +async fn add_message( + &self, + request: Request, +) -> Result, Status> { + let req = request.into_inner(); + // TODO(security): session_id is currently ignored — messages are added + // to the shared coordinator context regardless of which session the + // caller claims. Per-session isolation requires C-01 (authentication). + if !req.session_id.is_empty() { + log::warn!( + "add_message: session_id '{}' provided but per-session routing \ + is not yet implemented — message added to shared context", + req.session_id + ); + } + // ...existing code unchanged... +``` + +### 9b. Verify + +```bash +cd crates/amplifier-core && cargo test grpc_server +``` + +--- + +## Task 10: H-05 — Remove HTTP Import from WIT + +**Priority:** High — declared interface not provided at runtime +**Estimated effort:** 30 minutes +**File:** `wit/amplifier-modules.wit` (lines 140-144) + +### 10a. Write test — provider module world does not require HTTP + +Verify the existing provider WASM test fixture still compiles/loads after +removing the HTTP import. (If no provider fixture exists, this is a +compile-only check.) + +### 10b. Remove the HTTP import + +In `wit/amplifier-modules.wit`, change: + +```wit +// BEFORE: +/// Tier 2: Provider module — needs outbound HTTP for LLM API calls. +world provider-module { + import wasi:http/outgoing-handler@0.2.0; + export provider; +} + +// AFTER: +/// Tier 2: Provider module. +/// +/// NOTE: HTTP outbound is not yet supported. Provider modules that need +/// network access should use the gRPC transport (out-of-process) for now. +/// When wasi:http support is added, it will be gated behind an explicit +/// allow-list configuration. +world provider-module { + export provider; +} +``` + +### 10c. 
Verify + +```bash +cd crates/amplifier-core && cargo check --features wasm +cargo test --features wasm +``` + +--- + +## Task 11: H-06 — Fix TypeScript Type Definitions + +**Priority:** High — type safety eliminated for key APIs +**Estimated effort:** 1 hour +**File:** `bindings/node/index.d.ts` + +### 11a. Fix `status` getter return type + +```typescript +// BEFORE (line 179): +get status(): string + +// AFTER: +get status(): 'running' | 'completed' | 'failed' | 'cancelled' +``` + +### 11b. Fix hook handler signature + +```typescript +// BEFORE (line 130): +register(event: string, handler: (...args: any[]) => any, priority: number, name: string): void + +// AFTER: +register( + event: string, + handler: (event: string, dataJson: string) => string | Promise, + priority: number, + name: string +): void +``` + +### 11c. Fix JsModuleManifest string literal unions + +```typescript +// BEFORE (lines 83-87): +transport: string +moduleType: string +artifactType: string + +// AFTER: +transport: 'python' | 'wasm' | 'grpc' | 'native' +moduleType: 'tool' | 'hook' | 'context' | 'approval' | 'provider' | 'orchestrator' | 'resolver' +artifactType: 'wasm' | 'grpc' | 'python' +``` + +### 11d. Verify + +```bash +cd bindings/node && npx tsc --noEmit +npm test +``` + +--- + +## Task 12: H-07 — Add JSON Payload Size Limits in gRPC Server + +**Priority:** High — DoS via unbounded JSON payloads +**Estimated effort:** 1 hour +**File:** `crates/amplifier-core/src/grpc_server.rs` + +### 12a. 
Write failing test — oversized payload is rejected + +```rust +#[tokio::test] +async fn oversized_json_payload_is_rejected() { + let coord = Arc::new(Coordinator::new(Default::default())); + coord.mount_tool(Arc::new(crate::testing::EchoTool::new("echo"))); + let service = KernelServiceImpl::new(coord); + + // 1 MB of nested JSON — exceeds 64 KB limit + let huge_json = "{\"a\":".repeat(1024 * 128) + "null" + &"}".repeat(1024 * 128); + + let request = Request::new(amplifier_module::ExecuteToolRequest { + tool_name: "echo".to_string(), + input_json: huge_json, + }); + + let err = service.execute_tool(request).await.unwrap_err(); + assert_eq!(err.code(), tonic::Code::InvalidArgument); + assert!(err.message().contains("exceeds maximum size")); +} +``` + +### 12b. Add size validation helper + apply to all JSON fields + +At the top of `grpc_server.rs`: + +```rust +/// Maximum allowed size for JSON string fields in gRPC requests. +const MAX_JSON_PAYLOAD_BYTES: usize = 64 * 1024; // 64 KB + +/// Validates that a JSON string field does not exceed the maximum size. +fn validate_json_size(json: &str, field_name: &str) -> Result<(), Status> { + if json.len() > MAX_JSON_PAYLOAD_BYTES { + Err(Status::invalid_argument(format!( + "{field_name} exceeds maximum size of {MAX_JSON_PAYLOAD_BYTES} bytes" + ))) + } else { + Ok(()) + } +} +``` + +Apply before every `serde_json::from_str` call: + +```rust +// execute_tool (line 124): +validate_json_size(&req.input_json, "input_json")?; + +// emit_hook (line 164): +validate_json_size(&req.data_json, "data_json")?; + +// emit_hook_and_collect (line 185): +validate_json_size(&req.data_json, "data_json")?; + +// register_capability (line 343): +validate_json_size(&req.value_json, "value_json")?; +``` + +### 12c. 
Verify + +```bash +cd crates/amplifier-core && cargo test grpc_server +``` + +--- + +## Verification — Full Suite + +After all tasks are complete, run the full verification: + +```bash +# Rust — all crates +cd /path/to/amplifier-core +cargo fmt --check +cargo clippy --all-features -- -D warnings +cargo test --all-features + +# Node bindings +cd bindings/node +npm test +npx tsc --noEmit + +# Python bindings (if test infrastructure exists) +cd bindings/python +maturin develop +pytest +``` + +--- + +## Follow-Up Work (Not Tasked — Track as Issues) + +The following items were identified in the code review but are not +merge-blocking or production-blocking. They should be tracked as follow-up +issues and addressed in subsequent PRs. + +### Medium Priority (M-01 through M-09) + +| ID | Summary | File(s) | +|----|---------|---------| +| M-01 | Extract duplicated `get_typed_func` across 4+ WASM bridges into `bridges/mod.rs` | `wasm_tool.rs`, `wasm_provider.rs`, `wasm_context.rs`, `wasm_hook.rs`, `wasm_orchestrator.rs` | +| M-02 | Extract `to_json_or_warn` / `from_json_or_warn` helpers (30+ repeated sites) | `generated/conversions.rs` | +| M-03 | Replace `unwrap()` with `swap_remove(0)` in module resolver match arm | `module_resolver.rs:71` | +| M-04 | Separate `WasmPath(PathBuf)` from `WasmBytes` to eliminate deferred-load ambiguity | `module_resolver.rs:164` | +| M-05 | Add `debug_assert!` for `block_on` context safety in orchestrator bridge | `wasm_orchestrator.rs:111,147,185,215,244` | +| M-06 | Wire or remove `_reason` parameter in Node.js cancellation methods | `bindings/node/src/lib.rs:301-313` | +| M-07 | Remove dead exported types (`JsToolResult`, `JsToolSpec`, `JsSessionConfig`, `Role`) or wire into API | `bindings/node/src/lib.rs`, `index.d.ts` | +| M-08 | Add WASM module integrity verification (sha256 hash in `amplifier.toml`) | `module_resolver.rs:271-290` | +| M-09 | Clamp `timeout_seconds` in `EmitHookAndCollect` to max 300s | `grpc_server.rs:189-193` | + 
+### Low Priority / Style (L-01 through L-07) + +| ID | Summary | File(s) | +|----|---------|---------| +| L-01 | Replace `eprintln!` with `log::warn!` in Node bindings (3 sites) | `bindings/node/src/lib.rs:338,353,750` | +| L-02 | Remove duplicate stale doc comment on `export_tool!` macro | `amplifier-guest/src/lib.rs:30-54` | +| L-03 | Fix float sentinel 0.0 conflation with "not set" in proto conversions | `generated/conversions.rs:849-858` | +| L-04 | Make `ContentBlock::None` return error instead of silent empty text | `generated/conversions.rs:484-491` | +| L-05 | Narrow `unsafe impl Sync` safety comment for `PyHookHandlerBridge` | `bindings/python/src/lib.rs:67-68` | +| L-06 | Replace `blocking_lock()` with `try_lock()` in Python bindings (panic risk in async) | `bindings/python/src/lib.rs:389,417,505` | +| L-07 | Add name validation + overwrite protection to `register_capability` | `grpc_server.rs:338-347` | + +### Test Coverage Gaps (TG-01 through TG-08) + +| ID | Summary | File(s) | +|----|---------|---------| +| TG-01 | Add async (Promise-returning) hook handler test | `bindings/node/__tests__/hooks.test.ts` | +| TG-02 | Add invalid/corrupted WASM bytes test | `crates/amplifier-core/tests/wasm_e2e.rs` | +| TG-03 | Add JS-side error path tests for `resolveModule` / `loadWasmFromPath` | `bindings/node/__tests__/node-wasm-session.test.ts` | +| TG-04 | Add tests pinning detached coordinator/hooks behavior | `bindings/node/__tests__/` | +| TG-05 | Add hook handler throw test | `bindings/node/__tests__/hooks.test.ts` | +| TG-06 | Add test for directory with multiple `.wasm` files without `amplifier.toml` | `tests/module_resolver_e2e.rs` | +| TG-07 | Add test for invalid `transport` value in `amplifier.toml` | `tests/module_resolver_e2e.rs` | +| TG-08 | Add mixed-transport failure path test (WASM tool fails mid-session) | `tests/mixed_transport_e2e.rs` | + +### Dependency Updates + +| Item | Action | +|------|--------| +| `rand = "0.8"` → `"0.9"` | Update in 
`crates/amplifier-core/Cargo.toml`, fix any breaking API changes | +| Add `cargo-audit` to CI | Add `cargo audit` step to GitHub Actions workflow | + +--- + +## Open Questions + +1. **C-01 token transport:** Should the token be passed via environment + variable (`AMPLIFIER_TOKEN`), command-line argument, or a token file? + Environment variable is simplest but visible in `/proc/*/environ`. + Token file with `chmod 0600` is more secure. + +2. **C-02 epoch limits:** What are appropriate default values for + `max_epoch_ticks` and `max_memory_bytes`? The plan uses 3000 ticks + (~30 seconds) and 64 MB. These may need tuning based on real workloads. + +3. **C-04 structural fix timeline:** The interim rename + warnings approach + is correct for pre-merge, but the structural fix (Arc-wrapping the + Coordinator's HookRegistry) should be prioritized. Should this be a + separate follow-up PR or part of this batch? + +4. **H-05 HTTP removal:** Does any existing provider module test fixture + compile against the `provider-module` world with the HTTP import? If so, + that fixture needs updating when the import is removed. \ No newline at end of file diff --git a/examples/node-wasm-session.ts b/examples/node-wasm-session.ts new file mode 100644 index 0000000..1144f2d --- /dev/null +++ b/examples/node-wasm-session.ts @@ -0,0 +1,49 @@ +/** + * Node.js WASM Session Example + * + * Proves the TypeScript host → Napi-RS → Rust resolver → wasmtime → WASM tool pipeline. 
+ * + * Run with: + * npx ts-node examples/node-wasm-session.ts + * node --experimental-strip-types examples/node-wasm-session.ts + * Or compile first: + * npx tsc --esModuleInterop --module commonjs --target ES2022 --types node examples/node-wasm-session.ts + * node examples/node-wasm-session.js + */ + +const { resolveModule, loadWasmFromPath } = require('../bindings/node/index.js') +const path = require('path') +const fs = require('fs') +const { tmpdir } = require('os') + +// The fixture directory contains multiple .wasm files; readdir order is +// filesystem-dependent. Copy just the echo-tool fixture into a temp directory +// so the resolver deterministically picks it. +const fixtureBase = path.resolve(__dirname, '..', 'tests', 'fixtures', 'wasm') +const fixtureDir = fs.mkdtempSync(path.join(tmpdir(), 'amplifier-node-wasm-')) +fs.copyFileSync( + path.join(fixtureBase, 'echo-tool.wasm'), + path.join(fixtureDir, 'echo-tool.wasm') +) + +try { + // Step 1: Resolve the module + console.log(`Resolving module from: ${fixtureDir}`) + const manifest = resolveModule(fixtureDir) + console.log(` transport: ${manifest.transport}`) + console.log(` module_type: ${manifest.moduleType}`) + + // Step 2: Load the WASM module + console.log(`\nLoading WASM module from: ${fixtureDir}`) + const status = loadWasmFromPath(fixtureDir) + console.log(` status: ${status}`) + console.log(` module_type: ${manifest.moduleType}`) + + // Success + console.log('\nTypeScript → Napi-RS → Rust resolver → wasmtime → WASM tool pipeline: SUCCESS') +} catch (err) { + console.error(err) + process.exit(1) +} finally { + fs.rmSync(fixtureDir, { recursive: true }) +} diff --git a/examples/python-wasm-session.py b/examples/python-wasm-session.py new file mode 100644 index 0000000..dd0ff1c --- /dev/null +++ b/examples/python-wasm-session.py @@ -0,0 +1,195 @@ +"""Python WASM Session Example + +Proves the Python host → PyO3 → Rust resolver → wasmtime → WASM tool pipeline. 
+ +Run with: + python examples/python-wasm-session.py + +Requires the dev-branch amplifier_core with resolve_module / load_wasm_from_path +bindings compiled in (the .venv built from this repo has them): + .venv/bin/python examples/python-wasm-session.py + +Notes +----- +WASM compilation via wasmtime is slow on ARM64 (aarch64). The script runs +the resolve step unconditionally (fast — proves Python → PyO3 → Rust resolver) +and attempts the full WASM load in a child process with a generous timeout. +If the timeout fires the script still exits 0 with a clear explanatory note. +""" + +import importlib +import os +import shutil +import subprocess +import sys +import tempfile + +# --------------------------------------------------------------------------- +# Locate the echo-tool fixture relative to this script +# --------------------------------------------------------------------------- + +SCRIPT_DIR = os.path.dirname(os.path.abspath(__file__)) +FIXTURE_BASE = os.path.join(SCRIPT_DIR, "..", "tests", "fixtures", "wasm") +ECHO_TOOL_WASM = os.path.join(FIXTURE_BASE, "echo-tool.wasm") + +if not os.path.isfile(ECHO_TOOL_WASM): + print(f"ERROR: WASM fixture not found: {ECHO_TOOL_WASM}", file=sys.stderr) + print(" Build fixtures first: cd tests/fixtures/wasm && bash build-fixtures.sh") + sys.exit(1) + +# --------------------------------------------------------------------------- +# Step 0 — verify the PyO3 module exposes the required symbols +# --------------------------------------------------------------------------- + +print("Checking amplifier_core._engine symbols...") +_engine = importlib.import_module("amplifier_core._engine") +_missing = [ + sym + for sym in ("resolve_module", "load_wasm_from_path", "load_and_mount_wasm") + if not hasattr(_engine, sym) +] +if _missing: + print(f" FAIL: missing symbols: {_missing}", file=sys.stderr) + print( + " Rebuild the Python bindings with:" + " maturin develop --manifest-path bindings/python/Cargo.toml", + file=sys.stderr, + ) + 
sys.exit(1) + +print(" resolve_module: OK") +print(" load_wasm_from_path: OK") +print(" load_and_mount_wasm: OK") + +resolve_module = _engine.resolve_module + +# --------------------------------------------------------------------------- +# Helper: build an isolated temp directory for the resolver. +# The directory must contain: +# amplifier.toml — declares transport = "wasm", type = "tool" +# module.wasm — the compiled WASM binary (default name the resolver looks for) +# --------------------------------------------------------------------------- + + +def make_wasm_fixture_dir(base_tmpdir: str) -> str: + """Copy echo-tool fixture into a clean directory the resolver can scan.""" + fixture_dir = os.path.join(base_tmpdir, "echo-tool") + os.makedirs(fixture_dir, exist_ok=True) + with open(os.path.join(fixture_dir, "amplifier.toml"), "w") as fh: + fh.write('[module]\ntransport = "wasm"\ntype = "tool"\n') + shutil.copy(ECHO_TOOL_WASM, os.path.join(fixture_dir, "module.wasm")) + return fixture_dir + + +# --------------------------------------------------------------------------- +# Step 1 — resolve_module: Python → PyO3 → Rust resolver +# This step is fast (<1 ms) and proves the bridge works end-to-end. 
+# --------------------------------------------------------------------------- + +print("\nStep 1: resolve_module (Python → PyO3 → Rust resolver)") + +with tempfile.TemporaryDirectory(prefix="amplifier-py-wasm-") as tmpdir: + fixture_dir = make_wasm_fixture_dir(tmpdir) + print(f" fixture dir: {fixture_dir}") + + manifest = resolve_module(fixture_dir) + print(f" transport: {manifest['transport']}") + print(f" module_type: {manifest['module_type']}") + + if manifest["transport"] != "wasm": + print(f" FAIL: expected transport='wasm', got '{manifest['transport']}'") + sys.exit(1) + if manifest["module_type"] != "tool": + print(f" FAIL: expected module_type='tool', got '{manifest['module_type']}'") + sys.exit(1) + + print(" resolve_module: PASS") + +# --------------------------------------------------------------------------- +# Step 2 — load_wasm_from_path: Rust resolver → wasmtime → WASM tool +# WASM compilation via wasmtime-cranelift can be very slow on ARM64. +# We run it in a child process so the parent can impose a wall-clock timeout +# without hanging indefinitely. +# --------------------------------------------------------------------------- + +WASM_LOAD_TIMEOUT = int(os.environ.get("AMPLIFIER_WASM_LOAD_TIMEOUT", "300")) + +print("\nStep 2: load_wasm_from_path (Rust resolver → wasmtime → WASM tool)") +print(f" timeout: {WASM_LOAD_TIMEOUT}s (override with AMPLIFIER_WASM_LOAD_TIMEOUT)") + +# Inline Python passed to the child process via -c. +# sys.argv[1] receives the path to the .wasm fixture file. 
+_child_script = r""" +import os, sys, tempfile, shutil + +fixture = sys.argv[1] +from amplifier_core._engine import load_wasm_from_path + +with tempfile.TemporaryDirectory(prefix="amplifier-py-wasm-child-") as tmpdir: + fixture_dir = os.path.join(tmpdir, "echo-tool") + os.makedirs(fixture_dir) + with open(os.path.join(fixture_dir, "amplifier.toml"), "w") as fh: + fh.write('[module]\ntransport = "wasm"\ntype = "tool"\n') + shutil.copy(fixture, os.path.join(fixture_dir, "module.wasm")) + + result = load_wasm_from_path(fixture_dir) + print(f"status: {result['status']}") + print(f"module_type: {result['module_type']}") + assert result["status"] == "loaded", f"unexpected status: {result['status']}" + assert result["module_type"] == "tool", f"unexpected module_type: {result['module_type']}" +""" + +# wasm_load_ok: True = pass, False = fail, None = skipped (timeout) +wasm_load_ok = None +proc = None +try: + proc = subprocess.Popen( + [sys.executable, "-c", _child_script, ECHO_TOOL_WASM], + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + text=True, + ) + stdout, stderr = proc.communicate(timeout=WASM_LOAD_TIMEOUT) + + if proc.returncode == 0: + for line in stdout.strip().splitlines(): + print(f" {line}") + print(" load_wasm_from_path: PASS") + wasm_load_ok = True + else: + print(f" FAIL (exit {proc.returncode})") + if stderr.strip(): + print(f" stderr: {stderr.strip()[:400]}") + wasm_load_ok = False + +except subprocess.TimeoutExpired: + if proc is not None: + proc.kill() + proc.wait() + print(f" SKIP: WASM compilation did not complete within {WASM_LOAD_TIMEOUT}s") + print( + " (wasmtime-cranelift compilation is extremely slow on ARM64/aarch64;" + " this is expected and does not indicate a code defect)" + ) + print(" load_wasm_from_path: SKIPPED (ARM64 timeout)") + wasm_load_ok = None # skipped, not failed + +# --------------------------------------------------------------------------- +# Summary +# 
--------------------------------------------------------------------------- + +print("\n" + "=" * 70) +if wasm_load_ok is True: + print("Python → PyO3 → Rust resolver → wasmtime → WASM tool pipeline: SUCCESS") +elif wasm_load_ok is None: + print("Python → PyO3 → Rust resolver pipeline: SUCCESS") + print( + "wasmtime WASM compilation: SKIPPED (ARM64 timeout)" + ) + print() + print("Resolver pipeline fully verified. Run on x86_64 for full end-to-end proof,") + print("or extend AMPLIFIER_WASM_LOAD_TIMEOUT for a longer ARM64 attempt.") +else: + print("Python → PyO3 → Rust resolver pipeline: SUCCESS") + print("wasmtime WASM load: FAIL") + sys.exit(1) diff --git a/examples/wasm-modules/calculator-tool.wasm b/examples/wasm-modules/calculator-tool.wasm new file mode 100644 index 0000000..213fd73 Binary files /dev/null and b/examples/wasm-modules/calculator-tool.wasm differ diff --git a/examples/wasm-modules/calculator-tool/Cargo.lock b/examples/wasm-modules/calculator-tool/Cargo.lock new file mode 100644 index 0000000..cb59a1f --- /dev/null +++ b/examples/wasm-modules/calculator-tool/Cargo.lock @@ -0,0 +1,861 @@ +# This file is automatically @generated by Cargo. +# It is not intended for manual editing. 
+version = 4 + +[[package]] +name = "adler2" +version = "2.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "320119579fcad9c21884f5c4861d16174d0e06250625266f50fe6898340abefa" + +[[package]] +name = "amplifier-guest" +version = "0.1.0" +dependencies = [ + "prost", + "serde", + "serde_json", + "wit-bindgen", +] + +[[package]] +name = "anyhow" +version = "1.0.102" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7f202df86484c868dbad7eaa557ef785d5c66295e41b460ef922eca0723b842c" + +[[package]] +name = "auditable-serde" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5c7bf8143dfc3c0258df908843e169b5cc5fcf76c7718bd66135ef4a9cd558c5" +dependencies = [ + "semver", + "serde", + "serde_json", + "topological-sort", +] + +[[package]] +name = "bitflags" +version = "2.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "843867be96c8daad0d758b57df9392b6d8d271134fce549de6ce169ff98a92af" + +[[package]] +name = "bytes" +version = "1.11.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e748733b7cbc798e1434b6ac524f0c1ff2ab456fe201501e6497c8417a4fc33" + +[[package]] +name = "calculator-tool" +version = "0.1.0" +dependencies = [ + "amplifier-guest", + "serde_json", + "wit-bindgen-rt", +] + +[[package]] +name = "cfg-if" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9330f8b2ff13f34540b44e946ef35111825727b38d33286ef986142615121801" + +[[package]] +name = "crc32fast" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9481c1c90cbf2ac953f07c8d4a58aa3945c425b7185c9154d67a65e4230da511" +dependencies = [ + "cfg-if", +] + +[[package]] +name = "displaydoc" +version = "0.2.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "97369cbbc041bc366949bc74d34658d6cda5621039731c6310521892a3a20ae0" 
+dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "either" +version = "1.15.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "48c757948c5ede0e46177b7add2e67155f70e33c07fea8284df6576da70b3719" + +[[package]] +name = "equivalent" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "877a4ace8713b0bcf2a4e7eec82529c029f1d0619886d18145fea96c3ffe5c0f" + +[[package]] +name = "flate2" +version = "1.1.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "843fba2746e448b37e26a819579957415c8cef339bf08564fe8b7ddbd959573c" +dependencies = [ + "crc32fast", + "miniz_oxide", +] + +[[package]] +name = "foldhash" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d9c4f5dac5e15c24eb999c26181a6ca40b39fe946cbe4c263c7209467bc83af2" + +[[package]] +name = "form_urlencoded" +version = "1.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cb4cb245038516f5f85277875cdaa4f7d2c9a0fa0468de06ed190163b1581fcf" +dependencies = [ + "percent-encoding", +] + +[[package]] +name = "futures" +version = "0.3.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8b147ee9d1f6d097cef9ce628cd2ee62288d963e16fb287bd9286455b241382d" +dependencies = [ + "futures-channel", + "futures-core", + "futures-executor", + "futures-io", + "futures-sink", + "futures-task", + "futures-util", +] + +[[package]] +name = "futures-channel" +version = "0.3.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "07bbe89c50d7a535e539b8c17bc0b49bdb77747034daa8087407d655f3f7cc1d" +dependencies = [ + "futures-core", + "futures-sink", +] + +[[package]] +name = "futures-core" +version = "0.3.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7e3450815272ef58cec6d564423f6e755e25379b217b0bc688e295ba24df6b1d" + +[[package]] +name = 
"futures-executor" +version = "0.3.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "baf29c38818342a3b26b5b923639e7b1f4a61fc5e76102d4b1981c6dc7a7579d" +dependencies = [ + "futures-core", + "futures-task", + "futures-util", +] + +[[package]] +name = "futures-io" +version = "0.3.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cecba35d7ad927e23624b22ad55235f2239cfa44fd10428eecbeba6d6a717718" + +[[package]] +name = "futures-macro" +version = "0.3.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e835b70203e41293343137df5c0664546da5745f82ec9b84d40be8336958447b" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "futures-sink" +version = "0.3.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c39754e157331b013978ec91992bde1ac089843443c49cbc7f46150b0fad0893" + +[[package]] +name = "futures-task" +version = "0.3.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "037711b3d59c33004d3856fbdc83b99d4ff37a24768fa1be9ce3538a1cde4393" + +[[package]] +name = "futures-util" +version = "0.3.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "389ca41296e6190b48053de0321d02a77f32f8a5d2461dd38762c0593805c6d6" +dependencies = [ + "futures-channel", + "futures-core", + "futures-io", + "futures-macro", + "futures-sink", + "futures-task", + "memchr", + "pin-project-lite", + "slab", +] + +[[package]] +name = "hashbrown" +version = "0.15.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9229cfe53dfd69f0609a49f65461bd93001ea1ef889cd5529dd176593f5338a1" +dependencies = [ + "foldhash", +] + +[[package]] +name = "hashbrown" +version = "0.16.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "841d1cc9bed7f9236f321df977030373f4a4163ae1a7dbfe1a51a2c1a51d9100" + +[[package]] +name = "heck" +version = "0.5.0" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "2304e00983f87ffb38b55b444b5e3b60a884b5d30c0fca7d82fe33449bbe55ea" + +[[package]] +name = "icu_collections" +version = "2.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4c6b649701667bbe825c3b7e6388cb521c23d88644678e83c0c4d0a621a34b43" +dependencies = [ + "displaydoc", + "potential_utf", + "yoke", + "zerofrom", + "zerovec", +] + +[[package]] +name = "icu_locale_core" +version = "2.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "edba7861004dd3714265b4db54a3c390e880ab658fec5f7db895fae2046b5bb6" +dependencies = [ + "displaydoc", + "litemap", + "tinystr", + "writeable", + "zerovec", +] + +[[package]] +name = "icu_normalizer" +version = "2.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5f6c8828b67bf8908d82127b2054ea1b4427ff0230ee9141c54251934ab1b599" +dependencies = [ + "icu_collections", + "icu_normalizer_data", + "icu_properties", + "icu_provider", + "smallvec", + "zerovec", +] + +[[package]] +name = "icu_normalizer_data" +version = "2.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7aedcccd01fc5fe81e6b489c15b247b8b0690feb23304303a9e560f37efc560a" + +[[package]] +name = "icu_properties" +version = "2.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "020bfc02fe870ec3a66d93e677ccca0562506e5872c650f893269e08615d74ec" +dependencies = [ + "icu_collections", + "icu_locale_core", + "icu_properties_data", + "icu_provider", + "zerotrie", + "zerovec", +] + +[[package]] +name = "icu_properties_data" +version = "2.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "616c294cf8d725c6afcd8f55abc17c56464ef6211f9ed59cccffe534129c77af" + +[[package]] +name = "icu_provider" +version = "2.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"85962cf0ce02e1e0a629cc34e7ca3e373ce20dda4c4d7294bbd0bf1fdb59e614" +dependencies = [ + "displaydoc", + "icu_locale_core", + "writeable", + "yoke", + "zerofrom", + "zerotrie", + "zerovec", +] + +[[package]] +name = "id-arena" +version = "2.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3d3067d79b975e8844ca9eb072e16b31c3c1c36928edf9c6789548c524d0d954" + +[[package]] +name = "idna" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3b0875f23caa03898994f6ddc501886a45c7d3d62d04d2d90788d47be1b1e4de" +dependencies = [ + "idna_adapter", + "smallvec", + "utf8_iter", +] + +[[package]] +name = "idna_adapter" +version = "1.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3acae9609540aa318d1bc588455225fb2085b9ed0c4f6bd0d9d5bcd86f1a0344" +dependencies = [ + "icu_normalizer", + "icu_properties", +] + +[[package]] +name = "indexmap" +version = "2.13.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7714e70437a7dc3ac8eb7e6f8df75fd8eb422675fc7678aff7364301092b1017" +dependencies = [ + "equivalent", + "hashbrown 0.16.1", + "serde", + "serde_core", +] + +[[package]] +name = "itertools" +version = "0.14.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2b192c782037fadd9cfa75548310488aabdbf3d2da73885b31bd0abd03351285" +dependencies = [ + "either", +] + +[[package]] +name = "itoa" +version = "1.0.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "92ecc6618181def0457392ccd0ee51198e065e016d1d527a7ac1b6dc7c1f09d2" + +[[package]] +name = "leb128fmt" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "09edd9e8b54e49e587e4f6295a7d29c3ea94d469cb40ab8ca70b288248a81db2" + +[[package]] +name = "litemap" +version = "0.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"6373607a59f0be73a39b6fe456b8192fcc3585f602af20751600e974dd455e77" + +[[package]] +name = "log" +version = "0.4.29" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5e5032e24019045c762d3c0f28f5b6b8bbf38563a65908389bf7978758920897" + +[[package]] +name = "memchr" +version = "2.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f8ca58f447f06ed17d5fc4043ce1b10dd205e060fb3ce5b979b8ed8e59ff3f79" + +[[package]] +name = "miniz_oxide" +version = "0.8.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1fa76a2c86f704bdb222d66965fb3d63269ce38518b83cb0575fca855ebb6316" +dependencies = [ + "adler2", + "simd-adler32", +] + +[[package]] +name = "once_cell" +version = "1.21.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "42f5e15c9953c5e4ccceeb2e7382a716482c34515315f7b03532b8b4e8393d2d" + +[[package]] +name = "percent-encoding" +version = "2.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9b4f627cb1b25917193a259e49bdad08f671f8d9708acfd5fe0a8c1455d87220" + +[[package]] +name = "pin-project-lite" +version = "0.2.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a89322df9ebe1c1578d689c92318e070967d1042b512afbe49518723f4e6d5cd" + +[[package]] +name = "potential_utf" +version = "0.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b73949432f5e2a09657003c25bca5e19a0e9c84f8058ca374f49e0ebe605af77" +dependencies = [ + "zerovec", +] + +[[package]] +name = "prettyplease" +version = "0.2.37" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "479ca8adacdd7ce8f1fb39ce9ecccbfe93a3f1344b3d0d97f20bc0196208f62b" +dependencies = [ + "proc-macro2", + "syn", +] + +[[package]] +name = "proc-macro2" +version = "1.0.106" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"8fd00f0bb2e90d81d1044c2b32617f68fcb9fa3bb7640c23e9c748e53fb30934" +dependencies = [ + "unicode-ident", +] + +[[package]] +name = "prost" +version = "0.13.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2796faa41db3ec313a31f7624d9286acf277b52de526150b7e69f3debf891ee5" +dependencies = [ + "bytes", + "prost-derive", +] + +[[package]] +name = "prost-derive" +version = "0.13.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8a56d757972c98b346a9b766e3f02746cde6dd1cd1d1d563472929fdd74bec4d" +dependencies = [ + "anyhow", + "itertools", + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "quote" +version = "1.0.45" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "41f2619966050689382d2b44f664f4bc593e129785a36d6ee376ddf37259b924" +dependencies = [ + "proc-macro2", +] + +[[package]] +name = "semver" +version = "1.0.27" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d767eb0aabc880b29956c35734170f26ed551a859dbd361d140cdbeca61ab1e2" +dependencies = [ + "serde", + "serde_core", +] + +[[package]] +name = "serde" +version = "1.0.228" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9a8e94ea7f378bd32cbbd37198a4a91436180c5bb472411e48b5ec2e2124ae9e" +dependencies = [ + "serde_core", + "serde_derive", +] + +[[package]] +name = "serde_core" +version = "1.0.228" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "41d385c7d4ca58e59fc732af25c3983b67ac852c1a25000afe1175de458b67ad" +dependencies = [ + "serde_derive", +] + +[[package]] +name = "serde_derive" +version = "1.0.228" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d540f220d3187173da220f885ab66608367b6574e925011a9353e4badda91d79" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "serde_json" +version = "1.0.149" +source = "registry+https://github.com/rust-lang/crates.io-index" 
+checksum = "83fc039473c5595ace860d8c4fafa220ff474b3fc6bfdb4293327f1a37e94d86" +dependencies = [ + "itoa", + "memchr", + "serde", + "serde_core", + "zmij", +] + +[[package]] +name = "simd-adler32" +version = "0.3.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e320a6c5ad31d271ad523dcf3ad13e2767ad8b1cb8f047f75a8aeaf8da139da2" + +[[package]] +name = "slab" +version = "0.4.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0c790de23124f9ab44544d7ac05d60440adc586479ce501c1d6d7da3cd8c9cf5" + +[[package]] +name = "smallvec" +version = "1.15.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "67b1b7a3b5fe4f1376887184045fcf45c69e92af734b7aaddc05fb777b6fbd03" + +[[package]] +name = "spdx" +version = "0.10.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c3e17e880bafaeb362a7b751ec46bdc5b61445a188f80e0606e68167cd540fa3" +dependencies = [ + "smallvec", +] + +[[package]] +name = "stable_deref_trait" +version = "1.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6ce2be8dc25455e1f91df71bfa12ad37d7af1092ae736f3a6cd0e37bc7810596" + +[[package]] +name = "syn" +version = "2.0.117" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e665b8803e7b1d2a727f4023456bbbbe74da67099c585258af0ad9c5013b9b99" +dependencies = [ + "proc-macro2", + "quote", + "unicode-ident", +] + +[[package]] +name = "synstructure" +version = "0.13.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "728a70f3dbaf5bab7f0c4b1ac8d7ae5ea60a4b5549c8a5914361c99147a709d2" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "tinystr" +version = "0.8.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "42d3e9c45c09de15d06dd8acf5f4e0e399e85927b7f00711024eb7ae10fa4869" +dependencies = [ + "displaydoc", + "zerovec", +] + +[[package]] +name = 
"topological-sort" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ea68304e134ecd095ac6c3574494fc62b909f416c4fca77e440530221e549d3d" + +[[package]] +name = "unicode-ident" +version = "1.0.24" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e6e4313cd5fcd3dad5cafa179702e2b244f760991f45397d14d4ebf38247da75" + +[[package]] +name = "unicode-xid" +version = "0.2.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ebc1c04c71510c7f702b52b7c350734c9ff1295c464a03335b00bb84fc54f853" + +[[package]] +name = "url" +version = "2.5.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ff67a8a4397373c3ef660812acab3268222035010ab8680ec4215f38ba3d0eed" +dependencies = [ + "form_urlencoded", + "idna", + "percent-encoding", + "serde", +] + +[[package]] +name = "utf8_iter" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b6c140620e7ffbb22c2dee59cafe6084a59b5ffc27a8859a5f0d494b5d52b6be" + +[[package]] +name = "wasm-encoder" +version = "0.227.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "80bb72f02e7fbf07183443b27b0f3d4144abf8c114189f2e088ed95b696a7822" +dependencies = [ + "leb128fmt", + "wasmparser", +] + +[[package]] +name = "wasm-metadata" +version = "0.227.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ce1ef0faabbbba6674e97a56bee857ccddf942785a336c8b47b42373c922a91d" +dependencies = [ + "anyhow", + "auditable-serde", + "flate2", + "indexmap", + "serde", + "serde_derive", + "serde_json", + "spdx", + "url", + "wasm-encoder", + "wasmparser", +] + +[[package]] +name = "wasmparser" +version = "0.227.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0f51cad774fb3c9461ab9bccc9c62dfb7388397b5deda31bf40e8108ccd678b2" +dependencies = [ + "bitflags", + "hashbrown 0.15.5", + "indexmap", + "semver", +] + 
+[[package]] +name = "wit-bindgen" +version = "0.41.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "10fb6648689b3929d56bbc7eb1acf70c9a42a29eb5358c67c10f54dbd5d695de" +dependencies = [ + "wit-bindgen-rt", + "wit-bindgen-rust-macro", +] + +[[package]] +name = "wit-bindgen-core" +version = "0.41.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "92fa781d4f2ff6d3f27f3cc9b74a73327b31ca0dc4a3ef25a0ce2983e0e5af9b" +dependencies = [ + "anyhow", + "heck", + "wit-parser", +] + +[[package]] +name = "wit-bindgen-rt" +version = "0.41.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c4db52a11d4dfb0a59f194c064055794ee6564eb1ced88c25da2cf76e50c5621" +dependencies = [ + "bitflags", + "futures", + "once_cell", +] + +[[package]] +name = "wit-bindgen-rust" +version = "0.41.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9d0809dc5ba19e2e98661bf32fc0addc5a3ca5bf3a6a7083aa6ba484085ff3ce" +dependencies = [ + "anyhow", + "heck", + "indexmap", + "prettyplease", + "syn", + "wasm-metadata", + "wit-bindgen-core", + "wit-component", +] + +[[package]] +name = "wit-bindgen-rust-macro" +version = "0.41.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ad19eec017904e04c60719592a803ee5da76cb51c81e3f6fbf9457f59db49799" +dependencies = [ + "anyhow", + "prettyplease", + "proc-macro2", + "quote", + "syn", + "wit-bindgen-core", + "wit-bindgen-rust", +] + +[[package]] +name = "wit-component" +version = "0.227.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "635c3adc595422cbf2341a17fb73a319669cc8d33deed3a48368a841df86b676" +dependencies = [ + "anyhow", + "bitflags", + "indexmap", + "log", + "serde", + "serde_derive", + "serde_json", + "wasm-encoder", + "wasm-metadata", + "wasmparser", + "wit-parser", +] + +[[package]] +name = "wit-parser" +version = "0.227.1" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "ddf445ed5157046e4baf56f9138c124a0824d4d1657e7204d71886ad8ce2fc11" +dependencies = [ + "anyhow", + "id-arena", + "indexmap", + "log", + "semver", + "serde", + "serde_derive", + "serde_json", + "unicode-xid", + "wasmparser", +] + +[[package]] +name = "writeable" +version = "0.6.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9edde0db4769d2dc68579893f2306b26c6ecfbe0ef499b013d731b7b9247e0b9" + +[[package]] +name = "yoke" +version = "0.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "72d6e5c6afb84d73944e5cedb052c4680d5657337201555f9f2a16b7406d4954" +dependencies = [ + "stable_deref_trait", + "yoke-derive", + "zerofrom", +] + +[[package]] +name = "yoke-derive" +version = "0.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b659052874eb698efe5b9e8cf382204678a0086ebf46982b79d6ca3182927e5d" +dependencies = [ + "proc-macro2", + "quote", + "syn", + "synstructure", +] + +[[package]] +name = "zerofrom" +version = "0.1.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "50cc42e0333e05660c3587f3bf9d0478688e15d870fab3346451ce7f8c9fbea5" +dependencies = [ + "zerofrom-derive", +] + +[[package]] +name = "zerofrom-derive" +version = "0.1.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d71e5d6e06ab090c67b5e44993ec16b72dcbaabc526db883a360057678b48502" +dependencies = [ + "proc-macro2", + "quote", + "syn", + "synstructure", +] + +[[package]] +name = "zerotrie" +version = "0.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2a59c17a5562d507e4b54960e8569ebee33bee890c70aa3fe7b97e85a9fd7851" +dependencies = [ + "displaydoc", + "yoke", + "zerofrom", +] + +[[package]] +name = "zerovec" +version = "0.11.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"6c28719294829477f525be0186d13efa9a3c602f7ec202ca9e353d310fb9a002" +dependencies = [ + "yoke", + "zerofrom", + "zerovec-derive", +] + +[[package]] +name = "zerovec-derive" +version = "0.11.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "eadce39539ca5cb3985590102671f2567e659fca9666581ad3411d59207951f3" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "zmij" +version = "1.0.21" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b8848ee67ecc8aedbaf3e4122217aff892639231befc6a1b58d29fff4c2cabaa" diff --git a/examples/wasm-modules/calculator-tool/Cargo.toml b/examples/wasm-modules/calculator-tool/Cargo.toml new file mode 100644 index 0000000..9d839b1 --- /dev/null +++ b/examples/wasm-modules/calculator-tool/Cargo.toml @@ -0,0 +1,21 @@ +[package] +name = "calculator-tool" +version = "0.1.0" +edition = "2021" + +[lib] +crate-type = ["cdylib"] + +[dependencies] +amplifier-guest = { path = "../../../crates/amplifier-guest" } +serde_json = "1" +wit-bindgen-rt = "0.41" + +[package.metadata.component] +package = "amplifier:calculator-tool" + +[package.metadata.component.target] +world = "tool-module" +path = "wit" + +[workspace] diff --git a/examples/wasm-modules/calculator-tool/amplifier.toml b/examples/wasm-modules/calculator-tool/amplifier.toml new file mode 100644 index 0000000..5fc6c59 --- /dev/null +++ b/examples/wasm-modules/calculator-tool/amplifier.toml @@ -0,0 +1,3 @@ +[module] +transport = "wasm" +type = "tool" diff --git a/examples/wasm-modules/calculator-tool/src/bindings.rs b/examples/wasm-modules/calculator-tool/src/bindings.rs new file mode 100644 index 0000000..c37ae60 --- /dev/null +++ b/examples/wasm-modules/calculator-tool/src/bindings.rs @@ -0,0 +1,220 @@ +// Generated by `wit-bindgen` 0.41.0. DO NOT EDIT! 
+// Options used: +// * runtime_path: "wit_bindgen_rt" +#[rustfmt::skip] +#[allow(dead_code, clippy::all)] +pub mod exports { + pub mod amplifier { + pub mod modules { + /// Tool module interface — exposes a single tool to the kernel. + #[allow(dead_code, async_fn_in_trait, unused_imports, clippy::all)] + pub mod tool { + #[used] + #[doc(hidden)] + static __FORCE_SECTION_REF: fn() = super::super::super::super::__link_custom_section_describing_imports; + use super::super::super::super::_rt; + #[doc(hidden)] + #[allow(non_snake_case)] + pub unsafe fn _export_get_spec_cabi() -> *mut u8 { + #[cfg(target_arch = "wasm32")] _rt::run_ctors_once(); + let result0 = T::get_spec(); + let ptr1 = (&raw mut _RET_AREA.0).cast::(); + let vec2 = (result0).into_boxed_slice(); + let ptr2 = vec2.as_ptr().cast::(); + let len2 = vec2.len(); + ::core::mem::forget(vec2); + *ptr1.add(::core::mem::size_of::<*const u8>()).cast::() = len2; + *ptr1.add(0).cast::<*mut u8>() = ptr2.cast_mut(); + ptr1 + } + #[doc(hidden)] + #[allow(non_snake_case)] + pub unsafe fn __post_return_get_spec(arg0: *mut u8) { + let l0 = *arg0.add(0).cast::<*mut u8>(); + let l1 = *arg0 + .add(::core::mem::size_of::<*const u8>()) + .cast::(); + let base2 = l0; + let len2 = l1; + _rt::cabi_dealloc(base2, len2 * 1, 1); + } + #[doc(hidden)] + #[allow(non_snake_case)] + pub unsafe fn _export_execute_cabi( + arg0: *mut u8, + arg1: usize, + ) -> *mut u8 { + #[cfg(target_arch = "wasm32")] _rt::run_ctors_once(); + let len0 = arg1; + let result1 = T::execute( + _rt::Vec::from_raw_parts(arg0.cast(), len0, len0), + ); + let ptr2 = (&raw mut _RET_AREA.0).cast::(); + match result1 { + Ok(e) => { + *ptr2.add(0).cast::() = (0i32) as u8; + let vec3 = (e).into_boxed_slice(); + let ptr3 = vec3.as_ptr().cast::(); + let len3 = vec3.len(); + ::core::mem::forget(vec3); + *ptr2 + .add(2 * ::core::mem::size_of::<*const u8>()) + .cast::() = len3; + *ptr2 + .add(::core::mem::size_of::<*const u8>()) + .cast::<*mut u8>() = ptr3.cast_mut(); + } + 
Err(e) => { + *ptr2.add(0).cast::() = (1i32) as u8; + let vec4 = (e.into_bytes()).into_boxed_slice(); + let ptr4 = vec4.as_ptr().cast::(); + let len4 = vec4.len(); + ::core::mem::forget(vec4); + *ptr2 + .add(2 * ::core::mem::size_of::<*const u8>()) + .cast::() = len4; + *ptr2 + .add(::core::mem::size_of::<*const u8>()) + .cast::<*mut u8>() = ptr4.cast_mut(); + } + }; + ptr2 + } + #[doc(hidden)] + #[allow(non_snake_case)] + pub unsafe fn __post_return_execute(arg0: *mut u8) { + let l0 = i32::from(*arg0.add(0).cast::()); + match l0 { + 0 => { + let l1 = *arg0 + .add(::core::mem::size_of::<*const u8>()) + .cast::<*mut u8>(); + let l2 = *arg0 + .add(2 * ::core::mem::size_of::<*const u8>()) + .cast::(); + let base3 = l1; + let len3 = l2; + _rt::cabi_dealloc(base3, len3 * 1, 1); + } + _ => { + let l4 = *arg0 + .add(::core::mem::size_of::<*const u8>()) + .cast::<*mut u8>(); + let l5 = *arg0 + .add(2 * ::core::mem::size_of::<*const u8>()) + .cast::(); + _rt::cabi_dealloc(l4, l5, 1); + } + } + } + pub trait Guest { + /// Return the tool specification (ToolSpec proto, serialized). + fn get_spec() -> _rt::Vec; + /// Execute the tool with proto-serialized input (ToolExecuteRequest). + /// Returns proto-serialized ToolExecuteResponse on success. + fn execute(input: _rt::Vec) -> Result<_rt::Vec, _rt::String>; + } + #[doc(hidden)] + macro_rules! 
__export_amplifier_modules_tool_1_0_0_cabi { + ($ty:ident with_types_in $($path_to_types:tt)*) => { + const _ : () = { #[unsafe (export_name = + "amplifier:modules/tool@1.0.0#get-spec")] unsafe extern "C" fn + export_get_spec() -> * mut u8 { unsafe { $($path_to_types)*:: + _export_get_spec_cabi::<$ty > () } } #[unsafe (export_name = + "cabi_post_amplifier:modules/tool@1.0.0#get-spec")] unsafe extern + "C" fn _post_return_get_spec(arg0 : * mut u8,) { unsafe { + $($path_to_types)*:: __post_return_get_spec::<$ty > (arg0) } } + #[unsafe (export_name = "amplifier:modules/tool@1.0.0#execute")] + unsafe extern "C" fn export_execute(arg0 : * mut u8, arg1 : + usize,) -> * mut u8 { unsafe { $($path_to_types)*:: + _export_execute_cabi::<$ty > (arg0, arg1) } } #[unsafe + (export_name = "cabi_post_amplifier:modules/tool@1.0.0#execute")] + unsafe extern "C" fn _post_return_execute(arg0 : * mut u8,) { + unsafe { $($path_to_types)*:: __post_return_execute::<$ty > + (arg0) } } }; + }; + } + #[doc(hidden)] + pub(crate) use __export_amplifier_modules_tool_1_0_0_cabi; + #[cfg_attr(target_pointer_width = "64", repr(align(8)))] + #[cfg_attr(target_pointer_width = "32", repr(align(4)))] + struct _RetArea( + [::core::mem::MaybeUninit< + u8, + >; 3 * ::core::mem::size_of::<*const u8>()], + ); + static mut _RET_AREA: _RetArea = _RetArea( + [::core::mem::MaybeUninit::uninit(); 3 + * ::core::mem::size_of::<*const u8>()], + ); + } + } + } +} +#[rustfmt::skip] +mod _rt { + #![allow(dead_code, clippy::all)] + #[cfg(target_arch = "wasm32")] + pub fn run_ctors_once() { + wit_bindgen_rt::run_ctors_once(); + } + pub unsafe fn cabi_dealloc(ptr: *mut u8, size: usize, align: usize) { + if size == 0 { + return; + } + let layout = alloc::Layout::from_size_align_unchecked(size, align); + alloc::dealloc(ptr, layout); + } + pub use alloc_crate::vec::Vec; + pub use alloc_crate::string::String; + pub use alloc_crate::alloc; + extern crate alloc as alloc_crate; +} +/// Generates `#[unsafe(no_mangle)]` 
functions to export the specified type as +/// the root implementation of all generated traits. +/// +/// For more information see the documentation of `wit_bindgen::generate!`. +/// +/// ```rust +/// # macro_rules! export{ ($($t:tt)*) => (); } +/// # trait Guest {} +/// struct MyType; +/// +/// impl Guest for MyType { +/// // ... +/// } +/// +/// export!(MyType); +/// ``` +#[allow(unused_macros)] +#[doc(hidden)] +macro_rules! __export_tool_module_impl { + ($ty:ident) => { + self::export!($ty with_types_in self); + }; + ($ty:ident with_types_in $($path_to_types_root:tt)*) => { + $($path_to_types_root)*:: + exports::amplifier::modules::tool::__export_amplifier_modules_tool_1_0_0_cabi!($ty + with_types_in $($path_to_types_root)*:: exports::amplifier::modules::tool); + }; +} +#[doc(inline)] +pub(crate) use __export_tool_module_impl as export; +#[cfg(target_arch = "wasm32")] +#[unsafe( + link_section = "component-type:wit-bindgen:0.41.0:amplifier:modules@1.0.0:tool-module:encoded world" +)] +#[doc(hidden)] +#[allow(clippy::octal_escapes)] +pub static __WIT_BINDGEN_COMPONENT_TYPE: [u8; 263] = *b"\ +\0asm\x0d\0\x01\0\0\x19\x16wit-component-encoding\x04\0\x07\x85\x01\x01A\x02\x01\ +A\x02\x01B\x06\x01p}\x01@\0\0\0\x04\0\x08get-spec\x01\x01\x01j\x01\0\x01s\x01@\x01\ +\x05input\0\0\x02\x04\0\x07execute\x01\x03\x04\0\x1camplifier:modules/tool@1.0.0\ +\x05\0\x04\0#amplifier:modules/tool-module@1.0.0\x04\0\x0b\x11\x01\0\x0btool-mod\ +ule\x03\0\0\0G\x09producers\x01\x0cprocessed-by\x02\x0dwit-component\x070.227.1\x10\ +wit-bindgen-rust\x060.41.0"; +#[inline(never)] +#[doc(hidden)] +pub fn __link_custom_section_describing_imports() { + wit_bindgen_rt::maybe_link_cabi_realloc(); +} diff --git a/examples/wasm-modules/calculator-tool/src/lib.rs b/examples/wasm-modules/calculator-tool/src/lib.rs new file mode 100644 index 0000000..77d6325 --- /dev/null +++ b/examples/wasm-modules/calculator-tool/src/lib.rs @@ -0,0 +1,96 @@ +#[allow(warnings)] +mod bindings; + +use 
amplifier_guest::{Tool, ToolResult, ToolSpec, Value}; +use std::collections::HashMap; + +#[derive(Default)] +struct CalculatorTool; + +impl Tool for CalculatorTool { + fn name(&self) -> &str { + "calculator" + } + + fn get_spec(&self) -> ToolSpec { + let mut params = HashMap::new(); + params.insert("type".to_string(), serde_json::json!("object")); + params.insert( + "properties".to_string(), + serde_json::json!({"expression": {"type": "string", "description": "A simple math expression like '2 + 3' or '10 / 5'"}}), + ); + params.insert( + "required".to_string(), + serde_json::json!(["expression"]), + ); + ToolSpec { + name: "calculator".to_string(), + parameters: params, + description: Some( + "Evaluates simple math expressions (a op b) supporting +, -, *, /".to_string(), + ), + } + } + + fn execute(&self, input: Value) -> Result { + let expression = input + .get("expression") + .and_then(|v| v.as_str()) + .ok_or_else(|| "missing 'expression' string parameter".to_string())?; + + match eval_simple(expression) { + Ok(result) => Ok(ToolResult { + success: true, + output: Some(serde_json::json!({ "result": result })), + error: None, + }), + Err(e) => Ok(ToolResult { + success: false, + output: None, + error: Some({ + let mut m = HashMap::new(); + m.insert("message".to_string(), serde_json::json!(e)); + m + }), + }), + } + } +} + +/// Evaluate a simple "a op b" expression. +/// +/// Supports +, -, *, / operators with f64 operands. +/// Returns an error for division by zero or malformed expressions. 
+fn eval_simple(expr: &str) -> Result { + let parts: Vec<&str> = expr.trim().split_whitespace().collect(); + if parts.len() != 3 { + return Err(format!( + "expected 'a op b' format (3 tokens), got {} tokens", + parts.len() + )); + } + + let a: f64 = parts[0] + .parse() + .map_err(|_| format!("invalid number: {}", parts[0]))?; + let op = parts[1]; + let b: f64 = parts[2] + .parse() + .map_err(|_| format!("invalid number: {}", parts[2]))?; + + match op { + "+" => Ok(a + b), + "-" => Ok(a - b), + "*" => Ok(a * b), + "/" => { + if b == 0.0 { + Err("division by zero".to_string()) + } else { + Ok(a / b) + } + } + _ => Err(format!("unsupported operator: {op}")), + } +} + +amplifier_guest::export_tool!(CalculatorTool); diff --git a/examples/wasm-modules/calculator-tool/wit/tool.wit b/examples/wasm-modules/calculator-tool/wit/tool.wit new file mode 100644 index 0000000..ff749c3 --- /dev/null +++ b/examples/wasm-modules/calculator-tool/wit/tool.wit @@ -0,0 +1,20 @@ +// Minimal WIT for tool-module world. +// Extracted from the main amplifier-modules.wit to avoid pulling in +// WASI HTTP dependencies that are only needed by the provider-module world. + +package amplifier:modules@1.0.0; + +/// Tool module interface — exposes a single tool to the kernel. +interface tool { + /// Return the tool specification (ToolSpec proto, serialized). + get-spec: func() -> list; + + /// Execute the tool with proto-serialized input (ToolExecuteRequest). + /// Returns proto-serialized ToolExecuteResponse on success. + execute: func(input: list) -> result, string>; +} + +/// Tier 1: Pure-compute tool module. 
+world tool-module { + export tool; +} diff --git a/proto/amplifier_module.proto b/proto/amplifier_module.proto index 61ed1d2..56f7bba 100644 --- a/proto/amplifier_module.proto +++ b/proto/amplifier_module.proto @@ -299,12 +299,12 @@ message ResponseFormat { // --- Token usage and degradation --- message Usage { - int32 prompt_tokens = 1; - int32 completion_tokens = 2; - int32 total_tokens = 3; - int32 reasoning_tokens = 4; - int32 cache_read_tokens = 5; - int32 cache_creation_tokens = 6; + int32 prompt_tokens = 1; + int32 completion_tokens = 2; + int32 total_tokens = 3; + optional int32 reasoning_tokens = 4; + optional int32 cache_read_tokens = 5; + optional int32 cache_creation_tokens = 6; } message Degradation { @@ -397,7 +397,7 @@ message HookResult { string approval_prompt = 7; repeated string approval_options = 8; // Default: 300.0 seconds (5 minutes). - double approval_timeout = 9; + optional double approval_timeout = 9; ApprovalDefault approval_default = 10; bool suppress_output = 11; string user_message = 12; @@ -425,11 +425,11 @@ message ProviderInfo { } message ApprovalRequest { - string tool_name = 1; - string action = 2; - string details_json = 3; - string risk_level = 4; - double timeout = 5; + string tool_name = 1; + string action = 2; + string details_json = 3; + string risk_level = 4; + optional double timeout = 5; } message ApprovalResponse { diff --git a/python/amplifier_core/_engine.pyi b/python/amplifier_core/_engine.pyi index b81cc00..beaee4f 100644 --- a/python/amplifier_core/_engine.pyi +++ b/python/amplifier_core/_engine.pyi @@ -331,6 +331,50 @@ class RetryConfig: # Retry utility functions (PyO3 bridge) # --------------------------------------------------------------------------- +# --------------------------------------------------------------------------- +# Module resolver functions (PyO3 bridge — Task 7/8) +# --------------------------------------------------------------------------- + +def resolve_module(source_path: str) -> dict[str, 
Any]: + """Resolve a module's manifest from its filesystem path. + + Reads the amplifier.toml in the given directory and returns a dict + containing at minimum a ``"transport"`` key (e.g. ``"python"``, + ``"grpc"``, ``"wasm"``, or ``"native"``). + + Raises: + ValueError: If the path cannot be resolved or the manifest is invalid. + """ + ... + +def load_wasm_from_path(source_path: str) -> dict[str, Any]: + """Load a WASM module from the given filesystem path. + + Returns a dict with ``"status"`` and ``"module_type"`` on success. + + Raises: + ValueError: If the path does not contain a valid WASM module. + """ + ... + +def load_and_mount_wasm(coordinator: RustCoordinator, path: str) -> dict[str, Any]: + """Load a WASM module and mount it into a coordinator's mount_points. + + Unlike ``load_wasm_from_path`` (which loads into a throwaway coordinator), + this function mounts the loaded module directly into the given coordinator's + ``mount_points`` dict, making it available for orchestrator use. + + Returns a dict with: + - ``"status"``: ``"mounted"`` if mounted, ``"loaded"`` if loaded but not auto-mounted + - ``"module_type"``: the detected module type string + - ``"name"``: the module name (for tool modules) + + Raises: + ValueError: If the path doesn't contain a WASM module. + RuntimeError: If engine creation or module loading fails. + """ + ... + def classify_error_message(message: str) -> str: """Classify an error message string into an error category. diff --git a/python/amplifier_core/loader_dispatch.py b/python/amplifier_core/loader_dispatch.py index 111dc4e..991c2ce 100644 --- a/python/amplifier_core/loader_dispatch.py +++ b/python/amplifier_core/loader_dispatch.py @@ -55,18 +55,18 @@ def _detect_transport(source_path: str) -> str: async def load_module( module_id: str, config: dict[str, Any] | None, - source_path: str, + source_path: str | None, coordinator: Any, ) -> Any: """Load a module from a resolved source path. 
- Checks for amplifier.toml to determine transport type. + Uses the Rust module resolver to auto-detect transport type. Falls back to Python loader for backward compatibility. Args: module_id: Module identifier (e.g., "tool-database") config: Optional module configuration dict - source_path: Resolved filesystem path to the module + source_path: Resolved filesystem path to the module (or None) coordinator: The coordinator instance (RustCoordinator or ModuleCoordinator) Returns: @@ -76,26 +76,54 @@ async def load_module( NotImplementedError: For transport types not yet supported ValueError: If module cannot be loaded """ - meta = _read_module_meta(source_path) - transport = meta.get("module", {}).get("transport", "python") if meta else "python" + # No source path means we can't detect transport — fall through to Python loader + if source_path is None: + from .loader import ModuleLoader + + loader = coordinator.loader or ModuleLoader(coordinator=coordinator) + return await loader.load(module_id, config, source_hint=None) + + try: + from amplifier_core._engine import resolve_module as rust_resolve + + manifest = rust_resolve(source_path) + transport = manifest.get("transport", "python") + except ImportError: + logger.debug("Rust engine not available, using Python-only transport detection") + transport = _detect_transport(source_path) + except Exception as e: + logger.debug( + f"Rust resolver failed for '{module_id}': {e}, falling back to Python detection" + ) + transport = _detect_transport(source_path) if transport == "grpc": from .loader_grpc import load_grpc_module + meta = _read_module_meta(source_path) return await load_grpc_module(module_id, config, meta, coordinator) + if transport == "wasm": + try: + from amplifier_core._engine import load_and_mount_wasm + + async def _wasm_mount(coord: Any) -> None: + result = load_and_mount_wasm(coord, source_path) + logger.info(f"[module:mount] {module_id} mounted via WASM: {result}") + + return _wasm_mount + except 
ImportError: + raise NotImplementedError( + f"WASM module loading for '{module_id}' requires the Rust engine. " + "Install amplifier-core with Rust extensions enabled." + ) + if transport == "native": raise NotImplementedError( f"Native Rust module loading not yet implemented for '{module_id}'. " "Use transport = 'grpc' to load Rust modules as gRPC services." ) - if transport == "wasm": - raise NotImplementedError( - f"WASM module loading not yet implemented for '{module_id}'. " - "Use transport = 'grpc' to load WASM modules as gRPC services." - ) - # Default: existing Python loader (backward compatible) from .loader import ModuleLoader diff --git a/python/tests/test_loader_dispatch_wasm.py b/python/tests/test_loader_dispatch_wasm.py new file mode 100644 index 0000000..611fc3e --- /dev/null +++ b/python/tests/test_loader_dispatch_wasm.py @@ -0,0 +1,106 @@ +"""Tests for WASM module mounting via loader_dispatch. + +Verifies that WASM modules loaded through loader_dispatch are actually +mounted into the coordinator's mount_points, not just loaded and discarded. + +Uses mocks to avoid slow WASM compilation on ARM64 while still verifying +the critical behavior: _noop_mount is replaced with a real bridge that +calls load_and_mount_wasm. +""" + +import os +import sys +import tempfile +from unittest.mock import MagicMock, patch + +import pytest + + +@pytest.fixture +def fixture_dir(): + """Create a temp directory referencing the echo-tool fixture location.""" + # Use the real fixture path for documentation clarity, but the mock + # means we won't actually read WASM files during the test. 
+ fixture_base = os.path.join( + os.path.dirname(__file__), + "..", + "..", + "tests", + "fixtures", + "wasm", + ) + wasm_path = os.path.join(fixture_base, "echo-tool.wasm") + if not os.path.exists(wasm_path): + pytest.skip(f"WASM fixture not found: {wasm_path}") + + with tempfile.TemporaryDirectory() as tmpdir: + # Write an amplifier.toml so Python fallback detects wasm transport + toml_path = os.path.join(tmpdir, "amplifier.toml") + with open(toml_path, "w") as f: + f.write('[module]\ntransport = "wasm"\ntype = "tool"\n') + yield tmpdir + + +@pytest.mark.asyncio +async def test_wasm_tool_mounts_into_coordinator(fixture_dir): + """WASM tool loaded via loader_dispatch is actually registered in coordinator.mount_points['tools']. + + With the old _noop_mount, the mount function did nothing and the tool + was never registered. With the real bridge, load_and_mount_wasm is + called at mount time and the tool appears in mount_points['tools']. + """ + from amplifier_core.loader_dispatch import load_module + + # Mock coordinator with real mount_points dict structure + coordinator = MagicMock() + coordinator.loader = None + coordinator.mount_points = { + "orchestrator": None, + "providers": {}, + "tools": {}, + "context": None, + "hooks": MagicMock(), + "module-source-resolver": None, + } + + # Mock the Rust _engine module + fake_engine = MagicMock() + fake_engine.resolve_module.return_value = { + "transport": "wasm", + "name": "echo-tool", + } + + # Simulate what load_and_mount_wasm does: mount tool into coordinator + def fake_load_and_mount(coord, path): + tool_mock = MagicMock() + tool_mock.name = "echo-tool" + coord.mount_points["tools"]["echo-tool"] = tool_mock + return {"status": "mounted", "module_type": "tool", "name": "echo-tool"} + + fake_engine.load_and_mount_wasm = MagicMock(side_effect=fake_load_and_mount) + # Also provide load_wasm_from_path for backward compat (old code path) + fake_engine.load_wasm_from_path.return_value = { + "status": "loaded", + 
"module_type": "tool", + } + + with patch.dict(sys.modules, {"amplifier_core._engine": fake_engine}): + mount_fn = await load_module("echo-tool", {}, fixture_dir, coordinator) + + # mount_fn must be callable + assert callable(mount_fn) + + # Before calling mount: tools should still be empty + assert "echo-tool" not in coordinator.mount_points["tools"] + + # Call the mount function — this is where the tool gets registered + await mount_fn(coordinator) # type: ignore[misc] + + # The tool must now be in the coordinator's mount_points + tools = coordinator.mount_points["tools"] + assert "echo-tool" in tools, ( + f"'echo-tool' not found in mount_points['tools']. Keys: {list(tools.keys())}" + ) + + # Verify load_and_mount_wasm was called with the coordinator and path + fake_engine.load_and_mount_wasm.assert_called_once_with(coordinator, fixture_dir) diff --git a/tests/fixtures/wasm/auto-approve.wasm b/tests/fixtures/wasm/auto-approve.wasm new file mode 100644 index 0000000..fc27dd7 Binary files /dev/null and b/tests/fixtures/wasm/auto-approve.wasm differ diff --git a/tests/fixtures/wasm/build-fixtures.sh b/tests/fixtures/wasm/build-fixtures.sh new file mode 100755 index 0000000..48728ff --- /dev/null +++ b/tests/fixtures/wasm/build-fixtures.sh @@ -0,0 +1,32 @@ +#!/usr/bin/env bash +# Recompile all WASM test fixtures from source. 
+# +# Run from the amplifier-core root: +# bash tests/fixtures/wasm/build-fixtures.sh + +set -euo pipefail + +SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" +FIXTURES_DIR="$SCRIPT_DIR" +SRC_DIR="$FIXTURES_DIR/src" + +echo "=== Building WASM test fixtures ===" + +for module_dir in "$SRC_DIR"/*/; do + module_name=$(basename "$module_dir") + echo "--- Building $module_name ---" + (cd "$module_dir" && cargo component build --release) + + # Find the .wasm output + wasm_file=$(find "$module_dir/target" -name "*.wasm" -path "*/release/*" | head -1) + if [ -z "$wasm_file" ]; then + echo "ERROR: No .wasm file found for $module_name" + exit 1 + fi + + # Copy to fixtures directory with kebab-case name + cp "$wasm_file" "$FIXTURES_DIR/$module_name.wasm" + echo " -> $FIXTURES_DIR/$module_name.wasm ($(wc -c < "$FIXTURES_DIR/$module_name.wasm") bytes)" +done + +echo "=== All fixtures built successfully ===" diff --git a/tests/fixtures/wasm/deny-hook.wasm b/tests/fixtures/wasm/deny-hook.wasm new file mode 100644 index 0000000..8bf24df Binary files /dev/null and b/tests/fixtures/wasm/deny-hook.wasm differ diff --git a/tests/fixtures/wasm/echo-provider.wasm b/tests/fixtures/wasm/echo-provider.wasm new file mode 100644 index 0000000..d047493 Binary files /dev/null and b/tests/fixtures/wasm/echo-provider.wasm differ diff --git a/tests/fixtures/wasm/echo-tool.wasm b/tests/fixtures/wasm/echo-tool.wasm new file mode 100644 index 0000000..78afa2c Binary files /dev/null and b/tests/fixtures/wasm/echo-tool.wasm differ diff --git a/tests/fixtures/wasm/infinite-loop.wasm b/tests/fixtures/wasm/infinite-loop.wasm new file mode 100644 index 0000000..fb1fdc9 Binary files /dev/null and b/tests/fixtures/wasm/infinite-loop.wasm differ diff --git a/tests/fixtures/wasm/memory-context.wasm b/tests/fixtures/wasm/memory-context.wasm new file mode 100644 index 0000000..426b2d0 Binary files /dev/null and b/tests/fixtures/wasm/memory-context.wasm differ diff --git 
a/tests/fixtures/wasm/passthrough-orchestrator.wasm b/tests/fixtures/wasm/passthrough-orchestrator.wasm new file mode 100644 index 0000000..b5bcdcf Binary files /dev/null and b/tests/fixtures/wasm/passthrough-orchestrator.wasm differ diff --git a/tests/fixtures/wasm/src/auto-approve/.gitignore b/tests/fixtures/wasm/src/auto-approve/.gitignore new file mode 100644 index 0000000..b83d222 --- /dev/null +++ b/tests/fixtures/wasm/src/auto-approve/.gitignore @@ -0,0 +1 @@ +/target/ diff --git a/tests/fixtures/wasm/src/auto-approve/Cargo.toml b/tests/fixtures/wasm/src/auto-approve/Cargo.toml new file mode 100644 index 0000000..54eeb57 --- /dev/null +++ b/tests/fixtures/wasm/src/auto-approve/Cargo.toml @@ -0,0 +1,21 @@ +[package] +name = "auto-approve" +version = "0.1.0" +edition = "2021" + +[lib] +crate-type = ["cdylib"] + +[dependencies] +amplifier-guest = { path = "../../../../../crates/amplifier-guest" } +serde_json = "1" +wit-bindgen-rt = "0.41" + +[package.metadata.component] +package = "amplifier:auto-approve" + +[package.metadata.component.target] +world = "approval-module" +path = "wit" + +[workspace] diff --git a/tests/fixtures/wasm/src/auto-approve/amplifier.toml b/tests/fixtures/wasm/src/auto-approve/amplifier.toml new file mode 100644 index 0000000..6017556 --- /dev/null +++ b/tests/fixtures/wasm/src/auto-approve/amplifier.toml @@ -0,0 +1,4 @@ +[module] +transport = "wasm" +type = "approval" + diff --git a/tests/fixtures/wasm/src/auto-approve/src/bindings.rs b/tests/fixtures/wasm/src/auto-approve/src/bindings.rs new file mode 100644 index 0000000..454938c --- /dev/null +++ b/tests/fixtures/wasm/src/auto-approve/src/bindings.rs @@ -0,0 +1,191 @@ +// Generated by `wit-bindgen` 0.41.0. DO NOT EDIT! +// Options used: +// * runtime_path: "wit_bindgen_rt" +#[rustfmt::skip] +#[allow(dead_code, clippy::all)] +pub mod exports { + pub mod amplifier { + pub mod modules { + /// Approval provider interface — human-in-the-loop approval gate. 
+ #[allow(dead_code, async_fn_in_trait, unused_imports, clippy::all)] + pub mod approval_provider { + #[used] + #[doc(hidden)] + static __FORCE_SECTION_REF: fn() = super::super::super::super::__link_custom_section_describing_imports; + use super::super::super::super::_rt; + #[doc(hidden)] + #[allow(non_snake_case)] + pub unsafe fn _export_request_approval_cabi( + arg0: *mut u8, + arg1: usize, + ) -> *mut u8 { + #[cfg(target_arch = "wasm32")] _rt::run_ctors_once(); + let len0 = arg1; + let result1 = T::request_approval( + _rt::Vec::from_raw_parts(arg0.cast(), len0, len0), + ); + let ptr2 = (&raw mut _RET_AREA.0).cast::(); + match result1 { + Ok(e) => { + *ptr2.add(0).cast::() = (0i32) as u8; + let vec3 = (e).into_boxed_slice(); + let ptr3 = vec3.as_ptr().cast::(); + let len3 = vec3.len(); + ::core::mem::forget(vec3); + *ptr2 + .add(2 * ::core::mem::size_of::<*const u8>()) + .cast::() = len3; + *ptr2 + .add(::core::mem::size_of::<*const u8>()) + .cast::<*mut u8>() = ptr3.cast_mut(); + } + Err(e) => { + *ptr2.add(0).cast::() = (1i32) as u8; + let vec4 = (e.into_bytes()).into_boxed_slice(); + let ptr4 = vec4.as_ptr().cast::(); + let len4 = vec4.len(); + ::core::mem::forget(vec4); + *ptr2 + .add(2 * ::core::mem::size_of::<*const u8>()) + .cast::() = len4; + *ptr2 + .add(::core::mem::size_of::<*const u8>()) + .cast::<*mut u8>() = ptr4.cast_mut(); + } + }; + ptr2 + } + #[doc(hidden)] + #[allow(non_snake_case)] + pub unsafe fn __post_return_request_approval(arg0: *mut u8) { + let l0 = i32::from(*arg0.add(0).cast::()); + match l0 { + 0 => { + let l1 = *arg0 + .add(::core::mem::size_of::<*const u8>()) + .cast::<*mut u8>(); + let l2 = *arg0 + .add(2 * ::core::mem::size_of::<*const u8>()) + .cast::(); + let base3 = l1; + let len3 = l2; + _rt::cabi_dealloc(base3, len3 * 1, 1); + } + _ => { + let l4 = *arg0 + .add(::core::mem::size_of::<*const u8>()) + .cast::<*mut u8>(); + let l5 = *arg0 + .add(2 * ::core::mem::size_of::<*const u8>()) + .cast::(); + _rt::cabi_dealloc(l4, l5, 
1); + } + } + } + pub trait Guest { + /// Request approval from the user (ApprovalRequest proto, serialized). + /// Returns proto-serialized ApprovalResponse on success. + fn request_approval( + request: _rt::Vec, + ) -> Result<_rt::Vec, _rt::String>; + } + #[doc(hidden)] + macro_rules! __export_amplifier_modules_approval_provider_1_0_0_cabi { + ($ty:ident with_types_in $($path_to_types:tt)*) => { + const _ : () = { #[unsafe (export_name = + "amplifier:modules/approval-provider@1.0.0#request-approval")] + unsafe extern "C" fn export_request_approval(arg0 : * mut u8, + arg1 : usize,) -> * mut u8 { unsafe { $($path_to_types)*:: + _export_request_approval_cabi::<$ty > (arg0, arg1) } } #[unsafe + (export_name = + "cabi_post_amplifier:modules/approval-provider@1.0.0#request-approval")] + unsafe extern "C" fn _post_return_request_approval(arg0 : * mut + u8,) { unsafe { $($path_to_types)*:: + __post_return_request_approval::<$ty > (arg0) } } }; + }; + } + #[doc(hidden)] + pub(crate) use __export_amplifier_modules_approval_provider_1_0_0_cabi; + #[cfg_attr(target_pointer_width = "64", repr(align(8)))] + #[cfg_attr(target_pointer_width = "32", repr(align(4)))] + struct _RetArea( + [::core::mem::MaybeUninit< + u8, + >; 3 * ::core::mem::size_of::<*const u8>()], + ); + static mut _RET_AREA: _RetArea = _RetArea( + [::core::mem::MaybeUninit::uninit(); 3 + * ::core::mem::size_of::<*const u8>()], + ); + } + } + } +} +#[rustfmt::skip] +mod _rt { + #![allow(dead_code, clippy::all)] + #[cfg(target_arch = "wasm32")] + pub fn run_ctors_once() { + wit_bindgen_rt::run_ctors_once(); + } + pub use alloc_crate::vec::Vec; + pub unsafe fn cabi_dealloc(ptr: *mut u8, size: usize, align: usize) { + if size == 0 { + return; + } + let layout = alloc::Layout::from_size_align_unchecked(size, align); + alloc::dealloc(ptr, layout); + } + pub use alloc_crate::string::String; + extern crate alloc as alloc_crate; + pub use alloc_crate::alloc; +} +/// Generates `#[unsafe(no_mangle)]` functions to export 
the specified type as +/// the root implementation of all generated traits. +/// +/// For more information see the documentation of `wit_bindgen::generate!`. +/// +/// ```rust +/// # macro_rules! export{ ($($t:tt)*) => (); } +/// # trait Guest {} +/// struct MyType; +/// +/// impl Guest for MyType { +/// // ... +/// } +/// +/// export!(MyType); +/// ``` +#[allow(unused_macros)] +#[doc(hidden)] +macro_rules! __export_approval_module_impl { + ($ty:ident) => { + self::export!($ty with_types_in self); + }; + ($ty:ident with_types_in $($path_to_types_root:tt)*) => { + $($path_to_types_root)*:: + exports::amplifier::modules::approval_provider::__export_amplifier_modules_approval_provider_1_0_0_cabi!($ty + with_types_in $($path_to_types_root)*:: + exports::amplifier::modules::approval_provider); + }; +} +#[doc(inline)] +pub(crate) use __export_approval_module_impl as export; +#[cfg(target_arch = "wasm32")] +#[unsafe( + link_section = "component-type:wit-bindgen:0.41.0:amplifier:modules@1.0.0:approval-module:encoded world" +)] +#[doc(hidden)] +#[allow(clippy::octal_escapes)] +pub static __WIT_BINDGEN_COMPONENT_TYPE: [u8; 277] = *b"\ +\0asm\x0d\0\x01\0\0\x19\x16wit-component-encoding\x04\0\x07\x8f\x01\x01A\x02\x01\ +A\x02\x01B\x04\x01p}\x01j\x01\0\x01s\x01@\x01\x07request\0\0\x01\x04\0\x10reques\ +t-approval\x01\x02\x04\0)amplifier:modules/approval-provider@1.0.0\x05\0\x04\0'a\ +mplifier:modules/approval-module@1.0.0\x04\0\x0b\x15\x01\0\x0fapproval-module\x03\ +\0\0\0G\x09producers\x01\x0cprocessed-by\x02\x0dwit-component\x070.227.1\x10wit-\ +bindgen-rust\x060.41.0"; +#[inline(never)] +#[doc(hidden)] +pub fn __link_custom_section_describing_imports() { + wit_bindgen_rt::maybe_link_cabi_realloc(); +} diff --git a/tests/fixtures/wasm/src/auto-approve/src/lib.rs b/tests/fixtures/wasm/src/auto-approve/src/lib.rs new file mode 100644 index 0000000..5188291 --- /dev/null +++ b/tests/fixtures/wasm/src/auto-approve/src/lib.rs @@ -0,0 +1,19 @@ +#[allow(warnings)] +mod bindings; + 
+use amplifier_guest::{ApprovalProvider, ApprovalRequest, ApprovalResponse}; + +#[derive(Default)] +struct AutoApprove; + +impl ApprovalProvider for AutoApprove { + fn request_approval(&self, _request: ApprovalRequest) -> Result { + Ok(ApprovalResponse { + approved: true, + reason: Some("Auto-approved by WASM module".to_string()), + remember: false, + }) + } +} + +amplifier_guest::export_approval!(AutoApprove); diff --git a/tests/fixtures/wasm/src/auto-approve/wit/approval.wit b/tests/fixtures/wasm/src/auto-approve/wit/approval.wit new file mode 100644 index 0000000..428d820 --- /dev/null +++ b/tests/fixtures/wasm/src/auto-approve/wit/approval.wit @@ -0,0 +1,17 @@ +// Minimal WIT for approval-module world. +// Extracted from the main amplifier-modules.wit to avoid pulling in +// WASI HTTP dependencies that are only needed by the provider-module world. + +package amplifier:modules@1.0.0; + +/// Approval provider interface — human-in-the-loop approval gate. +interface approval-provider { + /// Request approval from the user (ApprovalRequest proto, serialized). + /// Returns proto-serialized ApprovalResponse on success. + request-approval: func(request: list) -> result, string>; +} + +/// Tier 1: Pure-compute approval provider module. 
+world approval-module { + export approval-provider; +} diff --git a/tests/fixtures/wasm/src/deny-hook/Cargo.toml b/tests/fixtures/wasm/src/deny-hook/Cargo.toml new file mode 100644 index 0000000..7c9c0b6 --- /dev/null +++ b/tests/fixtures/wasm/src/deny-hook/Cargo.toml @@ -0,0 +1,21 @@ +[package] +name = "deny-hook" +version = "0.1.0" +edition = "2021" + +[lib] +crate-type = ["cdylib"] + +[dependencies] +amplifier-guest = { path = "../../../../../crates/amplifier-guest" } +serde_json = "1" +wit-bindgen-rt = "0.41" + +[package.metadata.component] +package = "amplifier:deny-hook" + +[package.metadata.component.target] +world = "hook-module" +path = "wit" + +[workspace] diff --git a/tests/fixtures/wasm/src/deny-hook/amplifier.toml b/tests/fixtures/wasm/src/deny-hook/amplifier.toml new file mode 100644 index 0000000..436ffaf --- /dev/null +++ b/tests/fixtures/wasm/src/deny-hook/amplifier.toml @@ -0,0 +1,4 @@ +[module] +transport = "wasm" +type = "hook" + diff --git a/tests/fixtures/wasm/src/deny-hook/src/bindings.rs b/tests/fixtures/wasm/src/deny-hook/src/bindings.rs new file mode 100644 index 0000000..90fac86 --- /dev/null +++ b/tests/fixtures/wasm/src/deny-hook/src/bindings.rs @@ -0,0 +1,186 @@ +// Generated by `wit-bindgen` 0.41.0. DO NOT EDIT! +// Options used: +// * runtime_path: "wit_bindgen_rt" +#[rustfmt::skip] +#[allow(dead_code, clippy::all)] +pub mod exports { + pub mod amplifier { + pub mod modules { + /// Hook handler interface — responds to lifecycle events. 
+ #[allow(dead_code, async_fn_in_trait, unused_imports, clippy::all)] + pub mod hook_handler { + #[used] + #[doc(hidden)] + static __FORCE_SECTION_REF: fn() = super::super::super::super::__link_custom_section_describing_imports; + use super::super::super::super::_rt; + #[doc(hidden)] + #[allow(non_snake_case)] + pub unsafe fn _export_handle_cabi( + arg0: *mut u8, + arg1: usize, + ) -> *mut u8 { + #[cfg(target_arch = "wasm32")] _rt::run_ctors_once(); + let len0 = arg1; + let result1 = T::handle( + _rt::Vec::from_raw_parts(arg0.cast(), len0, len0), + ); + let ptr2 = (&raw mut _RET_AREA.0).cast::(); + match result1 { + Ok(e) => { + *ptr2.add(0).cast::() = (0i32) as u8; + let vec3 = (e).into_boxed_slice(); + let ptr3 = vec3.as_ptr().cast::(); + let len3 = vec3.len(); + ::core::mem::forget(vec3); + *ptr2 + .add(2 * ::core::mem::size_of::<*const u8>()) + .cast::() = len3; + *ptr2 + .add(::core::mem::size_of::<*const u8>()) + .cast::<*mut u8>() = ptr3.cast_mut(); + } + Err(e) => { + *ptr2.add(0).cast::() = (1i32) as u8; + let vec4 = (e.into_bytes()).into_boxed_slice(); + let ptr4 = vec4.as_ptr().cast::(); + let len4 = vec4.len(); + ::core::mem::forget(vec4); + *ptr2 + .add(2 * ::core::mem::size_of::<*const u8>()) + .cast::() = len4; + *ptr2 + .add(::core::mem::size_of::<*const u8>()) + .cast::<*mut u8>() = ptr4.cast_mut(); + } + }; + ptr2 + } + #[doc(hidden)] + #[allow(non_snake_case)] + pub unsafe fn __post_return_handle(arg0: *mut u8) { + let l0 = i32::from(*arg0.add(0).cast::()); + match l0 { + 0 => { + let l1 = *arg0 + .add(::core::mem::size_of::<*const u8>()) + .cast::<*mut u8>(); + let l2 = *arg0 + .add(2 * ::core::mem::size_of::<*const u8>()) + .cast::(); + let base3 = l1; + let len3 = l2; + _rt::cabi_dealloc(base3, len3 * 1, 1); + } + _ => { + let l4 = *arg0 + .add(::core::mem::size_of::<*const u8>()) + .cast::<*mut u8>(); + let l5 = *arg0 + .add(2 * ::core::mem::size_of::<*const u8>()) + .cast::(); + _rt::cabi_dealloc(l4, l5, 1); + } + } + } + pub trait Guest { + 
/// Handle a lifecycle event (HookHandleRequest proto, serialized). + /// Returns proto-serialized HookResult on success. + fn handle(event: _rt::Vec) -> Result<_rt::Vec, _rt::String>; + } + #[doc(hidden)] + macro_rules! __export_amplifier_modules_hook_handler_1_0_0_cabi { + ($ty:ident with_types_in $($path_to_types:tt)*) => { + const _ : () = { #[unsafe (export_name = + "amplifier:modules/hook-handler@1.0.0#handle")] unsafe extern "C" + fn export_handle(arg0 : * mut u8, arg1 : usize,) -> * mut u8 { + unsafe { $($path_to_types)*:: _export_handle_cabi::<$ty > (arg0, + arg1) } } #[unsafe (export_name = + "cabi_post_amplifier:modules/hook-handler@1.0.0#handle")] unsafe + extern "C" fn _post_return_handle(arg0 : * mut u8,) { unsafe { + $($path_to_types)*:: __post_return_handle::<$ty > (arg0) } } }; + }; + } + #[doc(hidden)] + pub(crate) use __export_amplifier_modules_hook_handler_1_0_0_cabi; + #[cfg_attr(target_pointer_width = "64", repr(align(8)))] + #[cfg_attr(target_pointer_width = "32", repr(align(4)))] + struct _RetArea( + [::core::mem::MaybeUninit< + u8, + >; 3 * ::core::mem::size_of::<*const u8>()], + ); + static mut _RET_AREA: _RetArea = _RetArea( + [::core::mem::MaybeUninit::uninit(); 3 + * ::core::mem::size_of::<*const u8>()], + ); + } + } + } +} +#[rustfmt::skip] +mod _rt { + #![allow(dead_code, clippy::all)] + #[cfg(target_arch = "wasm32")] + pub fn run_ctors_once() { + wit_bindgen_rt::run_ctors_once(); + } + pub use alloc_crate::vec::Vec; + pub unsafe fn cabi_dealloc(ptr: *mut u8, size: usize, align: usize) { + if size == 0 { + return; + } + let layout = alloc::Layout::from_size_align_unchecked(size, align); + alloc::dealloc(ptr, layout); + } + pub use alloc_crate::string::String; + extern crate alloc as alloc_crate; + pub use alloc_crate::alloc; +} +/// Generates `#[unsafe(no_mangle)]` functions to export the specified type as +/// the root implementation of all generated traits. 
+/// +/// For more information see the documentation of `wit_bindgen::generate!`. +/// +/// ```rust +/// # macro_rules! export{ ($($t:tt)*) => (); } +/// # trait Guest {} +/// struct MyType; +/// +/// impl Guest for MyType { +/// // ... +/// } +/// +/// export!(MyType); +/// ``` +#[allow(unused_macros)] +#[doc(hidden)] +macro_rules! __export_hook_module_impl { + ($ty:ident) => { + self::export!($ty with_types_in self); + }; + ($ty:ident with_types_in $($path_to_types_root:tt)*) => { + $($path_to_types_root)*:: + exports::amplifier::modules::hook_handler::__export_amplifier_modules_hook_handler_1_0_0_cabi!($ty + with_types_in $($path_to_types_root)*:: + exports::amplifier::modules::hook_handler); + }; +} +#[doc(inline)] +pub(crate) use __export_hook_module_impl as export; +#[cfg(target_arch = "wasm32")] +#[unsafe( + link_section = "component-type:wit-bindgen:0.41.0:amplifier:modules@1.0.0:hook-module:encoded world" +)] +#[doc(hidden)] +#[allow(clippy::octal_escapes)] +pub static __WIT_BINDGEN_COMPONENT_TYPE: [u8; 251] = *b"\ +\0asm\x0d\0\x01\0\0\x19\x16wit-component-encoding\x04\0\x07z\x01A\x02\x01A\x02\x01\ +B\x04\x01p}\x01j\x01\0\x01s\x01@\x01\x05event\0\0\x01\x04\0\x06handle\x01\x02\x04\ +\0$amplifier:modules/hook-handler@1.0.0\x05\0\x04\0#amplifier:modules/hook-modul\ +e@1.0.0\x04\0\x0b\x11\x01\0\x0bhook-module\x03\0\0\0G\x09producers\x01\x0cproces\ +sed-by\x02\x0dwit-component\x070.227.1\x10wit-bindgen-rust\x060.41.0"; +#[inline(never)] +#[doc(hidden)] +pub fn __link_custom_section_describing_imports() { + wit_bindgen_rt::maybe_link_cabi_realloc(); +} diff --git a/tests/fixtures/wasm/src/deny-hook/src/lib.rs b/tests/fixtures/wasm/src/deny-hook/src/lib.rs new file mode 100644 index 0000000..e733e8f --- /dev/null +++ b/tests/fixtures/wasm/src/deny-hook/src/lib.rs @@ -0,0 +1,19 @@ +#[allow(warnings)] +mod bindings; + +use amplifier_guest::{HookAction, HookHandler, HookResult, Value}; + +#[derive(Default)] +struct DenyHook; + +impl HookHandler for DenyHook { + fn 
handle(&self, _event: &str, _data: Value) -> Result { + Ok(HookResult { + action: HookAction::Deny, + reason: Some("Denied by WASM hook".to_string()), + ..Default::default() + }) + } +} + +amplifier_guest::export_hook!(DenyHook); diff --git a/tests/fixtures/wasm/src/deny-hook/wit/hook.wit b/tests/fixtures/wasm/src/deny-hook/wit/hook.wit new file mode 100644 index 0000000..2a82d70 --- /dev/null +++ b/tests/fixtures/wasm/src/deny-hook/wit/hook.wit @@ -0,0 +1,17 @@ +// Minimal WIT for hook-module world. +// Extracted from the main amplifier-modules.wit to avoid pulling in +// WASI HTTP dependencies that are only needed by the provider-module world. + +package amplifier:modules@1.0.0; + +/// Hook handler interface — responds to lifecycle events. +interface hook-handler { + /// Handle a lifecycle event (HookHandleRequest proto, serialized). + /// Returns proto-serialized HookResult on success. + handle: func(event: list) -> result, string>; +} + +/// Tier 1: Pure-compute hook handler module. +world hook-module { + export hook-handler; +} diff --git a/tests/fixtures/wasm/src/echo-provider/.gitignore b/tests/fixtures/wasm/src/echo-provider/.gitignore new file mode 100644 index 0000000..b83d222 --- /dev/null +++ b/tests/fixtures/wasm/src/echo-provider/.gitignore @@ -0,0 +1 @@ +/target/ diff --git a/tests/fixtures/wasm/src/echo-provider/Cargo.lock b/tests/fixtures/wasm/src/echo-provider/Cargo.lock new file mode 100644 index 0000000..ac5e60f --- /dev/null +++ b/tests/fixtures/wasm/src/echo-provider/Cargo.lock @@ -0,0 +1,861 @@ +# This file is automatically @generated by Cargo. +# It is not intended for manual editing. 
+version = 4 + +[[package]] +name = "adler2" +version = "2.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "320119579fcad9c21884f5c4861d16174d0e06250625266f50fe6898340abefa" + +[[package]] +name = "amplifier-guest" +version = "0.1.0" +dependencies = [ + "prost", + "serde", + "serde_json", + "wit-bindgen", +] + +[[package]] +name = "anyhow" +version = "1.0.102" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7f202df86484c868dbad7eaa557ef785d5c66295e41b460ef922eca0723b842c" + +[[package]] +name = "auditable-serde" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5c7bf8143dfc3c0258df908843e169b5cc5fcf76c7718bd66135ef4a9cd558c5" +dependencies = [ + "semver", + "serde", + "serde_json", + "topological-sort", +] + +[[package]] +name = "bitflags" +version = "2.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "843867be96c8daad0d758b57df9392b6d8d271134fce549de6ce169ff98a92af" + +[[package]] +name = "bytes" +version = "1.11.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e748733b7cbc798e1434b6ac524f0c1ff2ab456fe201501e6497c8417a4fc33" + +[[package]] +name = "cfg-if" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9330f8b2ff13f34540b44e946ef35111825727b38d33286ef986142615121801" + +[[package]] +name = "crc32fast" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9481c1c90cbf2ac953f07c8d4a58aa3945c425b7185c9154d67a65e4230da511" +dependencies = [ + "cfg-if", +] + +[[package]] +name = "displaydoc" +version = "0.2.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "97369cbbc041bc366949bc74d34658d6cda5621039731c6310521892a3a20ae0" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "echo-provider" +version = "0.1.0" +dependencies = [ + 
"amplifier-guest", + "serde_json", + "wit-bindgen-rt", +] + +[[package]] +name = "either" +version = "1.15.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "48c757948c5ede0e46177b7add2e67155f70e33c07fea8284df6576da70b3719" + +[[package]] +name = "equivalent" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "877a4ace8713b0bcf2a4e7eec82529c029f1d0619886d18145fea96c3ffe5c0f" + +[[package]] +name = "flate2" +version = "1.1.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "843fba2746e448b37e26a819579957415c8cef339bf08564fe8b7ddbd959573c" +dependencies = [ + "crc32fast", + "miniz_oxide", +] + +[[package]] +name = "foldhash" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d9c4f5dac5e15c24eb999c26181a6ca40b39fe946cbe4c263c7209467bc83af2" + +[[package]] +name = "form_urlencoded" +version = "1.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cb4cb245038516f5f85277875cdaa4f7d2c9a0fa0468de06ed190163b1581fcf" +dependencies = [ + "percent-encoding", +] + +[[package]] +name = "futures" +version = "0.3.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8b147ee9d1f6d097cef9ce628cd2ee62288d963e16fb287bd9286455b241382d" +dependencies = [ + "futures-channel", + "futures-core", + "futures-executor", + "futures-io", + "futures-sink", + "futures-task", + "futures-util", +] + +[[package]] +name = "futures-channel" +version = "0.3.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "07bbe89c50d7a535e539b8c17bc0b49bdb77747034daa8087407d655f3f7cc1d" +dependencies = [ + "futures-core", + "futures-sink", +] + +[[package]] +name = "futures-core" +version = "0.3.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7e3450815272ef58cec6d564423f6e755e25379b217b0bc688e295ba24df6b1d" + +[[package]] +name = 
"futures-executor" +version = "0.3.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "baf29c38818342a3b26b5b923639e7b1f4a61fc5e76102d4b1981c6dc7a7579d" +dependencies = [ + "futures-core", + "futures-task", + "futures-util", +] + +[[package]] +name = "futures-io" +version = "0.3.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cecba35d7ad927e23624b22ad55235f2239cfa44fd10428eecbeba6d6a717718" + +[[package]] +name = "futures-macro" +version = "0.3.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e835b70203e41293343137df5c0664546da5745f82ec9b84d40be8336958447b" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "futures-sink" +version = "0.3.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c39754e157331b013978ec91992bde1ac089843443c49cbc7f46150b0fad0893" + +[[package]] +name = "futures-task" +version = "0.3.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "037711b3d59c33004d3856fbdc83b99d4ff37a24768fa1be9ce3538a1cde4393" + +[[package]] +name = "futures-util" +version = "0.3.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "389ca41296e6190b48053de0321d02a77f32f8a5d2461dd38762c0593805c6d6" +dependencies = [ + "futures-channel", + "futures-core", + "futures-io", + "futures-macro", + "futures-sink", + "futures-task", + "memchr", + "pin-project-lite", + "slab", +] + +[[package]] +name = "hashbrown" +version = "0.15.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9229cfe53dfd69f0609a49f65461bd93001ea1ef889cd5529dd176593f5338a1" +dependencies = [ + "foldhash", +] + +[[package]] +name = "hashbrown" +version = "0.16.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "841d1cc9bed7f9236f321df977030373f4a4163ae1a7dbfe1a51a2c1a51d9100" + +[[package]] +name = "heck" +version = "0.5.0" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "2304e00983f87ffb38b55b444b5e3b60a884b5d30c0fca7d82fe33449bbe55ea" + +[[package]] +name = "icu_collections" +version = "2.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4c6b649701667bbe825c3b7e6388cb521c23d88644678e83c0c4d0a621a34b43" +dependencies = [ + "displaydoc", + "potential_utf", + "yoke", + "zerofrom", + "zerovec", +] + +[[package]] +name = "icu_locale_core" +version = "2.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "edba7861004dd3714265b4db54a3c390e880ab658fec5f7db895fae2046b5bb6" +dependencies = [ + "displaydoc", + "litemap", + "tinystr", + "writeable", + "zerovec", +] + +[[package]] +name = "icu_normalizer" +version = "2.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5f6c8828b67bf8908d82127b2054ea1b4427ff0230ee9141c54251934ab1b599" +dependencies = [ + "icu_collections", + "icu_normalizer_data", + "icu_properties", + "icu_provider", + "smallvec", + "zerovec", +] + +[[package]] +name = "icu_normalizer_data" +version = "2.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7aedcccd01fc5fe81e6b489c15b247b8b0690feb23304303a9e560f37efc560a" + +[[package]] +name = "icu_properties" +version = "2.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "020bfc02fe870ec3a66d93e677ccca0562506e5872c650f893269e08615d74ec" +dependencies = [ + "icu_collections", + "icu_locale_core", + "icu_properties_data", + "icu_provider", + "zerotrie", + "zerovec", +] + +[[package]] +name = "icu_properties_data" +version = "2.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "616c294cf8d725c6afcd8f55abc17c56464ef6211f9ed59cccffe534129c77af" + +[[package]] +name = "icu_provider" +version = "2.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"85962cf0ce02e1e0a629cc34e7ca3e373ce20dda4c4d7294bbd0bf1fdb59e614" +dependencies = [ + "displaydoc", + "icu_locale_core", + "writeable", + "yoke", + "zerofrom", + "zerotrie", + "zerovec", +] + +[[package]] +name = "id-arena" +version = "2.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3d3067d79b975e8844ca9eb072e16b31c3c1c36928edf9c6789548c524d0d954" + +[[package]] +name = "idna" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3b0875f23caa03898994f6ddc501886a45c7d3d62d04d2d90788d47be1b1e4de" +dependencies = [ + "idna_adapter", + "smallvec", + "utf8_iter", +] + +[[package]] +name = "idna_adapter" +version = "1.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3acae9609540aa318d1bc588455225fb2085b9ed0c4f6bd0d9d5bcd86f1a0344" +dependencies = [ + "icu_normalizer", + "icu_properties", +] + +[[package]] +name = "indexmap" +version = "2.13.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7714e70437a7dc3ac8eb7e6f8df75fd8eb422675fc7678aff7364301092b1017" +dependencies = [ + "equivalent", + "hashbrown 0.16.1", + "serde", + "serde_core", +] + +[[package]] +name = "itertools" +version = "0.14.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2b192c782037fadd9cfa75548310488aabdbf3d2da73885b31bd0abd03351285" +dependencies = [ + "either", +] + +[[package]] +name = "itoa" +version = "1.0.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "92ecc6618181def0457392ccd0ee51198e065e016d1d527a7ac1b6dc7c1f09d2" + +[[package]] +name = "leb128fmt" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "09edd9e8b54e49e587e4f6295a7d29c3ea94d469cb40ab8ca70b288248a81db2" + +[[package]] +name = "litemap" +version = "0.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"6373607a59f0be73a39b6fe456b8192fcc3585f602af20751600e974dd455e77" + +[[package]] +name = "log" +version = "0.4.29" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5e5032e24019045c762d3c0f28f5b6b8bbf38563a65908389bf7978758920897" + +[[package]] +name = "memchr" +version = "2.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f8ca58f447f06ed17d5fc4043ce1b10dd205e060fb3ce5b979b8ed8e59ff3f79" + +[[package]] +name = "miniz_oxide" +version = "0.8.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1fa76a2c86f704bdb222d66965fb3d63269ce38518b83cb0575fca855ebb6316" +dependencies = [ + "adler2", + "simd-adler32", +] + +[[package]] +name = "once_cell" +version = "1.21.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "42f5e15c9953c5e4ccceeb2e7382a716482c34515315f7b03532b8b4e8393d2d" + +[[package]] +name = "percent-encoding" +version = "2.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9b4f627cb1b25917193a259e49bdad08f671f8d9708acfd5fe0a8c1455d87220" + +[[package]] +name = "pin-project-lite" +version = "0.2.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a89322df9ebe1c1578d689c92318e070967d1042b512afbe49518723f4e6d5cd" + +[[package]] +name = "potential_utf" +version = "0.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b73949432f5e2a09657003c25bca5e19a0e9c84f8058ca374f49e0ebe605af77" +dependencies = [ + "zerovec", +] + +[[package]] +name = "prettyplease" +version = "0.2.37" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "479ca8adacdd7ce8f1fb39ce9ecccbfe93a3f1344b3d0d97f20bc0196208f62b" +dependencies = [ + "proc-macro2", + "syn", +] + +[[package]] +name = "proc-macro2" +version = "1.0.106" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"8fd00f0bb2e90d81d1044c2b32617f68fcb9fa3bb7640c23e9c748e53fb30934" +dependencies = [ + "unicode-ident", +] + +[[package]] +name = "prost" +version = "0.13.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2796faa41db3ec313a31f7624d9286acf277b52de526150b7e69f3debf891ee5" +dependencies = [ + "bytes", + "prost-derive", +] + +[[package]] +name = "prost-derive" +version = "0.13.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8a56d757972c98b346a9b766e3f02746cde6dd1cd1d1d563472929fdd74bec4d" +dependencies = [ + "anyhow", + "itertools", + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "quote" +version = "1.0.45" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "41f2619966050689382d2b44f664f4bc593e129785a36d6ee376ddf37259b924" +dependencies = [ + "proc-macro2", +] + +[[package]] +name = "semver" +version = "1.0.27" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d767eb0aabc880b29956c35734170f26ed551a859dbd361d140cdbeca61ab1e2" +dependencies = [ + "serde", + "serde_core", +] + +[[package]] +name = "serde" +version = "1.0.228" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9a8e94ea7f378bd32cbbd37198a4a91436180c5bb472411e48b5ec2e2124ae9e" +dependencies = [ + "serde_core", + "serde_derive", +] + +[[package]] +name = "serde_core" +version = "1.0.228" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "41d385c7d4ca58e59fc732af25c3983b67ac852c1a25000afe1175de458b67ad" +dependencies = [ + "serde_derive", +] + +[[package]] +name = "serde_derive" +version = "1.0.228" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d540f220d3187173da220f885ab66608367b6574e925011a9353e4badda91d79" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "serde_json" +version = "1.0.149" +source = "registry+https://github.com/rust-lang/crates.io-index" 
+checksum = "83fc039473c5595ace860d8c4fafa220ff474b3fc6bfdb4293327f1a37e94d86" +dependencies = [ + "itoa", + "memchr", + "serde", + "serde_core", + "zmij", +] + +[[package]] +name = "simd-adler32" +version = "0.3.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e320a6c5ad31d271ad523dcf3ad13e2767ad8b1cb8f047f75a8aeaf8da139da2" + +[[package]] +name = "slab" +version = "0.4.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0c790de23124f9ab44544d7ac05d60440adc586479ce501c1d6d7da3cd8c9cf5" + +[[package]] +name = "smallvec" +version = "1.15.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "67b1b7a3b5fe4f1376887184045fcf45c69e92af734b7aaddc05fb777b6fbd03" + +[[package]] +name = "spdx" +version = "0.10.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c3e17e880bafaeb362a7b751ec46bdc5b61445a188f80e0606e68167cd540fa3" +dependencies = [ + "smallvec", +] + +[[package]] +name = "stable_deref_trait" +version = "1.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6ce2be8dc25455e1f91df71bfa12ad37d7af1092ae736f3a6cd0e37bc7810596" + +[[package]] +name = "syn" +version = "2.0.117" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e665b8803e7b1d2a727f4023456bbbbe74da67099c585258af0ad9c5013b9b99" +dependencies = [ + "proc-macro2", + "quote", + "unicode-ident", +] + +[[package]] +name = "synstructure" +version = "0.13.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "728a70f3dbaf5bab7f0c4b1ac8d7ae5ea60a4b5549c8a5914361c99147a709d2" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "tinystr" +version = "0.8.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "42d3e9c45c09de15d06dd8acf5f4e0e399e85927b7f00711024eb7ae10fa4869" +dependencies = [ + "displaydoc", + "zerovec", +] + +[[package]] +name = 
"topological-sort" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ea68304e134ecd095ac6c3574494fc62b909f416c4fca77e440530221e549d3d" + +[[package]] +name = "unicode-ident" +version = "1.0.24" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e6e4313cd5fcd3dad5cafa179702e2b244f760991f45397d14d4ebf38247da75" + +[[package]] +name = "unicode-xid" +version = "0.2.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ebc1c04c71510c7f702b52b7c350734c9ff1295c464a03335b00bb84fc54f853" + +[[package]] +name = "url" +version = "2.5.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ff67a8a4397373c3ef660812acab3268222035010ab8680ec4215f38ba3d0eed" +dependencies = [ + "form_urlencoded", + "idna", + "percent-encoding", + "serde", +] + +[[package]] +name = "utf8_iter" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b6c140620e7ffbb22c2dee59cafe6084a59b5ffc27a8859a5f0d494b5d52b6be" + +[[package]] +name = "wasm-encoder" +version = "0.227.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "80bb72f02e7fbf07183443b27b0f3d4144abf8c114189f2e088ed95b696a7822" +dependencies = [ + "leb128fmt", + "wasmparser", +] + +[[package]] +name = "wasm-metadata" +version = "0.227.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ce1ef0faabbbba6674e97a56bee857ccddf942785a336c8b47b42373c922a91d" +dependencies = [ + "anyhow", + "auditable-serde", + "flate2", + "indexmap", + "serde", + "serde_derive", + "serde_json", + "spdx", + "url", + "wasm-encoder", + "wasmparser", +] + +[[package]] +name = "wasmparser" +version = "0.227.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0f51cad774fb3c9461ab9bccc9c62dfb7388397b5deda31bf40e8108ccd678b2" +dependencies = [ + "bitflags", + "hashbrown 0.15.5", + "indexmap", + "semver", +] + 
+[[package]] +name = "wit-bindgen" +version = "0.41.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "10fb6648689b3929d56bbc7eb1acf70c9a42a29eb5358c67c10f54dbd5d695de" +dependencies = [ + "wit-bindgen-rt", + "wit-bindgen-rust-macro", +] + +[[package]] +name = "wit-bindgen-core" +version = "0.41.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "92fa781d4f2ff6d3f27f3cc9b74a73327b31ca0dc4a3ef25a0ce2983e0e5af9b" +dependencies = [ + "anyhow", + "heck", + "wit-parser", +] + +[[package]] +name = "wit-bindgen-rt" +version = "0.41.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c4db52a11d4dfb0a59f194c064055794ee6564eb1ced88c25da2cf76e50c5621" +dependencies = [ + "bitflags", + "futures", + "once_cell", +] + +[[package]] +name = "wit-bindgen-rust" +version = "0.41.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9d0809dc5ba19e2e98661bf32fc0addc5a3ca5bf3a6a7083aa6ba484085ff3ce" +dependencies = [ + "anyhow", + "heck", + "indexmap", + "prettyplease", + "syn", + "wasm-metadata", + "wit-bindgen-core", + "wit-component", +] + +[[package]] +name = "wit-bindgen-rust-macro" +version = "0.41.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ad19eec017904e04c60719592a803ee5da76cb51c81e3f6fbf9457f59db49799" +dependencies = [ + "anyhow", + "prettyplease", + "proc-macro2", + "quote", + "syn", + "wit-bindgen-core", + "wit-bindgen-rust", +] + +[[package]] +name = "wit-component" +version = "0.227.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "635c3adc595422cbf2341a17fb73a319669cc8d33deed3a48368a841df86b676" +dependencies = [ + "anyhow", + "bitflags", + "indexmap", + "log", + "serde", + "serde_derive", + "serde_json", + "wasm-encoder", + "wasm-metadata", + "wasmparser", + "wit-parser", +] + +[[package]] +name = "wit-parser" +version = "0.227.1" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "ddf445ed5157046e4baf56f9138c124a0824d4d1657e7204d71886ad8ce2fc11" +dependencies = [ + "anyhow", + "id-arena", + "indexmap", + "log", + "semver", + "serde", + "serde_derive", + "serde_json", + "unicode-xid", + "wasmparser", +] + +[[package]] +name = "writeable" +version = "0.6.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9edde0db4769d2dc68579893f2306b26c6ecfbe0ef499b013d731b7b9247e0b9" + +[[package]] +name = "yoke" +version = "0.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "72d6e5c6afb84d73944e5cedb052c4680d5657337201555f9f2a16b7406d4954" +dependencies = [ + "stable_deref_trait", + "yoke-derive", + "zerofrom", +] + +[[package]] +name = "yoke-derive" +version = "0.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b659052874eb698efe5b9e8cf382204678a0086ebf46982b79d6ca3182927e5d" +dependencies = [ + "proc-macro2", + "quote", + "syn", + "synstructure", +] + +[[package]] +name = "zerofrom" +version = "0.1.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "50cc42e0333e05660c3587f3bf9d0478688e15d870fab3346451ce7f8c9fbea5" +dependencies = [ + "zerofrom-derive", +] + +[[package]] +name = "zerofrom-derive" +version = "0.1.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d71e5d6e06ab090c67b5e44993ec16b72dcbaabc526db883a360057678b48502" +dependencies = [ + "proc-macro2", + "quote", + "syn", + "synstructure", +] + +[[package]] +name = "zerotrie" +version = "0.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2a59c17a5562d507e4b54960e8569ebee33bee890c70aa3fe7b97e85a9fd7851" +dependencies = [ + "displaydoc", + "yoke", + "zerofrom", +] + +[[package]] +name = "zerovec" +version = "0.11.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"6c28719294829477f525be0186d13efa9a3c602f7ec202ca9e353d310fb9a002" +dependencies = [ + "yoke", + "zerofrom", + "zerovec-derive", +] + +[[package]] +name = "zerovec-derive" +version = "0.11.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "eadce39539ca5cb3985590102671f2567e659fca9666581ad3411d59207951f3" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "zmij" +version = "1.0.21" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b8848ee67ecc8aedbaf3e4122217aff892639231befc6a1b58d29fff4c2cabaa" diff --git a/tests/fixtures/wasm/src/echo-provider/Cargo.toml b/tests/fixtures/wasm/src/echo-provider/Cargo.toml new file mode 100644 index 0000000..3111093 --- /dev/null +++ b/tests/fixtures/wasm/src/echo-provider/Cargo.toml @@ -0,0 +1,21 @@ +[package] +name = "echo-provider" +version = "0.1.0" +edition = "2021" + +[lib] +crate-type = ["cdylib"] + +[dependencies] +amplifier-guest = { path = "../../../../../crates/amplifier-guest" } +serde_json = "1" +wit-bindgen-rt = "0.41" + +[package.metadata.component] +package = "amplifier:echo-provider" + +[package.metadata.component.target] +world = "provider-module" +path = "wit" + +[workspace] diff --git a/tests/fixtures/wasm/src/echo-provider/amplifier.toml b/tests/fixtures/wasm/src/echo-provider/amplifier.toml new file mode 100644 index 0000000..72d2367 --- /dev/null +++ b/tests/fixtures/wasm/src/echo-provider/amplifier.toml @@ -0,0 +1,4 @@ +[module] +transport = "wasm" +type = "provider" + diff --git a/tests/fixtures/wasm/src/echo-provider/src/bindings.rs b/tests/fixtures/wasm/src/echo-provider/src/bindings.rs new file mode 100644 index 0000000..bc677d9 --- /dev/null +++ b/tests/fixtures/wasm/src/echo-provider/src/bindings.rs @@ -0,0 +1,379 @@ +// Generated by `wit-bindgen` 0.41.0. DO NOT EDIT! 
+// Options used: +// * runtime_path: "wit_bindgen_rt" +#[rustfmt::skip] +#[allow(dead_code, clippy::all)] +pub mod exports { + pub mod amplifier { + pub mod modules { + /// Provider interface — LLM completions in any language. + #[allow(dead_code, async_fn_in_trait, unused_imports, clippy::all)] + pub mod provider { + #[used] + #[doc(hidden)] + static __FORCE_SECTION_REF: fn() = super::super::super::super::__link_custom_section_describing_imports; + use super::super::super::super::_rt; + #[doc(hidden)] + #[allow(non_snake_case)] + pub unsafe fn _export_get_info_cabi() -> *mut u8 { + #[cfg(target_arch = "wasm32")] _rt::run_ctors_once(); + let result0 = T::get_info(); + let ptr1 = (&raw mut _RET_AREA.0).cast::(); + let vec2 = (result0).into_boxed_slice(); + let ptr2 = vec2.as_ptr().cast::(); + let len2 = vec2.len(); + ::core::mem::forget(vec2); + *ptr1.add(::core::mem::size_of::<*const u8>()).cast::() = len2; + *ptr1.add(0).cast::<*mut u8>() = ptr2.cast_mut(); + ptr1 + } + #[doc(hidden)] + #[allow(non_snake_case)] + pub unsafe fn __post_return_get_info(arg0: *mut u8) { + let l0 = *arg0.add(0).cast::<*mut u8>(); + let l1 = *arg0 + .add(::core::mem::size_of::<*const u8>()) + .cast::(); + let base2 = l0; + let len2 = l1; + _rt::cabi_dealloc(base2, len2 * 1, 1); + } + #[doc(hidden)] + #[allow(non_snake_case)] + pub unsafe fn _export_list_models_cabi() -> *mut u8 { + #[cfg(target_arch = "wasm32")] _rt::run_ctors_once(); + let result0 = T::list_models(); + let ptr1 = (&raw mut _RET_AREA.0).cast::(); + match result0 { + Ok(e) => { + *ptr1.add(0).cast::() = (0i32) as u8; + let vec2 = (e).into_boxed_slice(); + let ptr2 = vec2.as_ptr().cast::(); + let len2 = vec2.len(); + ::core::mem::forget(vec2); + *ptr1 + .add(2 * ::core::mem::size_of::<*const u8>()) + .cast::() = len2; + *ptr1 + .add(::core::mem::size_of::<*const u8>()) + .cast::<*mut u8>() = ptr2.cast_mut(); + } + Err(e) => { + *ptr1.add(0).cast::() = (1i32) as u8; + let vec3 = (e.into_bytes()).into_boxed_slice(); + let 
ptr3 = vec3.as_ptr().cast::(); + let len3 = vec3.len(); + ::core::mem::forget(vec3); + *ptr1 + .add(2 * ::core::mem::size_of::<*const u8>()) + .cast::() = len3; + *ptr1 + .add(::core::mem::size_of::<*const u8>()) + .cast::<*mut u8>() = ptr3.cast_mut(); + } + }; + ptr1 + } + #[doc(hidden)] + #[allow(non_snake_case)] + pub unsafe fn __post_return_list_models(arg0: *mut u8) { + let l0 = i32::from(*arg0.add(0).cast::()); + match l0 { + 0 => { + let l1 = *arg0 + .add(::core::mem::size_of::<*const u8>()) + .cast::<*mut u8>(); + let l2 = *arg0 + .add(2 * ::core::mem::size_of::<*const u8>()) + .cast::(); + let base3 = l1; + let len3 = l2; + _rt::cabi_dealloc(base3, len3 * 1, 1); + } + _ => { + let l4 = *arg0 + .add(::core::mem::size_of::<*const u8>()) + .cast::<*mut u8>(); + let l5 = *arg0 + .add(2 * ::core::mem::size_of::<*const u8>()) + .cast::(); + _rt::cabi_dealloc(l4, l5, 1); + } + } + } + #[doc(hidden)] + #[allow(non_snake_case)] + pub unsafe fn _export_complete_cabi( + arg0: *mut u8, + arg1: usize, + ) -> *mut u8 { + #[cfg(target_arch = "wasm32")] _rt::run_ctors_once(); + let len0 = arg1; + let result1 = T::complete( + _rt::Vec::from_raw_parts(arg0.cast(), len0, len0), + ); + let ptr2 = (&raw mut _RET_AREA.0).cast::(); + match result1 { + Ok(e) => { + *ptr2.add(0).cast::() = (0i32) as u8; + let vec3 = (e).into_boxed_slice(); + let ptr3 = vec3.as_ptr().cast::(); + let len3 = vec3.len(); + ::core::mem::forget(vec3); + *ptr2 + .add(2 * ::core::mem::size_of::<*const u8>()) + .cast::() = len3; + *ptr2 + .add(::core::mem::size_of::<*const u8>()) + .cast::<*mut u8>() = ptr3.cast_mut(); + } + Err(e) => { + *ptr2.add(0).cast::() = (1i32) as u8; + let vec4 = (e.into_bytes()).into_boxed_slice(); + let ptr4 = vec4.as_ptr().cast::(); + let len4 = vec4.len(); + ::core::mem::forget(vec4); + *ptr2 + .add(2 * ::core::mem::size_of::<*const u8>()) + .cast::() = len4; + *ptr2 + .add(::core::mem::size_of::<*const u8>()) + .cast::<*mut u8>() = ptr4.cast_mut(); + } + }; + ptr2 + } + 
#[doc(hidden)] + #[allow(non_snake_case)] + pub unsafe fn __post_return_complete(arg0: *mut u8) { + let l0 = i32::from(*arg0.add(0).cast::()); + match l0 { + 0 => { + let l1 = *arg0 + .add(::core::mem::size_of::<*const u8>()) + .cast::<*mut u8>(); + let l2 = *arg0 + .add(2 * ::core::mem::size_of::<*const u8>()) + .cast::(); + let base3 = l1; + let len3 = l2; + _rt::cabi_dealloc(base3, len3 * 1, 1); + } + _ => { + let l4 = *arg0 + .add(::core::mem::size_of::<*const u8>()) + .cast::<*mut u8>(); + let l5 = *arg0 + .add(2 * ::core::mem::size_of::<*const u8>()) + .cast::(); + _rt::cabi_dealloc(l4, l5, 1); + } + } + } + #[doc(hidden)] + #[allow(non_snake_case)] + pub unsafe fn _export_parse_tool_calls_cabi( + arg0: *mut u8, + arg1: usize, + ) -> *mut u8 { + #[cfg(target_arch = "wasm32")] _rt::run_ctors_once(); + let len0 = arg1; + let result1 = T::parse_tool_calls( + _rt::Vec::from_raw_parts(arg0.cast(), len0, len0), + ); + let ptr2 = (&raw mut _RET_AREA.0).cast::(); + match result1 { + Ok(e) => { + *ptr2.add(0).cast::() = (0i32) as u8; + let vec3 = (e).into_boxed_slice(); + let ptr3 = vec3.as_ptr().cast::(); + let len3 = vec3.len(); + ::core::mem::forget(vec3); + *ptr2 + .add(2 * ::core::mem::size_of::<*const u8>()) + .cast::() = len3; + *ptr2 + .add(::core::mem::size_of::<*const u8>()) + .cast::<*mut u8>() = ptr3.cast_mut(); + } + Err(e) => { + *ptr2.add(0).cast::() = (1i32) as u8; + let vec4 = (e.into_bytes()).into_boxed_slice(); + let ptr4 = vec4.as_ptr().cast::(); + let len4 = vec4.len(); + ::core::mem::forget(vec4); + *ptr2 + .add(2 * ::core::mem::size_of::<*const u8>()) + .cast::() = len4; + *ptr2 + .add(::core::mem::size_of::<*const u8>()) + .cast::<*mut u8>() = ptr4.cast_mut(); + } + }; + ptr2 + } + #[doc(hidden)] + #[allow(non_snake_case)] + pub unsafe fn __post_return_parse_tool_calls(arg0: *mut u8) { + let l0 = i32::from(*arg0.add(0).cast::()); + match l0 { + 0 => { + let l1 = *arg0 + .add(::core::mem::size_of::<*const u8>()) + .cast::<*mut u8>(); + let l2 = 
*arg0 + .add(2 * ::core::mem::size_of::<*const u8>()) + .cast::(); + let base3 = l1; + let len3 = l2; + _rt::cabi_dealloc(base3, len3 * 1, 1); + } + _ => { + let l4 = *arg0 + .add(::core::mem::size_of::<*const u8>()) + .cast::<*mut u8>(); + let l5 = *arg0 + .add(2 * ::core::mem::size_of::<*const u8>()) + .cast::(); + _rt::cabi_dealloc(l4, l5, 1); + } + } + } + pub trait Guest { + /// Return provider metadata (ProviderInfo proto, serialized). + fn get_info() -> _rt::Vec; + /// List available models. Returns ListModelsResponse proto. + fn list_models() -> Result<_rt::Vec, _rt::String>; + /// Generate a completion (ChatRequest proto → ChatResponse proto). + fn complete( + request: _rt::Vec, + ) -> Result<_rt::Vec, _rt::String>; + /// Extract tool calls from a response (ChatResponse proto → + /// ParseToolCallsResponse proto). + fn parse_tool_calls( + response: _rt::Vec, + ) -> Result<_rt::Vec, _rt::String>; + } + #[doc(hidden)] + macro_rules! __export_amplifier_modules_provider_1_0_0_cabi { + ($ty:ident with_types_in $($path_to_types:tt)*) => { + const _ : () = { #[unsafe (export_name = + "amplifier:modules/provider@1.0.0#get-info")] unsafe extern "C" + fn export_get_info() -> * mut u8 { unsafe { $($path_to_types)*:: + _export_get_info_cabi::<$ty > () } } #[unsafe (export_name = + "cabi_post_amplifier:modules/provider@1.0.0#get-info")] unsafe + extern "C" fn _post_return_get_info(arg0 : * mut u8,) { unsafe { + $($path_to_types)*:: __post_return_get_info::<$ty > (arg0) } } + #[unsafe (export_name = + "amplifier:modules/provider@1.0.0#list-models")] unsafe extern + "C" fn export_list_models() -> * mut u8 { unsafe { + $($path_to_types)*:: _export_list_models_cabi::<$ty > () } } + #[unsafe (export_name = + "cabi_post_amplifier:modules/provider@1.0.0#list-models")] unsafe + extern "C" fn _post_return_list_models(arg0 : * mut u8,) { unsafe + { $($path_to_types)*:: __post_return_list_models::<$ty > (arg0) } + } #[unsafe (export_name = + 
"amplifier:modules/provider@1.0.0#complete")] unsafe extern "C" + fn export_complete(arg0 : * mut u8, arg1 : usize,) -> * mut u8 { + unsafe { $($path_to_types)*:: _export_complete_cabi::<$ty > + (arg0, arg1) } } #[unsafe (export_name = + "cabi_post_amplifier:modules/provider@1.0.0#complete")] unsafe + extern "C" fn _post_return_complete(arg0 : * mut u8,) { unsafe { + $($path_to_types)*:: __post_return_complete::<$ty > (arg0) } } + #[unsafe (export_name = + "amplifier:modules/provider@1.0.0#parse-tool-calls")] unsafe + extern "C" fn export_parse_tool_calls(arg0 : * mut u8, arg1 : + usize,) -> * mut u8 { unsafe { $($path_to_types)*:: + _export_parse_tool_calls_cabi::<$ty > (arg0, arg1) } } #[unsafe + (export_name = + "cabi_post_amplifier:modules/provider@1.0.0#parse-tool-calls")] + unsafe extern "C" fn _post_return_parse_tool_calls(arg0 : * mut + u8,) { unsafe { $($path_to_types)*:: + __post_return_parse_tool_calls::<$ty > (arg0) } } }; + }; + } + #[doc(hidden)] + pub(crate) use __export_amplifier_modules_provider_1_0_0_cabi; + #[cfg_attr(target_pointer_width = "64", repr(align(8)))] + #[cfg_attr(target_pointer_width = "32", repr(align(4)))] + struct _RetArea( + [::core::mem::MaybeUninit< + u8, + >; 3 * ::core::mem::size_of::<*const u8>()], + ); + static mut _RET_AREA: _RetArea = _RetArea( + [::core::mem::MaybeUninit::uninit(); 3 + * ::core::mem::size_of::<*const u8>()], + ); + } + } + } +} +#[rustfmt::skip] +mod _rt { + #![allow(dead_code, clippy::all)] + #[cfg(target_arch = "wasm32")] + pub fn run_ctors_once() { + wit_bindgen_rt::run_ctors_once(); + } + pub unsafe fn cabi_dealloc(ptr: *mut u8, size: usize, align: usize) { + if size == 0 { + return; + } + let layout = alloc::Layout::from_size_align_unchecked(size, align); + alloc::dealloc(ptr, layout); + } + pub use alloc_crate::vec::Vec; + pub use alloc_crate::string::String; + pub use alloc_crate::alloc; + extern crate alloc as alloc_crate; +} +/// Generates `#[unsafe(no_mangle)]` functions to export the specified 
type as +/// the root implementation of all generated traits. +/// +/// For more information see the documentation of `wit_bindgen::generate!`. +/// +/// ```rust +/// # macro_rules! export{ ($($t:tt)*) => (); } +/// # trait Guest {} +/// struct MyType; +/// +/// impl Guest for MyType { +/// // ... +/// } +/// +/// export!(MyType); +/// ``` +#[allow(unused_macros)] +#[doc(hidden)] +macro_rules! __export_provider_module_impl { + ($ty:ident) => { + self::export!($ty with_types_in self); + }; + ($ty:ident with_types_in $($path_to_types_root:tt)*) => { + $($path_to_types_root)*:: + exports::amplifier::modules::provider::__export_amplifier_modules_provider_1_0_0_cabi!($ty + with_types_in $($path_to_types_root)*:: exports::amplifier::modules::provider); + }; +} +#[doc(inline)] +pub(crate) use __export_provider_module_impl as export; +#[cfg(target_arch = "wasm32")] +#[unsafe( + link_section = "component-type:wit-bindgen:0.41.0:amplifier:modules@1.0.0:provider-module:encoded world" +)] +#[doc(hidden)] +#[allow(clippy::octal_escapes)] +pub static __WIT_BINDGEN_COMPONENT_TYPE: [u8; 335] = *b"\ +\0asm\x0d\0\x01\0\0\x19\x16wit-component-encoding\x04\0\x07\xc9\x01\x01A\x02\x01\ +A\x02\x01B\x0a\x01p}\x01@\0\0\0\x04\0\x08get-info\x01\x01\x01j\x01\0\x01s\x01@\0\ +\0\x02\x04\0\x0blist-models\x01\x03\x01@\x01\x07request\0\0\x02\x04\0\x08complet\ +e\x01\x04\x01@\x01\x08response\0\0\x02\x04\0\x10parse-tool-calls\x01\x05\x04\0\x20\ +amplifier:modules/provider@1.0.0\x05\0\x04\0'amplifier:modules/provider-module@1\ +.0.0\x04\0\x0b\x15\x01\0\x0fprovider-module\x03\0\0\0G\x09producers\x01\x0cproce\ +ssed-by\x02\x0dwit-component\x070.227.1\x10wit-bindgen-rust\x060.41.0"; +#[inline(never)] +#[doc(hidden)] +pub fn __link_custom_section_describing_imports() { + wit_bindgen_rt::maybe_link_cabi_realloc(); +} diff --git a/tests/fixtures/wasm/src/echo-provider/src/lib.rs b/tests/fixtures/wasm/src/echo-provider/src/lib.rs new file mode 100644 index 0000000..4650eca --- /dev/null +++ 
b/tests/fixtures/wasm/src/echo-provider/src/lib.rs @@ -0,0 +1,54 @@ +#[allow(warnings)] +mod bindings; + +use amplifier_guest::{ChatResponse, ModelInfo, Provider, ProviderInfo}; +use serde_json::Value; +use std::collections::HashMap; + +#[derive(Default)] +struct EchoProvider; + +impl Provider for EchoProvider { + fn name(&self) -> &str { + "echo-provider" + } + + fn get_info(&self) -> ProviderInfo { + ProviderInfo { + id: "echo-provider".to_string(), + display_name: "Echo Provider".to_string(), + credential_env_vars: vec![], + capabilities: vec!["chat".to_string()], + defaults: HashMap::new(), + } + } + + fn list_models(&self) -> Result, String> { + Ok(vec![ModelInfo { + id: "echo-model".to_string(), + display_name: "Echo Model".to_string(), + context_window: 4096, + max_output_tokens: 1024, + capabilities: vec!["chat".to_string()], + defaults: HashMap::new(), + }]) + } + + fn complete(&self, _request: Value) -> Result { + Ok(ChatResponse { + content: vec![serde_json::json!({ + "type": "text", + "text": "Echo response from WASM provider" + })], + tool_calls: None, + finish_reason: Some("stop".to_string()), + extra: HashMap::new(), + }) + } + + fn parse_tool_calls(&self, _response: &ChatResponse) -> Vec { + vec![] + } +} + +amplifier_guest::export_provider!(EchoProvider); diff --git a/tests/fixtures/wasm/src/echo-provider/wit/provider.wit b/tests/fixtures/wasm/src/echo-provider/wit/provider.wit new file mode 100644 index 0000000..4e5c3bb --- /dev/null +++ b/tests/fixtures/wasm/src/echo-provider/wit/provider.wit @@ -0,0 +1,26 @@ +// Minimal WIT for provider-module world. +// Extracted from the main amplifier-modules.wit without the WASI HTTP import, +// since the echo-provider is a pure-compute fixture (no real HTTP needed). + +package amplifier:modules@1.0.0; + +/// Provider interface — LLM completions in any language. +interface provider { + /// Return provider metadata (ProviderInfo proto, serialized). + get-info: func() -> list; + + /// List available models. 
Returns ListModelsResponse proto. + list-models: func() -> result, string>; + + /// Generate a completion (ChatRequest proto → ChatResponse proto). + complete: func(request: list) -> result, string>; + + /// Extract tool calls from a response (ChatResponse proto → + /// ParseToolCallsResponse proto). + parse-tool-calls: func(response: list) -> result, string>; +} + +/// Tier 2: Provider module (echo variant — no HTTP import needed). +world provider-module { + export provider; +} diff --git a/tests/fixtures/wasm/src/echo-tool/Cargo.toml b/tests/fixtures/wasm/src/echo-tool/Cargo.toml new file mode 100644 index 0000000..c12c078 --- /dev/null +++ b/tests/fixtures/wasm/src/echo-tool/Cargo.toml @@ -0,0 +1,21 @@ +[package] +name = "echo-tool" +version = "0.1.0" +edition = "2021" + +[lib] +crate-type = ["cdylib"] + +[dependencies] +amplifier-guest = { path = "../../../../../crates/amplifier-guest" } +serde_json = "1" +wit-bindgen-rt = "0.41" + +[package.metadata.component] +package = "amplifier:echo-tool" + +[package.metadata.component.target] +world = "tool-module" +path = "wit" + +[workspace] diff --git a/tests/fixtures/wasm/src/echo-tool/amplifier.toml b/tests/fixtures/wasm/src/echo-tool/amplifier.toml new file mode 100644 index 0000000..c7b9862 --- /dev/null +++ b/tests/fixtures/wasm/src/echo-tool/amplifier.toml @@ -0,0 +1,4 @@ +[module] +transport = "wasm" +type = "tool" + diff --git a/tests/fixtures/wasm/src/echo-tool/src/bindings.rs b/tests/fixtures/wasm/src/echo-tool/src/bindings.rs new file mode 100644 index 0000000..c37ae60 --- /dev/null +++ b/tests/fixtures/wasm/src/echo-tool/src/bindings.rs @@ -0,0 +1,220 @@ +// Generated by `wit-bindgen` 0.41.0. DO NOT EDIT! +// Options used: +// * runtime_path: "wit_bindgen_rt" +#[rustfmt::skip] +#[allow(dead_code, clippy::all)] +pub mod exports { + pub mod amplifier { + pub mod modules { + /// Tool module interface — exposes a single tool to the kernel. 
+ #[allow(dead_code, async_fn_in_trait, unused_imports, clippy::all)] + pub mod tool { + #[used] + #[doc(hidden)] + static __FORCE_SECTION_REF: fn() = super::super::super::super::__link_custom_section_describing_imports; + use super::super::super::super::_rt; + #[doc(hidden)] + #[allow(non_snake_case)] + pub unsafe fn _export_get_spec_cabi() -> *mut u8 { + #[cfg(target_arch = "wasm32")] _rt::run_ctors_once(); + let result0 = T::get_spec(); + let ptr1 = (&raw mut _RET_AREA.0).cast::(); + let vec2 = (result0).into_boxed_slice(); + let ptr2 = vec2.as_ptr().cast::(); + let len2 = vec2.len(); + ::core::mem::forget(vec2); + *ptr1.add(::core::mem::size_of::<*const u8>()).cast::() = len2; + *ptr1.add(0).cast::<*mut u8>() = ptr2.cast_mut(); + ptr1 + } + #[doc(hidden)] + #[allow(non_snake_case)] + pub unsafe fn __post_return_get_spec(arg0: *mut u8) { + let l0 = *arg0.add(0).cast::<*mut u8>(); + let l1 = *arg0 + .add(::core::mem::size_of::<*const u8>()) + .cast::(); + let base2 = l0; + let len2 = l1; + _rt::cabi_dealloc(base2, len2 * 1, 1); + } + #[doc(hidden)] + #[allow(non_snake_case)] + pub unsafe fn _export_execute_cabi( + arg0: *mut u8, + arg1: usize, + ) -> *mut u8 { + #[cfg(target_arch = "wasm32")] _rt::run_ctors_once(); + let len0 = arg1; + let result1 = T::execute( + _rt::Vec::from_raw_parts(arg0.cast(), len0, len0), + ); + let ptr2 = (&raw mut _RET_AREA.0).cast::(); + match result1 { + Ok(e) => { + *ptr2.add(0).cast::() = (0i32) as u8; + let vec3 = (e).into_boxed_slice(); + let ptr3 = vec3.as_ptr().cast::(); + let len3 = vec3.len(); + ::core::mem::forget(vec3); + *ptr2 + .add(2 * ::core::mem::size_of::<*const u8>()) + .cast::() = len3; + *ptr2 + .add(::core::mem::size_of::<*const u8>()) + .cast::<*mut u8>() = ptr3.cast_mut(); + } + Err(e) => { + *ptr2.add(0).cast::() = (1i32) as u8; + let vec4 = (e.into_bytes()).into_boxed_slice(); + let ptr4 = vec4.as_ptr().cast::(); + let len4 = vec4.len(); + ::core::mem::forget(vec4); + *ptr2 + .add(2 * 
::core::mem::size_of::<*const u8>()) + .cast::() = len4; + *ptr2 + .add(::core::mem::size_of::<*const u8>()) + .cast::<*mut u8>() = ptr4.cast_mut(); + } + }; + ptr2 + } + #[doc(hidden)] + #[allow(non_snake_case)] + pub unsafe fn __post_return_execute(arg0: *mut u8) { + let l0 = i32::from(*arg0.add(0).cast::()); + match l0 { + 0 => { + let l1 = *arg0 + .add(::core::mem::size_of::<*const u8>()) + .cast::<*mut u8>(); + let l2 = *arg0 + .add(2 * ::core::mem::size_of::<*const u8>()) + .cast::(); + let base3 = l1; + let len3 = l2; + _rt::cabi_dealloc(base3, len3 * 1, 1); + } + _ => { + let l4 = *arg0 + .add(::core::mem::size_of::<*const u8>()) + .cast::<*mut u8>(); + let l5 = *arg0 + .add(2 * ::core::mem::size_of::<*const u8>()) + .cast::(); + _rt::cabi_dealloc(l4, l5, 1); + } + } + } + pub trait Guest { + /// Return the tool specification (ToolSpec proto, serialized). + fn get_spec() -> _rt::Vec; + /// Execute the tool with proto-serialized input (ToolExecuteRequest). + /// Returns proto-serialized ToolExecuteResponse on success. + fn execute(input: _rt::Vec) -> Result<_rt::Vec, _rt::String>; + } + #[doc(hidden)] + macro_rules! 
__export_amplifier_modules_tool_1_0_0_cabi { + ($ty:ident with_types_in $($path_to_types:tt)*) => { + const _ : () = { #[unsafe (export_name = + "amplifier:modules/tool@1.0.0#get-spec")] unsafe extern "C" fn + export_get_spec() -> * mut u8 { unsafe { $($path_to_types)*:: + _export_get_spec_cabi::<$ty > () } } #[unsafe (export_name = + "cabi_post_amplifier:modules/tool@1.0.0#get-spec")] unsafe extern + "C" fn _post_return_get_spec(arg0 : * mut u8,) { unsafe { + $($path_to_types)*:: __post_return_get_spec::<$ty > (arg0) } } + #[unsafe (export_name = "amplifier:modules/tool@1.0.0#execute")] + unsafe extern "C" fn export_execute(arg0 : * mut u8, arg1 : + usize,) -> * mut u8 { unsafe { $($path_to_types)*:: + _export_execute_cabi::<$ty > (arg0, arg1) } } #[unsafe + (export_name = "cabi_post_amplifier:modules/tool@1.0.0#execute")] + unsafe extern "C" fn _post_return_execute(arg0 : * mut u8,) { + unsafe { $($path_to_types)*:: __post_return_execute::<$ty > + (arg0) } } }; + }; + } + #[doc(hidden)] + pub(crate) use __export_amplifier_modules_tool_1_0_0_cabi; + #[cfg_attr(target_pointer_width = "64", repr(align(8)))] + #[cfg_attr(target_pointer_width = "32", repr(align(4)))] + struct _RetArea( + [::core::mem::MaybeUninit< + u8, + >; 3 * ::core::mem::size_of::<*const u8>()], + ); + static mut _RET_AREA: _RetArea = _RetArea( + [::core::mem::MaybeUninit::uninit(); 3 + * ::core::mem::size_of::<*const u8>()], + ); + } + } + } +} +#[rustfmt::skip] +mod _rt { + #![allow(dead_code, clippy::all)] + #[cfg(target_arch = "wasm32")] + pub fn run_ctors_once() { + wit_bindgen_rt::run_ctors_once(); + } + pub unsafe fn cabi_dealloc(ptr: *mut u8, size: usize, align: usize) { + if size == 0 { + return; + } + let layout = alloc::Layout::from_size_align_unchecked(size, align); + alloc::dealloc(ptr, layout); + } + pub use alloc_crate::vec::Vec; + pub use alloc_crate::string::String; + pub use alloc_crate::alloc; + extern crate alloc as alloc_crate; +} +/// Generates `#[unsafe(no_mangle)]` 
functions to export the specified type as +/// the root implementation of all generated traits. +/// +/// For more information see the documentation of `wit_bindgen::generate!`. +/// +/// ```rust +/// # macro_rules! export{ ($($t:tt)*) => (); } +/// # trait Guest {} +/// struct MyType; +/// +/// impl Guest for MyType { +/// // ... +/// } +/// +/// export!(MyType); +/// ``` +#[allow(unused_macros)] +#[doc(hidden)] +macro_rules! __export_tool_module_impl { + ($ty:ident) => { + self::export!($ty with_types_in self); + }; + ($ty:ident with_types_in $($path_to_types_root:tt)*) => { + $($path_to_types_root)*:: + exports::amplifier::modules::tool::__export_amplifier_modules_tool_1_0_0_cabi!($ty + with_types_in $($path_to_types_root)*:: exports::amplifier::modules::tool); + }; +} +#[doc(inline)] +pub(crate) use __export_tool_module_impl as export; +#[cfg(target_arch = "wasm32")] +#[unsafe( + link_section = "component-type:wit-bindgen:0.41.0:amplifier:modules@1.0.0:tool-module:encoded world" +)] +#[doc(hidden)] +#[allow(clippy::octal_escapes)] +pub static __WIT_BINDGEN_COMPONENT_TYPE: [u8; 263] = *b"\ +\0asm\x0d\0\x01\0\0\x19\x16wit-component-encoding\x04\0\x07\x85\x01\x01A\x02\x01\ +A\x02\x01B\x06\x01p}\x01@\0\0\0\x04\0\x08get-spec\x01\x01\x01j\x01\0\x01s\x01@\x01\ +\x05input\0\0\x02\x04\0\x07execute\x01\x03\x04\0\x1camplifier:modules/tool@1.0.0\ +\x05\0\x04\0#amplifier:modules/tool-module@1.0.0\x04\0\x0b\x11\x01\0\x0btool-mod\ +ule\x03\0\0\0G\x09producers\x01\x0cprocessed-by\x02\x0dwit-component\x070.227.1\x10\ +wit-bindgen-rust\x060.41.0"; +#[inline(never)] +#[doc(hidden)] +pub fn __link_custom_section_describing_imports() { + wit_bindgen_rt::maybe_link_cabi_realloc(); +} diff --git a/tests/fixtures/wasm/src/echo-tool/src/lib.rs b/tests/fixtures/wasm/src/echo-tool/src/lib.rs new file mode 100644 index 0000000..317c617 --- /dev/null +++ b/tests/fixtures/wasm/src/echo-tool/src/lib.rs @@ -0,0 +1,38 @@ +#[allow(warnings)] +mod bindings; + +use amplifier_guest::{Tool, 
ToolSpec, ToolResult, Value}; +use std::collections::HashMap; + +#[derive(Default)] +struct EchoTool; + +impl Tool for EchoTool { + fn name(&self) -> &str { + "echo-tool" + } + + fn get_spec(&self) -> ToolSpec { + let mut params = HashMap::new(); + params.insert("type".to_string(), serde_json::json!("object")); + params.insert( + "properties".to_string(), + serde_json::json!({"input": {"type": "string"}}), + ); + ToolSpec { + name: "echo-tool".to_string(), + parameters: params, + description: Some("Echoes input back as output".to_string()), + } + } + + fn execute(&self, input: Value) -> Result { + Ok(ToolResult { + success: true, + output: Some(input), + error: None, + }) + } +} + +amplifier_guest::export_tool!(EchoTool); diff --git a/tests/fixtures/wasm/src/echo-tool/wit/tool.wit b/tests/fixtures/wasm/src/echo-tool/wit/tool.wit new file mode 100644 index 0000000..ff749c3 --- /dev/null +++ b/tests/fixtures/wasm/src/echo-tool/wit/tool.wit @@ -0,0 +1,20 @@ +// Minimal WIT for tool-module world. +// Extracted from the main amplifier-modules.wit to avoid pulling in +// WASI HTTP dependencies that are only needed by the provider-module world. + +package amplifier:modules@1.0.0; + +/// Tool module interface — exposes a single tool to the kernel. +interface tool { + /// Return the tool specification (ToolSpec proto, serialized). + get-spec: func() -> list; + + /// Execute the tool with proto-serialized input (ToolExecuteRequest). + /// Returns proto-serialized ToolExecuteResponse on success. + execute: func(input: list) -> result, string>; +} + +/// Tier 1: Pure-compute tool module. +world tool-module { + export tool; +} diff --git a/tests/fixtures/wasm/src/infinite-loop/Cargo.lock b/tests/fixtures/wasm/src/infinite-loop/Cargo.lock new file mode 100644 index 0000000..aae63b6 --- /dev/null +++ b/tests/fixtures/wasm/src/infinite-loop/Cargo.lock @@ -0,0 +1,861 @@ +# This file is automatically @generated by Cargo. +# It is not intended for manual editing. 
+version = 4 + +[[package]] +name = "adler2" +version = "2.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "320119579fcad9c21884f5c4861d16174d0e06250625266f50fe6898340abefa" + +[[package]] +name = "amplifier-guest" +version = "0.1.0" +dependencies = [ + "prost", + "serde", + "serde_json", + "wit-bindgen", +] + +[[package]] +name = "anyhow" +version = "1.0.102" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7f202df86484c868dbad7eaa557ef785d5c66295e41b460ef922eca0723b842c" + +[[package]] +name = "auditable-serde" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5c7bf8143dfc3c0258df908843e169b5cc5fcf76c7718bd66135ef4a9cd558c5" +dependencies = [ + "semver", + "serde", + "serde_json", + "topological-sort", +] + +[[package]] +name = "bitflags" +version = "2.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "843867be96c8daad0d758b57df9392b6d8d271134fce549de6ce169ff98a92af" + +[[package]] +name = "bytes" +version = "1.11.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e748733b7cbc798e1434b6ac524f0c1ff2ab456fe201501e6497c8417a4fc33" + +[[package]] +name = "cfg-if" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9330f8b2ff13f34540b44e946ef35111825727b38d33286ef986142615121801" + +[[package]] +name = "crc32fast" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9481c1c90cbf2ac953f07c8d4a58aa3945c425b7185c9154d67a65e4230da511" +dependencies = [ + "cfg-if", +] + +[[package]] +name = "displaydoc" +version = "0.2.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "97369cbbc041bc366949bc74d34658d6cda5621039731c6310521892a3a20ae0" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "either" +version = "1.15.0" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "48c757948c5ede0e46177b7add2e67155f70e33c07fea8284df6576da70b3719" + +[[package]] +name = "equivalent" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "877a4ace8713b0bcf2a4e7eec82529c029f1d0619886d18145fea96c3ffe5c0f" + +[[package]] +name = "flate2" +version = "1.1.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "843fba2746e448b37e26a819579957415c8cef339bf08564fe8b7ddbd959573c" +dependencies = [ + "crc32fast", + "miniz_oxide", +] + +[[package]] +name = "foldhash" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d9c4f5dac5e15c24eb999c26181a6ca40b39fe946cbe4c263c7209467bc83af2" + +[[package]] +name = "form_urlencoded" +version = "1.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cb4cb245038516f5f85277875cdaa4f7d2c9a0fa0468de06ed190163b1581fcf" +dependencies = [ + "percent-encoding", +] + +[[package]] +name = "futures" +version = "0.3.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8b147ee9d1f6d097cef9ce628cd2ee62288d963e16fb287bd9286455b241382d" +dependencies = [ + "futures-channel", + "futures-core", + "futures-executor", + "futures-io", + "futures-sink", + "futures-task", + "futures-util", +] + +[[package]] +name = "futures-channel" +version = "0.3.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "07bbe89c50d7a535e539b8c17bc0b49bdb77747034daa8087407d655f3f7cc1d" +dependencies = [ + "futures-core", + "futures-sink", +] + +[[package]] +name = "futures-core" +version = "0.3.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7e3450815272ef58cec6d564423f6e755e25379b217b0bc688e295ba24df6b1d" + +[[package]] +name = "futures-executor" +version = "0.3.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"baf29c38818342a3b26b5b923639e7b1f4a61fc5e76102d4b1981c6dc7a7579d" +dependencies = [ + "futures-core", + "futures-task", + "futures-util", +] + +[[package]] +name = "futures-io" +version = "0.3.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cecba35d7ad927e23624b22ad55235f2239cfa44fd10428eecbeba6d6a717718" + +[[package]] +name = "futures-macro" +version = "0.3.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e835b70203e41293343137df5c0664546da5745f82ec9b84d40be8336958447b" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "futures-sink" +version = "0.3.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c39754e157331b013978ec91992bde1ac089843443c49cbc7f46150b0fad0893" + +[[package]] +name = "futures-task" +version = "0.3.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "037711b3d59c33004d3856fbdc83b99d4ff37a24768fa1be9ce3538a1cde4393" + +[[package]] +name = "futures-util" +version = "0.3.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "389ca41296e6190b48053de0321d02a77f32f8a5d2461dd38762c0593805c6d6" +dependencies = [ + "futures-channel", + "futures-core", + "futures-io", + "futures-macro", + "futures-sink", + "futures-task", + "memchr", + "pin-project-lite", + "slab", +] + +[[package]] +name = "hashbrown" +version = "0.15.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9229cfe53dfd69f0609a49f65461bd93001ea1ef889cd5529dd176593f5338a1" +dependencies = [ + "foldhash", +] + +[[package]] +name = "hashbrown" +version = "0.16.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "841d1cc9bed7f9236f321df977030373f4a4163ae1a7dbfe1a51a2c1a51d9100" + +[[package]] +name = "heck" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"2304e00983f87ffb38b55b444b5e3b60a884b5d30c0fca7d82fe33449bbe55ea" + +[[package]] +name = "icu_collections" +version = "2.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4c6b649701667bbe825c3b7e6388cb521c23d88644678e83c0c4d0a621a34b43" +dependencies = [ + "displaydoc", + "potential_utf", + "yoke", + "zerofrom", + "zerovec", +] + +[[package]] +name = "icu_locale_core" +version = "2.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "edba7861004dd3714265b4db54a3c390e880ab658fec5f7db895fae2046b5bb6" +dependencies = [ + "displaydoc", + "litemap", + "tinystr", + "writeable", + "zerovec", +] + +[[package]] +name = "icu_normalizer" +version = "2.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5f6c8828b67bf8908d82127b2054ea1b4427ff0230ee9141c54251934ab1b599" +dependencies = [ + "icu_collections", + "icu_normalizer_data", + "icu_properties", + "icu_provider", + "smallvec", + "zerovec", +] + +[[package]] +name = "icu_normalizer_data" +version = "2.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7aedcccd01fc5fe81e6b489c15b247b8b0690feb23304303a9e560f37efc560a" + +[[package]] +name = "icu_properties" +version = "2.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "020bfc02fe870ec3a66d93e677ccca0562506e5872c650f893269e08615d74ec" +dependencies = [ + "icu_collections", + "icu_locale_core", + "icu_properties_data", + "icu_provider", + "zerotrie", + "zerovec", +] + +[[package]] +name = "icu_properties_data" +version = "2.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "616c294cf8d725c6afcd8f55abc17c56464ef6211f9ed59cccffe534129c77af" + +[[package]] +name = "icu_provider" +version = "2.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "85962cf0ce02e1e0a629cc34e7ca3e373ce20dda4c4d7294bbd0bf1fdb59e614" +dependencies = [ + "displaydoc", + 
"icu_locale_core", + "writeable", + "yoke", + "zerofrom", + "zerotrie", + "zerovec", +] + +[[package]] +name = "id-arena" +version = "2.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3d3067d79b975e8844ca9eb072e16b31c3c1c36928edf9c6789548c524d0d954" + +[[package]] +name = "idna" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3b0875f23caa03898994f6ddc501886a45c7d3d62d04d2d90788d47be1b1e4de" +dependencies = [ + "idna_adapter", + "smallvec", + "utf8_iter", +] + +[[package]] +name = "idna_adapter" +version = "1.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3acae9609540aa318d1bc588455225fb2085b9ed0c4f6bd0d9d5bcd86f1a0344" +dependencies = [ + "icu_normalizer", + "icu_properties", +] + +[[package]] +name = "indexmap" +version = "2.13.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7714e70437a7dc3ac8eb7e6f8df75fd8eb422675fc7678aff7364301092b1017" +dependencies = [ + "equivalent", + "hashbrown 0.16.1", + "serde", + "serde_core", +] + +[[package]] +name = "infinite-loop" +version = "0.1.0" +dependencies = [ + "amplifier-guest", + "serde_json", + "wit-bindgen-rt", +] + +[[package]] +name = "itertools" +version = "0.14.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2b192c782037fadd9cfa75548310488aabdbf3d2da73885b31bd0abd03351285" +dependencies = [ + "either", +] + +[[package]] +name = "itoa" +version = "1.0.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "92ecc6618181def0457392ccd0ee51198e065e016d1d527a7ac1b6dc7c1f09d2" + +[[package]] +name = "leb128fmt" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "09edd9e8b54e49e587e4f6295a7d29c3ea94d469cb40ab8ca70b288248a81db2" + +[[package]] +name = "litemap" +version = "0.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"6373607a59f0be73a39b6fe456b8192fcc3585f602af20751600e974dd455e77" + +[[package]] +name = "log" +version = "0.4.29" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5e5032e24019045c762d3c0f28f5b6b8bbf38563a65908389bf7978758920897" + +[[package]] +name = "memchr" +version = "2.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f8ca58f447f06ed17d5fc4043ce1b10dd205e060fb3ce5b979b8ed8e59ff3f79" + +[[package]] +name = "miniz_oxide" +version = "0.8.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1fa76a2c86f704bdb222d66965fb3d63269ce38518b83cb0575fca855ebb6316" +dependencies = [ + "adler2", + "simd-adler32", +] + +[[package]] +name = "once_cell" +version = "1.21.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "42f5e15c9953c5e4ccceeb2e7382a716482c34515315f7b03532b8b4e8393d2d" + +[[package]] +name = "percent-encoding" +version = "2.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9b4f627cb1b25917193a259e49bdad08f671f8d9708acfd5fe0a8c1455d87220" + +[[package]] +name = "pin-project-lite" +version = "0.2.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a89322df9ebe1c1578d689c92318e070967d1042b512afbe49518723f4e6d5cd" + +[[package]] +name = "potential_utf" +version = "0.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b73949432f5e2a09657003c25bca5e19a0e9c84f8058ca374f49e0ebe605af77" +dependencies = [ + "zerovec", +] + +[[package]] +name = "prettyplease" +version = "0.2.37" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "479ca8adacdd7ce8f1fb39ce9ecccbfe93a3f1344b3d0d97f20bc0196208f62b" +dependencies = [ + "proc-macro2", + "syn", +] + +[[package]] +name = "proc-macro2" +version = "1.0.106" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"8fd00f0bb2e90d81d1044c2b32617f68fcb9fa3bb7640c23e9c748e53fb30934" +dependencies = [ + "unicode-ident", +] + +[[package]] +name = "prost" +version = "0.13.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2796faa41db3ec313a31f7624d9286acf277b52de526150b7e69f3debf891ee5" +dependencies = [ + "bytes", + "prost-derive", +] + +[[package]] +name = "prost-derive" +version = "0.13.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8a56d757972c98b346a9b766e3f02746cde6dd1cd1d1d563472929fdd74bec4d" +dependencies = [ + "anyhow", + "itertools", + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "quote" +version = "1.0.45" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "41f2619966050689382d2b44f664f4bc593e129785a36d6ee376ddf37259b924" +dependencies = [ + "proc-macro2", +] + +[[package]] +name = "semver" +version = "1.0.27" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d767eb0aabc880b29956c35734170f26ed551a859dbd361d140cdbeca61ab1e2" +dependencies = [ + "serde", + "serde_core", +] + +[[package]] +name = "serde" +version = "1.0.228" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9a8e94ea7f378bd32cbbd37198a4a91436180c5bb472411e48b5ec2e2124ae9e" +dependencies = [ + "serde_core", + "serde_derive", +] + +[[package]] +name = "serde_core" +version = "1.0.228" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "41d385c7d4ca58e59fc732af25c3983b67ac852c1a25000afe1175de458b67ad" +dependencies = [ + "serde_derive", +] + +[[package]] +name = "serde_derive" +version = "1.0.228" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d540f220d3187173da220f885ab66608367b6574e925011a9353e4badda91d79" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "serde_json" +version = "1.0.149" +source = "registry+https://github.com/rust-lang/crates.io-index" 
+checksum = "83fc039473c5595ace860d8c4fafa220ff474b3fc6bfdb4293327f1a37e94d86" +dependencies = [ + "itoa", + "memchr", + "serde", + "serde_core", + "zmij", +] + +[[package]] +name = "simd-adler32" +version = "0.3.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e320a6c5ad31d271ad523dcf3ad13e2767ad8b1cb8f047f75a8aeaf8da139da2" + +[[package]] +name = "slab" +version = "0.4.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0c790de23124f9ab44544d7ac05d60440adc586479ce501c1d6d7da3cd8c9cf5" + +[[package]] +name = "smallvec" +version = "1.15.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "67b1b7a3b5fe4f1376887184045fcf45c69e92af734b7aaddc05fb777b6fbd03" + +[[package]] +name = "spdx" +version = "0.10.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c3e17e880bafaeb362a7b751ec46bdc5b61445a188f80e0606e68167cd540fa3" +dependencies = [ + "smallvec", +] + +[[package]] +name = "stable_deref_trait" +version = "1.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6ce2be8dc25455e1f91df71bfa12ad37d7af1092ae736f3a6cd0e37bc7810596" + +[[package]] +name = "syn" +version = "2.0.117" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e665b8803e7b1d2a727f4023456bbbbe74da67099c585258af0ad9c5013b9b99" +dependencies = [ + "proc-macro2", + "quote", + "unicode-ident", +] + +[[package]] +name = "synstructure" +version = "0.13.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "728a70f3dbaf5bab7f0c4b1ac8d7ae5ea60a4b5549c8a5914361c99147a709d2" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "tinystr" +version = "0.8.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "42d3e9c45c09de15d06dd8acf5f4e0e399e85927b7f00711024eb7ae10fa4869" +dependencies = [ + "displaydoc", + "zerovec", +] + +[[package]] +name = 
"topological-sort" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ea68304e134ecd095ac6c3574494fc62b909f416c4fca77e440530221e549d3d" + +[[package]] +name = "unicode-ident" +version = "1.0.24" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e6e4313cd5fcd3dad5cafa179702e2b244f760991f45397d14d4ebf38247da75" + +[[package]] +name = "unicode-xid" +version = "0.2.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ebc1c04c71510c7f702b52b7c350734c9ff1295c464a03335b00bb84fc54f853" + +[[package]] +name = "url" +version = "2.5.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ff67a8a4397373c3ef660812acab3268222035010ab8680ec4215f38ba3d0eed" +dependencies = [ + "form_urlencoded", + "idna", + "percent-encoding", + "serde", +] + +[[package]] +name = "utf8_iter" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b6c140620e7ffbb22c2dee59cafe6084a59b5ffc27a8859a5f0d494b5d52b6be" + +[[package]] +name = "wasm-encoder" +version = "0.227.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "80bb72f02e7fbf07183443b27b0f3d4144abf8c114189f2e088ed95b696a7822" +dependencies = [ + "leb128fmt", + "wasmparser", +] + +[[package]] +name = "wasm-metadata" +version = "0.227.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ce1ef0faabbbba6674e97a56bee857ccddf942785a336c8b47b42373c922a91d" +dependencies = [ + "anyhow", + "auditable-serde", + "flate2", + "indexmap", + "serde", + "serde_derive", + "serde_json", + "spdx", + "url", + "wasm-encoder", + "wasmparser", +] + +[[package]] +name = "wasmparser" +version = "0.227.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0f51cad774fb3c9461ab9bccc9c62dfb7388397b5deda31bf40e8108ccd678b2" +dependencies = [ + "bitflags", + "hashbrown 0.15.5", + "indexmap", + "semver", +] + 
+[[package]] +name = "wit-bindgen" +version = "0.41.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "10fb6648689b3929d56bbc7eb1acf70c9a42a29eb5358c67c10f54dbd5d695de" +dependencies = [ + "wit-bindgen-rt", + "wit-bindgen-rust-macro", +] + +[[package]] +name = "wit-bindgen-core" +version = "0.41.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "92fa781d4f2ff6d3f27f3cc9b74a73327b31ca0dc4a3ef25a0ce2983e0e5af9b" +dependencies = [ + "anyhow", + "heck", + "wit-parser", +] + +[[package]] +name = "wit-bindgen-rt" +version = "0.41.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c4db52a11d4dfb0a59f194c064055794ee6564eb1ced88c25da2cf76e50c5621" +dependencies = [ + "bitflags", + "futures", + "once_cell", +] + +[[package]] +name = "wit-bindgen-rust" +version = "0.41.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9d0809dc5ba19e2e98661bf32fc0addc5a3ca5bf3a6a7083aa6ba484085ff3ce" +dependencies = [ + "anyhow", + "heck", + "indexmap", + "prettyplease", + "syn", + "wasm-metadata", + "wit-bindgen-core", + "wit-component", +] + +[[package]] +name = "wit-bindgen-rust-macro" +version = "0.41.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ad19eec017904e04c60719592a803ee5da76cb51c81e3f6fbf9457f59db49799" +dependencies = [ + "anyhow", + "prettyplease", + "proc-macro2", + "quote", + "syn", + "wit-bindgen-core", + "wit-bindgen-rust", +] + +[[package]] +name = "wit-component" +version = "0.227.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "635c3adc595422cbf2341a17fb73a319669cc8d33deed3a48368a841df86b676" +dependencies = [ + "anyhow", + "bitflags", + "indexmap", + "log", + "serde", + "serde_derive", + "serde_json", + "wasm-encoder", + "wasm-metadata", + "wasmparser", + "wit-parser", +] + +[[package]] +name = "wit-parser" +version = "0.227.1" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "ddf445ed5157046e4baf56f9138c124a0824d4d1657e7204d71886ad8ce2fc11" +dependencies = [ + "anyhow", + "id-arena", + "indexmap", + "log", + "semver", + "serde", + "serde_derive", + "serde_json", + "unicode-xid", + "wasmparser", +] + +[[package]] +name = "writeable" +version = "0.6.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9edde0db4769d2dc68579893f2306b26c6ecfbe0ef499b013d731b7b9247e0b9" + +[[package]] +name = "yoke" +version = "0.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "72d6e5c6afb84d73944e5cedb052c4680d5657337201555f9f2a16b7406d4954" +dependencies = [ + "stable_deref_trait", + "yoke-derive", + "zerofrom", +] + +[[package]] +name = "yoke-derive" +version = "0.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b659052874eb698efe5b9e8cf382204678a0086ebf46982b79d6ca3182927e5d" +dependencies = [ + "proc-macro2", + "quote", + "syn", + "synstructure", +] + +[[package]] +name = "zerofrom" +version = "0.1.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "50cc42e0333e05660c3587f3bf9d0478688e15d870fab3346451ce7f8c9fbea5" +dependencies = [ + "zerofrom-derive", +] + +[[package]] +name = "zerofrom-derive" +version = "0.1.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d71e5d6e06ab090c67b5e44993ec16b72dcbaabc526db883a360057678b48502" +dependencies = [ + "proc-macro2", + "quote", + "syn", + "synstructure", +] + +[[package]] +name = "zerotrie" +version = "0.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2a59c17a5562d507e4b54960e8569ebee33bee890c70aa3fe7b97e85a9fd7851" +dependencies = [ + "displaydoc", + "yoke", + "zerofrom", +] + +[[package]] +name = "zerovec" +version = "0.11.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"6c28719294829477f525be0186d13efa9a3c602f7ec202ca9e353d310fb9a002" +dependencies = [ + "yoke", + "zerofrom", + "zerovec-derive", +] + +[[package]] +name = "zerovec-derive" +version = "0.11.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "eadce39539ca5cb3985590102671f2567e659fca9666581ad3411d59207951f3" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "zmij" +version = "1.0.21" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b8848ee67ecc8aedbaf3e4122217aff892639231befc6a1b58d29fff4c2cabaa" diff --git a/tests/fixtures/wasm/src/infinite-loop/Cargo.toml b/tests/fixtures/wasm/src/infinite-loop/Cargo.toml new file mode 100644 index 0000000..25e28be --- /dev/null +++ b/tests/fixtures/wasm/src/infinite-loop/Cargo.toml @@ -0,0 +1,21 @@ +[package] +name = "infinite-loop" +version = "0.1.0" +edition = "2021" + +[lib] +crate-type = ["cdylib"] + +[dependencies] +amplifier-guest = { path = "../../../../../crates/amplifier-guest" } +serde_json = "1" +wit-bindgen-rt = "0.41" + +[package.metadata.component] +package = "amplifier:infinite-loop" + +[package.metadata.component.target] +world = "tool-module" +path = "wit" + +[workspace] diff --git a/tests/fixtures/wasm/src/infinite-loop/src/bindings.rs b/tests/fixtures/wasm/src/infinite-loop/src/bindings.rs new file mode 100644 index 0000000..c37ae60 --- /dev/null +++ b/tests/fixtures/wasm/src/infinite-loop/src/bindings.rs @@ -0,0 +1,220 @@ +// Generated by `wit-bindgen` 0.41.0. DO NOT EDIT! +// Options used: +// * runtime_path: "wit_bindgen_rt" +#[rustfmt::skip] +#[allow(dead_code, clippy::all)] +pub mod exports { + pub mod amplifier { + pub mod modules { + /// Tool module interface — exposes a single tool to the kernel. 
+ #[allow(dead_code, async_fn_in_trait, unused_imports, clippy::all)] + pub mod tool { + #[used] + #[doc(hidden)] + static __FORCE_SECTION_REF: fn() = super::super::super::super::__link_custom_section_describing_imports; + use super::super::super::super::_rt; + #[doc(hidden)] + #[allow(non_snake_case)] + pub unsafe fn _export_get_spec_cabi() -> *mut u8 { + #[cfg(target_arch = "wasm32")] _rt::run_ctors_once(); + let result0 = T::get_spec(); + let ptr1 = (&raw mut _RET_AREA.0).cast::(); + let vec2 = (result0).into_boxed_slice(); + let ptr2 = vec2.as_ptr().cast::(); + let len2 = vec2.len(); + ::core::mem::forget(vec2); + *ptr1.add(::core::mem::size_of::<*const u8>()).cast::() = len2; + *ptr1.add(0).cast::<*mut u8>() = ptr2.cast_mut(); + ptr1 + } + #[doc(hidden)] + #[allow(non_snake_case)] + pub unsafe fn __post_return_get_spec(arg0: *mut u8) { + let l0 = *arg0.add(0).cast::<*mut u8>(); + let l1 = *arg0 + .add(::core::mem::size_of::<*const u8>()) + .cast::(); + let base2 = l0; + let len2 = l1; + _rt::cabi_dealloc(base2, len2 * 1, 1); + } + #[doc(hidden)] + #[allow(non_snake_case)] + pub unsafe fn _export_execute_cabi( + arg0: *mut u8, + arg1: usize, + ) -> *mut u8 { + #[cfg(target_arch = "wasm32")] _rt::run_ctors_once(); + let len0 = arg1; + let result1 = T::execute( + _rt::Vec::from_raw_parts(arg0.cast(), len0, len0), + ); + let ptr2 = (&raw mut _RET_AREA.0).cast::(); + match result1 { + Ok(e) => { + *ptr2.add(0).cast::() = (0i32) as u8; + let vec3 = (e).into_boxed_slice(); + let ptr3 = vec3.as_ptr().cast::(); + let len3 = vec3.len(); + ::core::mem::forget(vec3); + *ptr2 + .add(2 * ::core::mem::size_of::<*const u8>()) + .cast::() = len3; + *ptr2 + .add(::core::mem::size_of::<*const u8>()) + .cast::<*mut u8>() = ptr3.cast_mut(); + } + Err(e) => { + *ptr2.add(0).cast::() = (1i32) as u8; + let vec4 = (e.into_bytes()).into_boxed_slice(); + let ptr4 = vec4.as_ptr().cast::(); + let len4 = vec4.len(); + ::core::mem::forget(vec4); + *ptr2 + .add(2 * 
::core::mem::size_of::<*const u8>()) + .cast::() = len4; + *ptr2 + .add(::core::mem::size_of::<*const u8>()) + .cast::<*mut u8>() = ptr4.cast_mut(); + } + }; + ptr2 + } + #[doc(hidden)] + #[allow(non_snake_case)] + pub unsafe fn __post_return_execute(arg0: *mut u8) { + let l0 = i32::from(*arg0.add(0).cast::()); + match l0 { + 0 => { + let l1 = *arg0 + .add(::core::mem::size_of::<*const u8>()) + .cast::<*mut u8>(); + let l2 = *arg0 + .add(2 * ::core::mem::size_of::<*const u8>()) + .cast::(); + let base3 = l1; + let len3 = l2; + _rt::cabi_dealloc(base3, len3 * 1, 1); + } + _ => { + let l4 = *arg0 + .add(::core::mem::size_of::<*const u8>()) + .cast::<*mut u8>(); + let l5 = *arg0 + .add(2 * ::core::mem::size_of::<*const u8>()) + .cast::(); + _rt::cabi_dealloc(l4, l5, 1); + } + } + } + pub trait Guest { + /// Return the tool specification (ToolSpec proto, serialized). + fn get_spec() -> _rt::Vec; + /// Execute the tool with proto-serialized input (ToolExecuteRequest). + /// Returns proto-serialized ToolExecuteResponse on success. + fn execute(input: _rt::Vec) -> Result<_rt::Vec, _rt::String>; + } + #[doc(hidden)] + macro_rules! 
__export_amplifier_modules_tool_1_0_0_cabi { + ($ty:ident with_types_in $($path_to_types:tt)*) => { + const _ : () = { #[unsafe (export_name = + "amplifier:modules/tool@1.0.0#get-spec")] unsafe extern "C" fn + export_get_spec() -> * mut u8 { unsafe { $($path_to_types)*:: + _export_get_spec_cabi::<$ty > () } } #[unsafe (export_name = + "cabi_post_amplifier:modules/tool@1.0.0#get-spec")] unsafe extern + "C" fn _post_return_get_spec(arg0 : * mut u8,) { unsafe { + $($path_to_types)*:: __post_return_get_spec::<$ty > (arg0) } } + #[unsafe (export_name = "amplifier:modules/tool@1.0.0#execute")] + unsafe extern "C" fn export_execute(arg0 : * mut u8, arg1 : + usize,) -> * mut u8 { unsafe { $($path_to_types)*:: + _export_execute_cabi::<$ty > (arg0, arg1) } } #[unsafe + (export_name = "cabi_post_amplifier:modules/tool@1.0.0#execute")] + unsafe extern "C" fn _post_return_execute(arg0 : * mut u8,) { + unsafe { $($path_to_types)*:: __post_return_execute::<$ty > + (arg0) } } }; + }; + } + #[doc(hidden)] + pub(crate) use __export_amplifier_modules_tool_1_0_0_cabi; + #[cfg_attr(target_pointer_width = "64", repr(align(8)))] + #[cfg_attr(target_pointer_width = "32", repr(align(4)))] + struct _RetArea( + [::core::mem::MaybeUninit< + u8, + >; 3 * ::core::mem::size_of::<*const u8>()], + ); + static mut _RET_AREA: _RetArea = _RetArea( + [::core::mem::MaybeUninit::uninit(); 3 + * ::core::mem::size_of::<*const u8>()], + ); + } + } + } +} +#[rustfmt::skip] +mod _rt { + #![allow(dead_code, clippy::all)] + #[cfg(target_arch = "wasm32")] + pub fn run_ctors_once() { + wit_bindgen_rt::run_ctors_once(); + } + pub unsafe fn cabi_dealloc(ptr: *mut u8, size: usize, align: usize) { + if size == 0 { + return; + } + let layout = alloc::Layout::from_size_align_unchecked(size, align); + alloc::dealloc(ptr, layout); + } + pub use alloc_crate::vec::Vec; + pub use alloc_crate::string::String; + pub use alloc_crate::alloc; + extern crate alloc as alloc_crate; +} +/// Generates `#[unsafe(no_mangle)]` 
functions to export the specified type as +/// the root implementation of all generated traits. +/// +/// For more information see the documentation of `wit_bindgen::generate!`. +/// +/// ```rust +/// # macro_rules! export{ ($($t:tt)*) => (); } +/// # trait Guest {} +/// struct MyType; +/// +/// impl Guest for MyType { +/// // ... +/// } +/// +/// export!(MyType); +/// ``` +#[allow(unused_macros)] +#[doc(hidden)] +macro_rules! __export_tool_module_impl { + ($ty:ident) => { + self::export!($ty with_types_in self); + }; + ($ty:ident with_types_in $($path_to_types_root:tt)*) => { + $($path_to_types_root)*:: + exports::amplifier::modules::tool::__export_amplifier_modules_tool_1_0_0_cabi!($ty + with_types_in $($path_to_types_root)*:: exports::amplifier::modules::tool); + }; +} +#[doc(inline)] +pub(crate) use __export_tool_module_impl as export; +#[cfg(target_arch = "wasm32")] +#[unsafe( + link_section = "component-type:wit-bindgen:0.41.0:amplifier:modules@1.0.0:tool-module:encoded world" +)] +#[doc(hidden)] +#[allow(clippy::octal_escapes)] +pub static __WIT_BINDGEN_COMPONENT_TYPE: [u8; 263] = *b"\ +\0asm\x0d\0\x01\0\0\x19\x16wit-component-encoding\x04\0\x07\x85\x01\x01A\x02\x01\ +A\x02\x01B\x06\x01p}\x01@\0\0\0\x04\0\x08get-spec\x01\x01\x01j\x01\0\x01s\x01@\x01\ +\x05input\0\0\x02\x04\0\x07execute\x01\x03\x04\0\x1camplifier:modules/tool@1.0.0\ +\x05\0\x04\0#amplifier:modules/tool-module@1.0.0\x04\0\x0b\x11\x01\0\x0btool-mod\ +ule\x03\0\0\0G\x09producers\x01\x0cprocessed-by\x02\x0dwit-component\x070.227.1\x10\ +wit-bindgen-rust\x060.41.0"; +#[inline(never)] +#[doc(hidden)] +pub fn __link_custom_section_describing_imports() { + wit_bindgen_rt::maybe_link_cabi_realloc(); +} diff --git a/tests/fixtures/wasm/src/infinite-loop/src/lib.rs b/tests/fixtures/wasm/src/infinite-loop/src/lib.rs new file mode 100644 index 0000000..b77a01b --- /dev/null +++ b/tests/fixtures/wasm/src/infinite-loop/src/lib.rs @@ -0,0 +1,29 @@ +#[allow(warnings)] +mod bindings; + +use 
amplifier_guest::{Tool, ToolSpec, ToolResult, Value}; + +#[derive(Default)] +struct InfiniteLoopTool; + +impl Tool for InfiniteLoopTool { + fn name(&self) -> &str { + "infinite-loop" + } + + fn get_spec(&self) -> ToolSpec { + // Enter an infinite loop — epoch interruption should terminate this. + loop { + std::hint::black_box(()); + } + } + + fn execute(&self, _input: Value) -> Result { + // Also an infinite loop in execute, for completeness. + loop { + std::hint::black_box(()); + } + } +} + +amplifier_guest::export_tool!(InfiniteLoopTool); diff --git a/tests/fixtures/wasm/src/infinite-loop/wit/tool.wit b/tests/fixtures/wasm/src/infinite-loop/wit/tool.wit new file mode 100644 index 0000000..ff749c3 --- /dev/null +++ b/tests/fixtures/wasm/src/infinite-loop/wit/tool.wit @@ -0,0 +1,20 @@ +// Minimal WIT for tool-module world. +// Extracted from the main amplifier-modules.wit to avoid pulling in +// WASI HTTP dependencies that are only needed by the provider-module world. + +package amplifier:modules@1.0.0; + +/// Tool module interface — exposes a single tool to the kernel. +interface tool { + /// Return the tool specification (ToolSpec proto, serialized). + get-spec: func() -> list; + + /// Execute the tool with proto-serialized input (ToolExecuteRequest). + /// Returns proto-serialized ToolExecuteResponse on success. + execute: func(input: list) -> result, string>; +} + +/// Tier 1: Pure-compute tool module. 
+world tool-module { + export tool; +} diff --git a/tests/fixtures/wasm/src/memory-context/Cargo.toml b/tests/fixtures/wasm/src/memory-context/Cargo.toml new file mode 100644 index 0000000..02e92ee --- /dev/null +++ b/tests/fixtures/wasm/src/memory-context/Cargo.toml @@ -0,0 +1,21 @@ +[package] +name = "memory-context" +version = "0.1.0" +edition = "2021" + +[lib] +crate-type = ["cdylib"] + +[dependencies] +amplifier-guest = { path = "../../../../../crates/amplifier-guest" } +serde_json = "1" +wit-bindgen-rt = "0.41" + +[package.metadata.component] +package = "amplifier:memory-context" + +[package.metadata.component.target] +world = "context-module" +path = "wit" + +[workspace] diff --git a/tests/fixtures/wasm/src/memory-context/amplifier.toml b/tests/fixtures/wasm/src/memory-context/amplifier.toml new file mode 100644 index 0000000..02fcedb --- /dev/null +++ b/tests/fixtures/wasm/src/memory-context/amplifier.toml @@ -0,0 +1,4 @@ +[module] +transport = "wasm" +type = "context" + diff --git a/tests/fixtures/wasm/src/memory-context/src/bindings.rs b/tests/fixtures/wasm/src/memory-context/src/bindings.rs new file mode 100644 index 0000000..0e78322 --- /dev/null +++ b/tests/fixtures/wasm/src/memory-context/src/bindings.rs @@ -0,0 +1,441 @@ +// Generated by `wit-bindgen` 0.41.0. DO NOT EDIT! +// Options used: +// * runtime_path: "wit_bindgen_rt" +#[rustfmt::skip] +#[allow(dead_code, clippy::all)] +pub mod exports { + pub mod amplifier { + pub mod modules { + /// Context manager interface — owns conversation memory policy. 
+ #[allow(dead_code, async_fn_in_trait, unused_imports, clippy::all)] + pub mod context_manager { + #[used] + #[doc(hidden)] + static __FORCE_SECTION_REF: fn() = super::super::super::super::__link_custom_section_describing_imports; + use super::super::super::super::_rt; + #[doc(hidden)] + #[allow(non_snake_case)] + pub unsafe fn _export_add_message_cabi( + arg0: *mut u8, + arg1: usize, + ) -> *mut u8 { + #[cfg(target_arch = "wasm32")] _rt::run_ctors_once(); + let len0 = arg1; + let result1 = T::add_message( + _rt::Vec::from_raw_parts(arg0.cast(), len0, len0), + ); + let ptr2 = (&raw mut _RET_AREA.0).cast::(); + match result1 { + Ok(_) => { + *ptr2.add(0).cast::() = (0i32) as u8; + } + Err(e) => { + *ptr2.add(0).cast::() = (1i32) as u8; + let vec3 = (e.into_bytes()).into_boxed_slice(); + let ptr3 = vec3.as_ptr().cast::(); + let len3 = vec3.len(); + ::core::mem::forget(vec3); + *ptr2 + .add(2 * ::core::mem::size_of::<*const u8>()) + .cast::() = len3; + *ptr2 + .add(::core::mem::size_of::<*const u8>()) + .cast::<*mut u8>() = ptr3.cast_mut(); + } + }; + ptr2 + } + #[doc(hidden)] + #[allow(non_snake_case)] + pub unsafe fn __post_return_add_message(arg0: *mut u8) { + let l0 = i32::from(*arg0.add(0).cast::()); + match l0 { + 0 => {} + _ => { + let l1 = *arg0 + .add(::core::mem::size_of::<*const u8>()) + .cast::<*mut u8>(); + let l2 = *arg0 + .add(2 * ::core::mem::size_of::<*const u8>()) + .cast::(); + _rt::cabi_dealloc(l1, l2, 1); + } + } + } + #[doc(hidden)] + #[allow(non_snake_case)] + pub unsafe fn _export_get_messages_cabi() -> *mut u8 { + #[cfg(target_arch = "wasm32")] _rt::run_ctors_once(); + let result0 = T::get_messages(); + let ptr1 = (&raw mut _RET_AREA.0).cast::(); + match result0 { + Ok(e) => { + *ptr1.add(0).cast::() = (0i32) as u8; + let vec2 = (e).into_boxed_slice(); + let ptr2 = vec2.as_ptr().cast::(); + let len2 = vec2.len(); + ::core::mem::forget(vec2); + *ptr1 + .add(2 * ::core::mem::size_of::<*const u8>()) + .cast::() = len2; + *ptr1 + 
.add(::core::mem::size_of::<*const u8>()) + .cast::<*mut u8>() = ptr2.cast_mut(); + } + Err(e) => { + *ptr1.add(0).cast::() = (1i32) as u8; + let vec3 = (e.into_bytes()).into_boxed_slice(); + let ptr3 = vec3.as_ptr().cast::(); + let len3 = vec3.len(); + ::core::mem::forget(vec3); + *ptr1 + .add(2 * ::core::mem::size_of::<*const u8>()) + .cast::() = len3; + *ptr1 + .add(::core::mem::size_of::<*const u8>()) + .cast::<*mut u8>() = ptr3.cast_mut(); + } + }; + ptr1 + } + #[doc(hidden)] + #[allow(non_snake_case)] + pub unsafe fn __post_return_get_messages(arg0: *mut u8) { + let l0 = i32::from(*arg0.add(0).cast::()); + match l0 { + 0 => { + let l1 = *arg0 + .add(::core::mem::size_of::<*const u8>()) + .cast::<*mut u8>(); + let l2 = *arg0 + .add(2 * ::core::mem::size_of::<*const u8>()) + .cast::(); + let base3 = l1; + let len3 = l2; + _rt::cabi_dealloc(base3, len3 * 1, 1); + } + _ => { + let l4 = *arg0 + .add(::core::mem::size_of::<*const u8>()) + .cast::<*mut u8>(); + let l5 = *arg0 + .add(2 * ::core::mem::size_of::<*const u8>()) + .cast::(); + _rt::cabi_dealloc(l4, l5, 1); + } + } + } + #[doc(hidden)] + #[allow(non_snake_case)] + pub unsafe fn _export_get_messages_for_request_cabi( + arg0: *mut u8, + arg1: usize, + ) -> *mut u8 { + #[cfg(target_arch = "wasm32")] _rt::run_ctors_once(); + let len0 = arg1; + let result1 = T::get_messages_for_request( + _rt::Vec::from_raw_parts(arg0.cast(), len0, len0), + ); + let ptr2 = (&raw mut _RET_AREA.0).cast::(); + match result1 { + Ok(e) => { + *ptr2.add(0).cast::() = (0i32) as u8; + let vec3 = (e).into_boxed_slice(); + let ptr3 = vec3.as_ptr().cast::(); + let len3 = vec3.len(); + ::core::mem::forget(vec3); + *ptr2 + .add(2 * ::core::mem::size_of::<*const u8>()) + .cast::() = len3; + *ptr2 + .add(::core::mem::size_of::<*const u8>()) + .cast::<*mut u8>() = ptr3.cast_mut(); + } + Err(e) => { + *ptr2.add(0).cast::() = (1i32) as u8; + let vec4 = (e.into_bytes()).into_boxed_slice(); + let ptr4 = vec4.as_ptr().cast::(); + let len4 = 
vec4.len(); + ::core::mem::forget(vec4); + *ptr2 + .add(2 * ::core::mem::size_of::<*const u8>()) + .cast::() = len4; + *ptr2 + .add(::core::mem::size_of::<*const u8>()) + .cast::<*mut u8>() = ptr4.cast_mut(); + } + }; + ptr2 + } + #[doc(hidden)] + #[allow(non_snake_case)] + pub unsafe fn __post_return_get_messages_for_request( + arg0: *mut u8, + ) { + let l0 = i32::from(*arg0.add(0).cast::()); + match l0 { + 0 => { + let l1 = *arg0 + .add(::core::mem::size_of::<*const u8>()) + .cast::<*mut u8>(); + let l2 = *arg0 + .add(2 * ::core::mem::size_of::<*const u8>()) + .cast::(); + let base3 = l1; + let len3 = l2; + _rt::cabi_dealloc(base3, len3 * 1, 1); + } + _ => { + let l4 = *arg0 + .add(::core::mem::size_of::<*const u8>()) + .cast::<*mut u8>(); + let l5 = *arg0 + .add(2 * ::core::mem::size_of::<*const u8>()) + .cast::(); + _rt::cabi_dealloc(l4, l5, 1); + } + } + } + #[doc(hidden)] + #[allow(non_snake_case)] + pub unsafe fn _export_set_messages_cabi( + arg0: *mut u8, + arg1: usize, + ) -> *mut u8 { + #[cfg(target_arch = "wasm32")] _rt::run_ctors_once(); + let len0 = arg1; + let result1 = T::set_messages( + _rt::Vec::from_raw_parts(arg0.cast(), len0, len0), + ); + let ptr2 = (&raw mut _RET_AREA.0).cast::(); + match result1 { + Ok(_) => { + *ptr2.add(0).cast::() = (0i32) as u8; + } + Err(e) => { + *ptr2.add(0).cast::() = (1i32) as u8; + let vec3 = (e.into_bytes()).into_boxed_slice(); + let ptr3 = vec3.as_ptr().cast::(); + let len3 = vec3.len(); + ::core::mem::forget(vec3); + *ptr2 + .add(2 * ::core::mem::size_of::<*const u8>()) + .cast::() = len3; + *ptr2 + .add(::core::mem::size_of::<*const u8>()) + .cast::<*mut u8>() = ptr3.cast_mut(); + } + }; + ptr2 + } + #[doc(hidden)] + #[allow(non_snake_case)] + pub unsafe fn __post_return_set_messages(arg0: *mut u8) { + let l0 = i32::from(*arg0.add(0).cast::()); + match l0 { + 0 => {} + _ => { + let l1 = *arg0 + .add(::core::mem::size_of::<*const u8>()) + .cast::<*mut u8>(); + let l2 = *arg0 + .add(2 * 
::core::mem::size_of::<*const u8>()) + .cast::(); + _rt::cabi_dealloc(l1, l2, 1); + } + } + } + #[doc(hidden)] + #[allow(non_snake_case)] + pub unsafe fn _export_clear_cabi() -> *mut u8 { + #[cfg(target_arch = "wasm32")] _rt::run_ctors_once(); + let result0 = T::clear(); + let ptr1 = (&raw mut _RET_AREA.0).cast::(); + match result0 { + Ok(_) => { + *ptr1.add(0).cast::() = (0i32) as u8; + } + Err(e) => { + *ptr1.add(0).cast::() = (1i32) as u8; + let vec2 = (e.into_bytes()).into_boxed_slice(); + let ptr2 = vec2.as_ptr().cast::(); + let len2 = vec2.len(); + ::core::mem::forget(vec2); + *ptr1 + .add(2 * ::core::mem::size_of::<*const u8>()) + .cast::() = len2; + *ptr1 + .add(::core::mem::size_of::<*const u8>()) + .cast::<*mut u8>() = ptr2.cast_mut(); + } + }; + ptr1 + } + #[doc(hidden)] + #[allow(non_snake_case)] + pub unsafe fn __post_return_clear(arg0: *mut u8) { + let l0 = i32::from(*arg0.add(0).cast::()); + match l0 { + 0 => {} + _ => { + let l1 = *arg0 + .add(::core::mem::size_of::<*const u8>()) + .cast::<*mut u8>(); + let l2 = *arg0 + .add(2 * ::core::mem::size_of::<*const u8>()) + .cast::(); + _rt::cabi_dealloc(l1, l2, 1); + } + } + } + pub trait Guest { + /// Append a message to the context (Message proto, serialized). + fn add_message(message: _rt::Vec) -> Result<(), _rt::String>; + /// Get all messages (raw, uncompacted). Returns GetMessagesResponse proto. + fn get_messages() -> Result<_rt::Vec, _rt::String>; + /// Get messages for an LLM request (compacted). Accepts + /// GetMessagesForRequestParams proto, returns GetMessagesResponse proto. + fn get_messages_for_request( + params: _rt::Vec, + ) -> Result<_rt::Vec, _rt::String>; + /// Replace the entire message list (SetMessagesRequest proto). + fn set_messages(messages: _rt::Vec) -> Result<(), _rt::String>; + /// Clear all messages from context. + fn clear() -> Result<(), _rt::String>; + } + #[doc(hidden)] + macro_rules! 
__export_amplifier_modules_context_manager_1_0_0_cabi { + ($ty:ident with_types_in $($path_to_types:tt)*) => { + const _ : () = { #[unsafe (export_name = + "amplifier:modules/context-manager@1.0.0#add-message")] unsafe + extern "C" fn export_add_message(arg0 : * mut u8, arg1 : usize,) + -> * mut u8 { unsafe { $($path_to_types)*:: + _export_add_message_cabi::<$ty > (arg0, arg1) } } #[unsafe + (export_name = + "cabi_post_amplifier:modules/context-manager@1.0.0#add-message")] + unsafe extern "C" fn _post_return_add_message(arg0 : * mut u8,) { + unsafe { $($path_to_types)*:: __post_return_add_message::<$ty > + (arg0) } } #[unsafe (export_name = + "amplifier:modules/context-manager@1.0.0#get-messages")] unsafe + extern "C" fn export_get_messages() -> * mut u8 { unsafe { + $($path_to_types)*:: _export_get_messages_cabi::<$ty > () } } + #[unsafe (export_name = + "cabi_post_amplifier:modules/context-manager@1.0.0#get-messages")] + unsafe extern "C" fn _post_return_get_messages(arg0 : * mut u8,) + { unsafe { $($path_to_types)*:: __post_return_get_messages::<$ty + > (arg0) } } #[unsafe (export_name = + "amplifier:modules/context-manager@1.0.0#get-messages-for-request")] + unsafe extern "C" fn export_get_messages_for_request(arg0 : * mut + u8, arg1 : usize,) -> * mut u8 { unsafe { $($path_to_types)*:: + _export_get_messages_for_request_cabi::<$ty > (arg0, arg1) } } + #[unsafe (export_name = + "cabi_post_amplifier:modules/context-manager@1.0.0#get-messages-for-request")] + unsafe extern "C" fn _post_return_get_messages_for_request(arg0 : + * mut u8,) { unsafe { $($path_to_types)*:: + __post_return_get_messages_for_request::<$ty > (arg0) } } + #[unsafe (export_name = + "amplifier:modules/context-manager@1.0.0#set-messages")] unsafe + extern "C" fn export_set_messages(arg0 : * mut u8, arg1 : usize,) + -> * mut u8 { unsafe { $($path_to_types)*:: + _export_set_messages_cabi::<$ty > (arg0, arg1) } } #[unsafe + (export_name = + 
"cabi_post_amplifier:modules/context-manager@1.0.0#set-messages")] + unsafe extern "C" fn _post_return_set_messages(arg0 : * mut u8,) + { unsafe { $($path_to_types)*:: __post_return_set_messages::<$ty + > (arg0) } } #[unsafe (export_name = + "amplifier:modules/context-manager@1.0.0#clear")] unsafe extern + "C" fn export_clear() -> * mut u8 { unsafe { $($path_to_types)*:: + _export_clear_cabi::<$ty > () } } #[unsafe (export_name = + "cabi_post_amplifier:modules/context-manager@1.0.0#clear")] + unsafe extern "C" fn _post_return_clear(arg0 : * mut u8,) { + unsafe { $($path_to_types)*:: __post_return_clear::<$ty > (arg0) + } } }; + }; + } + #[doc(hidden)] + pub(crate) use __export_amplifier_modules_context_manager_1_0_0_cabi; + #[cfg_attr(target_pointer_width = "64", repr(align(8)))] + #[cfg_attr(target_pointer_width = "32", repr(align(4)))] + struct _RetArea( + [::core::mem::MaybeUninit< + u8, + >; 3 * ::core::mem::size_of::<*const u8>()], + ); + static mut _RET_AREA: _RetArea = _RetArea( + [::core::mem::MaybeUninit::uninit(); 3 + * ::core::mem::size_of::<*const u8>()], + ); + } + } + } +} +#[rustfmt::skip] +mod _rt { + #![allow(dead_code, clippy::all)] + #[cfg(target_arch = "wasm32")] + pub fn run_ctors_once() { + wit_bindgen_rt::run_ctors_once(); + } + pub use alloc_crate::vec::Vec; + pub unsafe fn cabi_dealloc(ptr: *mut u8, size: usize, align: usize) { + if size == 0 { + return; + } + let layout = alloc::Layout::from_size_align_unchecked(size, align); + alloc::dealloc(ptr, layout); + } + pub use alloc_crate::string::String; + extern crate alloc as alloc_crate; + pub use alloc_crate::alloc; +} +/// Generates `#[unsafe(no_mangle)]` functions to export the specified type as +/// the root implementation of all generated traits. +/// +/// For more information see the documentation of `wit_bindgen::generate!`. +/// +/// ```rust +/// # macro_rules! export{ ($($t:tt)*) => (); } +/// # trait Guest {} +/// struct MyType; +/// +/// impl Guest for MyType { +/// // ... 
+/// } +/// +/// export!(MyType); +/// ``` +#[allow(unused_macros)] +#[doc(hidden)] +macro_rules! __export_context_module_impl { + ($ty:ident) => { + self::export!($ty with_types_in self); + }; + ($ty:ident with_types_in $($path_to_types_root:tt)*) => { + $($path_to_types_root)*:: + exports::amplifier::modules::context_manager::__export_amplifier_modules_context_manager_1_0_0_cabi!($ty + with_types_in $($path_to_types_root)*:: + exports::amplifier::modules::context_manager); + }; +} +#[doc(inline)] +pub(crate) use __export_context_module_impl as export; +#[cfg(target_arch = "wasm32")] +#[unsafe( + link_section = "component-type:wit-bindgen:0.41.0:amplifier:modules@1.0.0:context-module:encoded world" +)] +#[doc(hidden)] +#[allow(clippy::octal_escapes)] +pub static __WIT_BINDGEN_COMPONENT_TYPE: [u8; 384] = *b"\ +\0asm\x0d\0\x01\0\0\x19\x16wit-component-encoding\x04\0\x07\xfb\x01\x01A\x02\x01\ +A\x02\x01B\x0d\x01p}\x01j\0\x01s\x01@\x01\x07message\0\0\x01\x04\0\x0badd-messag\ +e\x01\x02\x01j\x01\0\x01s\x01@\0\0\x03\x04\0\x0cget-messages\x01\x04\x01@\x01\x06\ +params\0\0\x03\x04\0\x18get-messages-for-request\x01\x05\x01@\x01\x08messages\0\0\ +\x01\x04\0\x0cset-messages\x01\x06\x01@\0\0\x01\x04\0\x05clear\x01\x07\x04\0'amp\ +lifier:modules/context-manager@1.0.0\x05\0\x04\0&lifier:modules/context-modul\ +e@1.0.0\x04\0\x0b\x14\x01\0\x0econtext-module\x03\0\0\0G\x09producers\x01\x0cpro\ +cessed-by\x02\x0dwit-component\x070.227.1\x10wit-bindgen-rust\x060.41.0"; +#[inline(never)] +#[doc(hidden)] +pub fn __link_custom_section_describing_imports() { + wit_bindgen_rt::maybe_link_cabi_realloc(); +} diff --git a/tests/fixtures/wasm/src/memory-context/src/lib.rs b/tests/fixtures/wasm/src/memory-context/src/lib.rs new file mode 100644 index 0000000..feb6fb9 --- /dev/null +++ b/tests/fixtures/wasm/src/memory-context/src/lib.rs @@ -0,0 +1,50 @@ +#[allow(warnings)] +mod bindings; + +use amplifier_guest::{ContextManager, Value}; +use std::sync::Mutex; + +static MESSAGES: Mutex> = 
Mutex::new(Vec::new()); + +#[derive(Default)] +struct MemoryContext; + +impl ContextManager for MemoryContext { + fn add_message(&self, message: Value) -> Result<(), String> { + MESSAGES + .lock() + .map_err(|e| format!("poisoned mutex: {e}"))? + .push(message); + Ok(()) + } + + fn get_messages(&self) -> Result, String> { + Ok(MESSAGES + .lock() + .map_err(|e| format!("poisoned mutex: {e}"))? + .clone()) + } + + fn get_messages_for_request(&self, _request: Value) -> Result, String> { + // No budget trimming — return all messages. + self.get_messages() + } + + fn set_messages(&self, messages: Vec) -> Result<(), String> { + let mut store = MESSAGES + .lock() + .map_err(|e| format!("poisoned mutex: {e}"))?; + *store = messages; + Ok(()) + } + + fn clear(&self) -> Result<(), String> { + MESSAGES + .lock() + .map_err(|e| format!("poisoned mutex: {e}"))? + .clear(); + Ok(()) + } +} + +amplifier_guest::export_context!(MemoryContext); diff --git a/tests/fixtures/wasm/src/memory-context/wit/context.wit b/tests/fixtures/wasm/src/memory-context/wit/context.wit new file mode 100644 index 0000000..95aedbd --- /dev/null +++ b/tests/fixtures/wasm/src/memory-context/wit/context.wit @@ -0,0 +1,29 @@ +// Minimal WIT for context-module world. +// Extracted from the main amplifier-modules.wit to avoid pulling in +// WASI HTTP dependencies that are only needed by the provider-module world. + +package amplifier:modules@1.0.0; + +/// Context manager interface — owns conversation memory policy. +interface context-manager { + /// Append a message to the context (Message proto, serialized). + add-message: func(message: list) -> result<_, string>; + + /// Get all messages (raw, uncompacted). Returns GetMessagesResponse proto. + get-messages: func() -> result, string>; + + /// Get messages for an LLM request (compacted). Accepts + /// GetMessagesForRequestParams proto, returns GetMessagesResponse proto. 
+ get-messages-for-request: func(params: list) -> result, string>; + + /// Replace the entire message list (SetMessagesRequest proto). + set-messages: func(messages: list) -> result<_, string>; + + /// Clear all messages from context. + clear: func() -> result<_, string>; +} + +/// Tier 1: Pure-compute context manager module. +world context-module { + export context-manager; +} diff --git a/tests/fixtures/wasm/src/passthrough-orchestrator/.gitignore b/tests/fixtures/wasm/src/passthrough-orchestrator/.gitignore new file mode 100644 index 0000000..b83d222 --- /dev/null +++ b/tests/fixtures/wasm/src/passthrough-orchestrator/.gitignore @@ -0,0 +1 @@ +/target/ diff --git a/tests/fixtures/wasm/src/passthrough-orchestrator/Cargo.lock b/tests/fixtures/wasm/src/passthrough-orchestrator/Cargo.lock new file mode 100644 index 0000000..f7d3b53 --- /dev/null +++ b/tests/fixtures/wasm/src/passthrough-orchestrator/Cargo.lock @@ -0,0 +1,861 @@ +# This file is automatically @generated by Cargo. +# It is not intended for manual editing. 
+version = 4 + +[[package]] +name = "adler2" +version = "2.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "320119579fcad9c21884f5c4861d16174d0e06250625266f50fe6898340abefa" + +[[package]] +name = "amplifier-guest" +version = "0.1.0" +dependencies = [ + "prost", + "serde", + "serde_json", + "wit-bindgen", +] + +[[package]] +name = "anyhow" +version = "1.0.102" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7f202df86484c868dbad7eaa557ef785d5c66295e41b460ef922eca0723b842c" + +[[package]] +name = "auditable-serde" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5c7bf8143dfc3c0258df908843e169b5cc5fcf76c7718bd66135ef4a9cd558c5" +dependencies = [ + "semver", + "serde", + "serde_json", + "topological-sort", +] + +[[package]] +name = "bitflags" +version = "2.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "843867be96c8daad0d758b57df9392b6d8d271134fce549de6ce169ff98a92af" + +[[package]] +name = "bytes" +version = "1.11.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e748733b7cbc798e1434b6ac524f0c1ff2ab456fe201501e6497c8417a4fc33" + +[[package]] +name = "cfg-if" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9330f8b2ff13f34540b44e946ef35111825727b38d33286ef986142615121801" + +[[package]] +name = "crc32fast" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9481c1c90cbf2ac953f07c8d4a58aa3945c425b7185c9154d67a65e4230da511" +dependencies = [ + "cfg-if", +] + +[[package]] +name = "displaydoc" +version = "0.2.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "97369cbbc041bc366949bc74d34658d6cda5621039731c6310521892a3a20ae0" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "either" +version = "1.15.0" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "48c757948c5ede0e46177b7add2e67155f70e33c07fea8284df6576da70b3719" + +[[package]] +name = "equivalent" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "877a4ace8713b0bcf2a4e7eec82529c029f1d0619886d18145fea96c3ffe5c0f" + +[[package]] +name = "flate2" +version = "1.1.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "843fba2746e448b37e26a819579957415c8cef339bf08564fe8b7ddbd959573c" +dependencies = [ + "crc32fast", + "miniz_oxide", +] + +[[package]] +name = "foldhash" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d9c4f5dac5e15c24eb999c26181a6ca40b39fe946cbe4c263c7209467bc83af2" + +[[package]] +name = "form_urlencoded" +version = "1.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cb4cb245038516f5f85277875cdaa4f7d2c9a0fa0468de06ed190163b1581fcf" +dependencies = [ + "percent-encoding", +] + +[[package]] +name = "futures" +version = "0.3.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8b147ee9d1f6d097cef9ce628cd2ee62288d963e16fb287bd9286455b241382d" +dependencies = [ + "futures-channel", + "futures-core", + "futures-executor", + "futures-io", + "futures-sink", + "futures-task", + "futures-util", +] + +[[package]] +name = "futures-channel" +version = "0.3.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "07bbe89c50d7a535e539b8c17bc0b49bdb77747034daa8087407d655f3f7cc1d" +dependencies = [ + "futures-core", + "futures-sink", +] + +[[package]] +name = "futures-core" +version = "0.3.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7e3450815272ef58cec6d564423f6e755e25379b217b0bc688e295ba24df6b1d" + +[[package]] +name = "futures-executor" +version = "0.3.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"baf29c38818342a3b26b5b923639e7b1f4a61fc5e76102d4b1981c6dc7a7579d" +dependencies = [ + "futures-core", + "futures-task", + "futures-util", +] + +[[package]] +name = "futures-io" +version = "0.3.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cecba35d7ad927e23624b22ad55235f2239cfa44fd10428eecbeba6d6a717718" + +[[package]] +name = "futures-macro" +version = "0.3.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e835b70203e41293343137df5c0664546da5745f82ec9b84d40be8336958447b" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "futures-sink" +version = "0.3.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c39754e157331b013978ec91992bde1ac089843443c49cbc7f46150b0fad0893" + +[[package]] +name = "futures-task" +version = "0.3.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "037711b3d59c33004d3856fbdc83b99d4ff37a24768fa1be9ce3538a1cde4393" + +[[package]] +name = "futures-util" +version = "0.3.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "389ca41296e6190b48053de0321d02a77f32f8a5d2461dd38762c0593805c6d6" +dependencies = [ + "futures-channel", + "futures-core", + "futures-io", + "futures-macro", + "futures-sink", + "futures-task", + "memchr", + "pin-project-lite", + "slab", +] + +[[package]] +name = "hashbrown" +version = "0.15.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9229cfe53dfd69f0609a49f65461bd93001ea1ef889cd5529dd176593f5338a1" +dependencies = [ + "foldhash", +] + +[[package]] +name = "hashbrown" +version = "0.16.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "841d1cc9bed7f9236f321df977030373f4a4163ae1a7dbfe1a51a2c1a51d9100" + +[[package]] +name = "heck" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"2304e00983f87ffb38b55b444b5e3b60a884b5d30c0fca7d82fe33449bbe55ea" + +[[package]] +name = "icu_collections" +version = "2.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4c6b649701667bbe825c3b7e6388cb521c23d88644678e83c0c4d0a621a34b43" +dependencies = [ + "displaydoc", + "potential_utf", + "yoke", + "zerofrom", + "zerovec", +] + +[[package]] +name = "icu_locale_core" +version = "2.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "edba7861004dd3714265b4db54a3c390e880ab658fec5f7db895fae2046b5bb6" +dependencies = [ + "displaydoc", + "litemap", + "tinystr", + "writeable", + "zerovec", +] + +[[package]] +name = "icu_normalizer" +version = "2.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5f6c8828b67bf8908d82127b2054ea1b4427ff0230ee9141c54251934ab1b599" +dependencies = [ + "icu_collections", + "icu_normalizer_data", + "icu_properties", + "icu_provider", + "smallvec", + "zerovec", +] + +[[package]] +name = "icu_normalizer_data" +version = "2.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7aedcccd01fc5fe81e6b489c15b247b8b0690feb23304303a9e560f37efc560a" + +[[package]] +name = "icu_properties" +version = "2.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "020bfc02fe870ec3a66d93e677ccca0562506e5872c650f893269e08615d74ec" +dependencies = [ + "icu_collections", + "icu_locale_core", + "icu_properties_data", + "icu_provider", + "zerotrie", + "zerovec", +] + +[[package]] +name = "icu_properties_data" +version = "2.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "616c294cf8d725c6afcd8f55abc17c56464ef6211f9ed59cccffe534129c77af" + +[[package]] +name = "icu_provider" +version = "2.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "85962cf0ce02e1e0a629cc34e7ca3e373ce20dda4c4d7294bbd0bf1fdb59e614" +dependencies = [ + "displaydoc", + 
"icu_locale_core", + "writeable", + "yoke", + "zerofrom", + "zerotrie", + "zerovec", +] + +[[package]] +name = "id-arena" +version = "2.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3d3067d79b975e8844ca9eb072e16b31c3c1c36928edf9c6789548c524d0d954" + +[[package]] +name = "idna" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3b0875f23caa03898994f6ddc501886a45c7d3d62d04d2d90788d47be1b1e4de" +dependencies = [ + "idna_adapter", + "smallvec", + "utf8_iter", +] + +[[package]] +name = "idna_adapter" +version = "1.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3acae9609540aa318d1bc588455225fb2085b9ed0c4f6bd0d9d5bcd86f1a0344" +dependencies = [ + "icu_normalizer", + "icu_properties", +] + +[[package]] +name = "indexmap" +version = "2.13.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7714e70437a7dc3ac8eb7e6f8df75fd8eb422675fc7678aff7364301092b1017" +dependencies = [ + "equivalent", + "hashbrown 0.16.1", + "serde", + "serde_core", +] + +[[package]] +name = "itertools" +version = "0.14.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2b192c782037fadd9cfa75548310488aabdbf3d2da73885b31bd0abd03351285" +dependencies = [ + "either", +] + +[[package]] +name = "itoa" +version = "1.0.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "92ecc6618181def0457392ccd0ee51198e065e016d1d527a7ac1b6dc7c1f09d2" + +[[package]] +name = "leb128fmt" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "09edd9e8b54e49e587e4f6295a7d29c3ea94d469cb40ab8ca70b288248a81db2" + +[[package]] +name = "litemap" +version = "0.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6373607a59f0be73a39b6fe456b8192fcc3585f602af20751600e974dd455e77" + +[[package]] +name = "log" +version = "0.4.29" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "5e5032e24019045c762d3c0f28f5b6b8bbf38563a65908389bf7978758920897" + +[[package]] +name = "memchr" +version = "2.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f8ca58f447f06ed17d5fc4043ce1b10dd205e060fb3ce5b979b8ed8e59ff3f79" + +[[package]] +name = "miniz_oxide" +version = "0.8.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1fa76a2c86f704bdb222d66965fb3d63269ce38518b83cb0575fca855ebb6316" +dependencies = [ + "adler2", + "simd-adler32", +] + +[[package]] +name = "once_cell" +version = "1.21.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "42f5e15c9953c5e4ccceeb2e7382a716482c34515315f7b03532b8b4e8393d2d" + +[[package]] +name = "passthrough-orchestrator" +version = "0.1.0" +dependencies = [ + "amplifier-guest", + "serde_json", + "wit-bindgen-rt", +] + +[[package]] +name = "percent-encoding" +version = "2.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9b4f627cb1b25917193a259e49bdad08f671f8d9708acfd5fe0a8c1455d87220" + +[[package]] +name = "pin-project-lite" +version = "0.2.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a89322df9ebe1c1578d689c92318e070967d1042b512afbe49518723f4e6d5cd" + +[[package]] +name = "potential_utf" +version = "0.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b73949432f5e2a09657003c25bca5e19a0e9c84f8058ca374f49e0ebe605af77" +dependencies = [ + "zerovec", +] + +[[package]] +name = "prettyplease" +version = "0.2.37" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "479ca8adacdd7ce8f1fb39ce9ecccbfe93a3f1344b3d0d97f20bc0196208f62b" +dependencies = [ + "proc-macro2", + "syn", +] + +[[package]] +name = "proc-macro2" +version = "1.0.106" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"8fd00f0bb2e90d81d1044c2b32617f68fcb9fa3bb7640c23e9c748e53fb30934" +dependencies = [ + "unicode-ident", +] + +[[package]] +name = "prost" +version = "0.13.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2796faa41db3ec313a31f7624d9286acf277b52de526150b7e69f3debf891ee5" +dependencies = [ + "bytes", + "prost-derive", +] + +[[package]] +name = "prost-derive" +version = "0.13.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8a56d757972c98b346a9b766e3f02746cde6dd1cd1d1d563472929fdd74bec4d" +dependencies = [ + "anyhow", + "itertools", + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "quote" +version = "1.0.45" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "41f2619966050689382d2b44f664f4bc593e129785a36d6ee376ddf37259b924" +dependencies = [ + "proc-macro2", +] + +[[package]] +name = "semver" +version = "1.0.27" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d767eb0aabc880b29956c35734170f26ed551a859dbd361d140cdbeca61ab1e2" +dependencies = [ + "serde", + "serde_core", +] + +[[package]] +name = "serde" +version = "1.0.228" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9a8e94ea7f378bd32cbbd37198a4a91436180c5bb472411e48b5ec2e2124ae9e" +dependencies = [ + "serde_core", + "serde_derive", +] + +[[package]] +name = "serde_core" +version = "1.0.228" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "41d385c7d4ca58e59fc732af25c3983b67ac852c1a25000afe1175de458b67ad" +dependencies = [ + "serde_derive", +] + +[[package]] +name = "serde_derive" +version = "1.0.228" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d540f220d3187173da220f885ab66608367b6574e925011a9353e4badda91d79" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "serde_json" +version = "1.0.149" +source = "registry+https://github.com/rust-lang/crates.io-index" 
+checksum = "83fc039473c5595ace860d8c4fafa220ff474b3fc6bfdb4293327f1a37e94d86" +dependencies = [ + "itoa", + "memchr", + "serde", + "serde_core", + "zmij", +] + +[[package]] +name = "simd-adler32" +version = "0.3.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e320a6c5ad31d271ad523dcf3ad13e2767ad8b1cb8f047f75a8aeaf8da139da2" + +[[package]] +name = "slab" +version = "0.4.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0c790de23124f9ab44544d7ac05d60440adc586479ce501c1d6d7da3cd8c9cf5" + +[[package]] +name = "smallvec" +version = "1.15.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "67b1b7a3b5fe4f1376887184045fcf45c69e92af734b7aaddc05fb777b6fbd03" + +[[package]] +name = "spdx" +version = "0.10.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c3e17e880bafaeb362a7b751ec46bdc5b61445a188f80e0606e68167cd540fa3" +dependencies = [ + "smallvec", +] + +[[package]] +name = "stable_deref_trait" +version = "1.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6ce2be8dc25455e1f91df71bfa12ad37d7af1092ae736f3a6cd0e37bc7810596" + +[[package]] +name = "syn" +version = "2.0.117" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e665b8803e7b1d2a727f4023456bbbbe74da67099c585258af0ad9c5013b9b99" +dependencies = [ + "proc-macro2", + "quote", + "unicode-ident", +] + +[[package]] +name = "synstructure" +version = "0.13.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "728a70f3dbaf5bab7f0c4b1ac8d7ae5ea60a4b5549c8a5914361c99147a709d2" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "tinystr" +version = "0.8.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "42d3e9c45c09de15d06dd8acf5f4e0e399e85927b7f00711024eb7ae10fa4869" +dependencies = [ + "displaydoc", + "zerovec", +] + +[[package]] +name = 
"topological-sort" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ea68304e134ecd095ac6c3574494fc62b909f416c4fca77e440530221e549d3d" + +[[package]] +name = "unicode-ident" +version = "1.0.24" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e6e4313cd5fcd3dad5cafa179702e2b244f760991f45397d14d4ebf38247da75" + +[[package]] +name = "unicode-xid" +version = "0.2.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ebc1c04c71510c7f702b52b7c350734c9ff1295c464a03335b00bb84fc54f853" + +[[package]] +name = "url" +version = "2.5.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ff67a8a4397373c3ef660812acab3268222035010ab8680ec4215f38ba3d0eed" +dependencies = [ + "form_urlencoded", + "idna", + "percent-encoding", + "serde", +] + +[[package]] +name = "utf8_iter" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b6c140620e7ffbb22c2dee59cafe6084a59b5ffc27a8859a5f0d494b5d52b6be" + +[[package]] +name = "wasm-encoder" +version = "0.227.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "80bb72f02e7fbf07183443b27b0f3d4144abf8c114189f2e088ed95b696a7822" +dependencies = [ + "leb128fmt", + "wasmparser", +] + +[[package]] +name = "wasm-metadata" +version = "0.227.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ce1ef0faabbbba6674e97a56bee857ccddf942785a336c8b47b42373c922a91d" +dependencies = [ + "anyhow", + "auditable-serde", + "flate2", + "indexmap", + "serde", + "serde_derive", + "serde_json", + "spdx", + "url", + "wasm-encoder", + "wasmparser", +] + +[[package]] +name = "wasmparser" +version = "0.227.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0f51cad774fb3c9461ab9bccc9c62dfb7388397b5deda31bf40e8108ccd678b2" +dependencies = [ + "bitflags", + "hashbrown 0.15.5", + "indexmap", + "semver", +] + 
+[[package]] +name = "wit-bindgen" +version = "0.41.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "10fb6648689b3929d56bbc7eb1acf70c9a42a29eb5358c67c10f54dbd5d695de" +dependencies = [ + "wit-bindgen-rt", + "wit-bindgen-rust-macro", +] + +[[package]] +name = "wit-bindgen-core" +version = "0.41.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "92fa781d4f2ff6d3f27f3cc9b74a73327b31ca0dc4a3ef25a0ce2983e0e5af9b" +dependencies = [ + "anyhow", + "heck", + "wit-parser", +] + +[[package]] +name = "wit-bindgen-rt" +version = "0.41.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c4db52a11d4dfb0a59f194c064055794ee6564eb1ced88c25da2cf76e50c5621" +dependencies = [ + "bitflags", + "futures", + "once_cell", +] + +[[package]] +name = "wit-bindgen-rust" +version = "0.41.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9d0809dc5ba19e2e98661bf32fc0addc5a3ca5bf3a6a7083aa6ba484085ff3ce" +dependencies = [ + "anyhow", + "heck", + "indexmap", + "prettyplease", + "syn", + "wasm-metadata", + "wit-bindgen-core", + "wit-component", +] + +[[package]] +name = "wit-bindgen-rust-macro" +version = "0.41.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ad19eec017904e04c60719592a803ee5da76cb51c81e3f6fbf9457f59db49799" +dependencies = [ + "anyhow", + "prettyplease", + "proc-macro2", + "quote", + "syn", + "wit-bindgen-core", + "wit-bindgen-rust", +] + +[[package]] +name = "wit-component" +version = "0.227.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "635c3adc595422cbf2341a17fb73a319669cc8d33deed3a48368a841df86b676" +dependencies = [ + "anyhow", + "bitflags", + "indexmap", + "log", + "serde", + "serde_derive", + "serde_json", + "wasm-encoder", + "wasm-metadata", + "wasmparser", + "wit-parser", +] + +[[package]] +name = "wit-parser" +version = "0.227.1" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "ddf445ed5157046e4baf56f9138c124a0824d4d1657e7204d71886ad8ce2fc11" +dependencies = [ + "anyhow", + "id-arena", + "indexmap", + "log", + "semver", + "serde", + "serde_derive", + "serde_json", + "unicode-xid", + "wasmparser", +] + +[[package]] +name = "writeable" +version = "0.6.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9edde0db4769d2dc68579893f2306b26c6ecfbe0ef499b013d731b7b9247e0b9" + +[[package]] +name = "yoke" +version = "0.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "72d6e5c6afb84d73944e5cedb052c4680d5657337201555f9f2a16b7406d4954" +dependencies = [ + "stable_deref_trait", + "yoke-derive", + "zerofrom", +] + +[[package]] +name = "yoke-derive" +version = "0.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b659052874eb698efe5b9e8cf382204678a0086ebf46982b79d6ca3182927e5d" +dependencies = [ + "proc-macro2", + "quote", + "syn", + "synstructure", +] + +[[package]] +name = "zerofrom" +version = "0.1.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "50cc42e0333e05660c3587f3bf9d0478688e15d870fab3346451ce7f8c9fbea5" +dependencies = [ + "zerofrom-derive", +] + +[[package]] +name = "zerofrom-derive" +version = "0.1.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d71e5d6e06ab090c67b5e44993ec16b72dcbaabc526db883a360057678b48502" +dependencies = [ + "proc-macro2", + "quote", + "syn", + "synstructure", +] + +[[package]] +name = "zerotrie" +version = "0.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2a59c17a5562d507e4b54960e8569ebee33bee890c70aa3fe7b97e85a9fd7851" +dependencies = [ + "displaydoc", + "yoke", + "zerofrom", +] + +[[package]] +name = "zerovec" +version = "0.11.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"6c28719294829477f525be0186d13efa9a3c602f7ec202ca9e353d310fb9a002" +dependencies = [ + "yoke", + "zerofrom", + "zerovec-derive", +] + +[[package]] +name = "zerovec-derive" +version = "0.11.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "eadce39539ca5cb3985590102671f2567e659fca9666581ad3411d59207951f3" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "zmij" +version = "1.0.21" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b8848ee67ecc8aedbaf3e4122217aff892639231befc6a1b58d29fff4c2cabaa" diff --git a/tests/fixtures/wasm/src/passthrough-orchestrator/Cargo.toml b/tests/fixtures/wasm/src/passthrough-orchestrator/Cargo.toml new file mode 100644 index 0000000..79186ed --- /dev/null +++ b/tests/fixtures/wasm/src/passthrough-orchestrator/Cargo.toml @@ -0,0 +1,21 @@ +[package] +name = "passthrough-orchestrator" +version = "0.1.0" +edition = "2021" + +[lib] +crate-type = ["cdylib"] + +[dependencies] +amplifier-guest = { path = "../../../../../crates/amplifier-guest" } +serde_json = "1" +wit-bindgen-rt = "0.41" + +[package.metadata.component] +package = "amplifier:passthrough-orchestrator" + +[package.metadata.component.target] +world = "orchestrator-module" +path = "wit" + +[workspace] diff --git a/tests/fixtures/wasm/src/passthrough-orchestrator/amplifier.toml b/tests/fixtures/wasm/src/passthrough-orchestrator/amplifier.toml new file mode 100644 index 0000000..9351f7a --- /dev/null +++ b/tests/fixtures/wasm/src/passthrough-orchestrator/amplifier.toml @@ -0,0 +1,4 @@ +[module] +transport = "wasm" +type = "orchestrator" + diff --git a/tests/fixtures/wasm/src/passthrough-orchestrator/src/bindings.rs b/tests/fixtures/wasm/src/passthrough-orchestrator/src/bindings.rs new file mode 100644 index 0000000..e218a63 --- /dev/null +++ b/tests/fixtures/wasm/src/passthrough-orchestrator/src/bindings.rs @@ -0,0 +1,288 @@ +// Generated by `wit-bindgen` 0.41.0. DO NOT EDIT! 
+// Options used: +// * runtime_path: "wit_bindgen_rt" +#[rustfmt::skip] +#[allow(dead_code, clippy::all)] +pub mod amplifier { + pub mod modules { + /// Kernel service interface — host-provided callbacks for guest modules. + #[allow(dead_code, async_fn_in_trait, unused_imports, clippy::all)] + pub mod kernel_service { + #[used] + #[doc(hidden)] + static __FORCE_SECTION_REF: fn() = super::super::super::__link_custom_section_describing_imports; + use super::super::super::_rt; + #[allow(unused_unsafe, clippy::all)] + /// Execute a tool by name (ExecuteToolRequest proto, serialized as bytes). + /// Returns serialized result on success. + pub fn execute_tool(request: &[u8]) -> Result<_rt::Vec, _rt::String> { + unsafe { + #[cfg_attr(target_pointer_width = "64", repr(align(8)))] + #[cfg_attr(target_pointer_width = "32", repr(align(4)))] + struct RetArea( + [::core::mem::MaybeUninit< + u8, + >; 3 * ::core::mem::size_of::<*const u8>()], + ); + let mut ret_area = RetArea( + [::core::mem::MaybeUninit::uninit(); 3 + * ::core::mem::size_of::<*const u8>()], + ); + let vec0 = request; + let ptr0 = vec0.as_ptr().cast::(); + let len0 = vec0.len(); + let ptr1 = ret_area.0.as_mut_ptr().cast::(); + #[cfg(target_arch = "wasm32")] + #[link( + wasm_import_module = "amplifier:modules/kernel-service@1.0.0" + )] + unsafe extern "C" { + #[link_name = "execute-tool"] + fn wit_import2(_: *mut u8, _: usize, _: *mut u8); + } + #[cfg(not(target_arch = "wasm32"))] + unsafe extern "C" fn wit_import2(_: *mut u8, _: usize, _: *mut u8) { + unreachable!() + } + unsafe { wit_import2(ptr0.cast_mut(), len0, ptr1) }; + let l3 = i32::from(*ptr1.add(0).cast::()); + let result10 = match l3 { + 0 => { + let e = { + let l4 = *ptr1 + .add(::core::mem::size_of::<*const u8>()) + .cast::<*mut u8>(); + let l5 = *ptr1 + .add(2 * ::core::mem::size_of::<*const u8>()) + .cast::(); + let len6 = l5; + _rt::Vec::from_raw_parts(l4.cast(), len6, len6) + }; + Ok(e) + } + 1 => { + let e = { + let l7 = *ptr1 + 
.add(::core::mem::size_of::<*const u8>()) + .cast::<*mut u8>(); + let l8 = *ptr1 + .add(2 * ::core::mem::size_of::<*const u8>()) + .cast::(); + let len9 = l8; + let bytes9 = _rt::Vec::from_raw_parts( + l7.cast(), + len9, + len9, + ); + _rt::string_lift(bytes9) + }; + Err(e) + } + _ => _rt::invalid_enum_discriminant(), + }; + result10 + } + } + } + } +} +#[rustfmt::skip] +#[allow(dead_code, clippy::all)] +pub mod exports { + pub mod amplifier { + pub mod modules { + /// Orchestrator interface — high-level agent-loop execution. + #[allow(dead_code, async_fn_in_trait, unused_imports, clippy::all)] + pub mod orchestrator { + #[used] + #[doc(hidden)] + static __FORCE_SECTION_REF: fn() = super::super::super::super::__link_custom_section_describing_imports; + use super::super::super::super::_rt; + #[doc(hidden)] + #[allow(non_snake_case)] + pub unsafe fn _export_execute_cabi( + arg0: *mut u8, + arg1: usize, + ) -> *mut u8 { + #[cfg(target_arch = "wasm32")] _rt::run_ctors_once(); + let len0 = arg1; + let result1 = T::execute( + _rt::Vec::from_raw_parts(arg0.cast(), len0, len0), + ); + let ptr2 = (&raw mut _RET_AREA.0).cast::(); + match result1 { + Ok(e) => { + *ptr2.add(0).cast::() = (0i32) as u8; + let vec3 = (e).into_boxed_slice(); + let ptr3 = vec3.as_ptr().cast::(); + let len3 = vec3.len(); + ::core::mem::forget(vec3); + *ptr2 + .add(2 * ::core::mem::size_of::<*const u8>()) + .cast::() = len3; + *ptr2 + .add(::core::mem::size_of::<*const u8>()) + .cast::<*mut u8>() = ptr3.cast_mut(); + } + Err(e) => { + *ptr2.add(0).cast::() = (1i32) as u8; + let vec4 = (e.into_bytes()).into_boxed_slice(); + let ptr4 = vec4.as_ptr().cast::(); + let len4 = vec4.len(); + ::core::mem::forget(vec4); + *ptr2 + .add(2 * ::core::mem::size_of::<*const u8>()) + .cast::() = len4; + *ptr2 + .add(::core::mem::size_of::<*const u8>()) + .cast::<*mut u8>() = ptr4.cast_mut(); + } + }; + ptr2 + } + #[doc(hidden)] + #[allow(non_snake_case)] + pub unsafe fn __post_return_execute(arg0: *mut u8) { + let l0 
= i32::from(*arg0.add(0).cast::()); + match l0 { + 0 => { + let l1 = *arg0 + .add(::core::mem::size_of::<*const u8>()) + .cast::<*mut u8>(); + let l2 = *arg0 + .add(2 * ::core::mem::size_of::<*const u8>()) + .cast::(); + let base3 = l1; + let len3 = l2; + _rt::cabi_dealloc(base3, len3 * 1, 1); + } + _ => { + let l4 = *arg0 + .add(::core::mem::size_of::<*const u8>()) + .cast::<*mut u8>(); + let l5 = *arg0 + .add(2 * ::core::mem::size_of::<*const u8>()) + .cast::(); + _rt::cabi_dealloc(l4, l5, 1); + } + } + } + pub trait Guest { + /// Run the agent loop (OrchestratorExecuteRequest proto → OrchestratorExecuteResponse proto). + fn execute( + request: _rt::Vec, + ) -> Result<_rt::Vec, _rt::String>; + } + #[doc(hidden)] + macro_rules! __export_amplifier_modules_orchestrator_1_0_0_cabi { + ($ty:ident with_types_in $($path_to_types:tt)*) => { + const _ : () = { #[unsafe (export_name = + "amplifier:modules/orchestrator@1.0.0#execute")] unsafe extern + "C" fn export_execute(arg0 : * mut u8, arg1 : usize,) -> * mut u8 + { unsafe { $($path_to_types)*:: _export_execute_cabi::<$ty > + (arg0, arg1) } } #[unsafe (export_name = + "cabi_post_amplifier:modules/orchestrator@1.0.0#execute")] unsafe + extern "C" fn _post_return_execute(arg0 : * mut u8,) { unsafe { + $($path_to_types)*:: __post_return_execute::<$ty > (arg0) } } }; + }; + } + #[doc(hidden)] + pub(crate) use __export_amplifier_modules_orchestrator_1_0_0_cabi; + #[cfg_attr(target_pointer_width = "64", repr(align(8)))] + #[cfg_attr(target_pointer_width = "32", repr(align(4)))] + struct _RetArea( + [::core::mem::MaybeUninit< + u8, + >; 3 * ::core::mem::size_of::<*const u8>()], + ); + static mut _RET_AREA: _RetArea = _RetArea( + [::core::mem::MaybeUninit::uninit(); 3 + * ::core::mem::size_of::<*const u8>()], + ); + } + } + } +} +#[rustfmt::skip] +mod _rt { + #![allow(dead_code, clippy::all)] + pub use alloc_crate::vec::Vec; + pub use alloc_crate::string::String; + pub unsafe fn string_lift(bytes: Vec) -> String { + if 
cfg!(debug_assertions) { + String::from_utf8(bytes).unwrap() + } else { + String::from_utf8_unchecked(bytes) + } + } + pub unsafe fn invalid_enum_discriminant() -> T { + if cfg!(debug_assertions) { + panic!("invalid enum discriminant") + } else { + unsafe { core::hint::unreachable_unchecked() } + } + } + #[cfg(target_arch = "wasm32")] + pub fn run_ctors_once() { + wit_bindgen_rt::run_ctors_once(); + } + pub unsafe fn cabi_dealloc(ptr: *mut u8, size: usize, align: usize) { + if size == 0 { + return; + } + let layout = alloc::Layout::from_size_align_unchecked(size, align); + alloc::dealloc(ptr, layout); + } + extern crate alloc as alloc_crate; + pub use alloc_crate::alloc; +} +/// Generates `#[unsafe(no_mangle)]` functions to export the specified type as +/// the root implementation of all generated traits. +/// +/// For more information see the documentation of `wit_bindgen::generate!`. +/// +/// ```rust +/// # macro_rules! export{ ($($t:tt)*) => (); } +/// # trait Guest {} +/// struct MyType; +/// +/// impl Guest for MyType { +/// // ... +/// } +/// +/// export!(MyType); +/// ``` +#[allow(unused_macros)] +#[doc(hidden)] +macro_rules! 
__export_orchestrator_module_impl { + ($ty:ident) => { + self::export!($ty with_types_in self); + }; + ($ty:ident with_types_in $($path_to_types_root:tt)*) => { + $($path_to_types_root)*:: + exports::amplifier::modules::orchestrator::__export_amplifier_modules_orchestrator_1_0_0_cabi!($ty + with_types_in $($path_to_types_root)*:: + exports::amplifier::modules::orchestrator); + }; +} +#[doc(inline)] +pub(crate) use __export_orchestrator_module_impl as export; +#[cfg(target_arch = "wasm32")] +#[unsafe( + link_section = "component-type:wit-bindgen:0.41.0:amplifier:modules@1.0.0:orchestrator-module:encoded world" +)] +#[doc(hidden)] +#[allow(clippy::octal_escapes)] +pub static __WIT_BINDGEN_COMPONENT_TYPE: [u8; 357] = *b"\ +\0asm\x0d\0\x01\0\0\x19\x16wit-component-encoding\x04\0\x07\xdb\x01\x01A\x02\x01\ +A\x04\x01B\x04\x01p}\x01j\x01\0\x01s\x01@\x01\x07request\0\0\x01\x04\0\x0cexecut\ +e-tool\x01\x02\x03\0&lifier:modules/kernel-service@1.0.0\x05\0\x01B\x04\x01p}\ +\x01j\x01\0\x01s\x01@\x01\x07request\0\0\x01\x04\0\x07execute\x01\x02\x04\0$ampl\ +ifier:modules/orchestrator@1.0.0\x05\x01\x04\0+amplifier:modules/orchestrator-mo\ +dule@1.0.0\x04\0\x0b\x19\x01\0\x13orchestrator-module\x03\0\0\0G\x09producers\x01\ +\x0cprocessed-by\x02\x0dwit-component\x070.227.1\x10wit-bindgen-rust\x060.41.0"; +#[inline(never)] +#[doc(hidden)] +pub fn __link_custom_section_describing_imports() { + wit_bindgen_rt::maybe_link_cabi_realloc(); +} diff --git a/tests/fixtures/wasm/src/passthrough-orchestrator/src/lib.rs b/tests/fixtures/wasm/src/passthrough-orchestrator/src/lib.rs new file mode 100644 index 0000000..018cc5d --- /dev/null +++ b/tests/fixtures/wasm/src/passthrough-orchestrator/src/lib.rs @@ -0,0 +1,32 @@ +#[allow(warnings)] +mod bindings; + +use amplifier_guest::Orchestrator; + +/// Passthrough orchestrator that calls `echo-tool` via the kernel-service host import. +/// Proves that WASM guest modules can import and call host-provided functions. 
+#[derive(Default)] +struct PassthroughOrchestrator; + +impl Orchestrator for PassthroughOrchestrator { + fn execute(&self, prompt: String) -> Result<String, String> { + // Build a JSON request for the echo-tool via the kernel service. + let input = serde_json::json!({ + "name": "echo-tool", + "input": { "prompt": prompt } + }); + let request_bytes = serde_json::to_vec(&input).map_err(|e| e.to_string())?; + + // Call the kernel-service host import to execute the echo-tool. + // This uses the WIT-generated import binding, not the placeholder in amplifier-guest. + let result_bytes = + bindings::amplifier::modules::kernel_service::execute_tool(&request_bytes)?; + + // Deserialize the result and return as a string. + let result: serde_json::Value = + serde_json::from_slice(&result_bytes).map_err(|e| e.to_string())?; + Ok(result.to_string()) + } +} + +amplifier_guest::export_orchestrator!(PassthroughOrchestrator); diff --git a/tests/fixtures/wasm/src/passthrough-orchestrator/wit/orchestrator.wit b/tests/fixtures/wasm/src/passthrough-orchestrator/wit/orchestrator.wit new file mode 100644 index 0000000..e8a29d9 --- /dev/null +++ b/tests/fixtures/wasm/src/passthrough-orchestrator/wit/orchestrator.wit @@ -0,0 +1,24 @@ +// Minimal WIT for orchestrator-module world. +// Includes the kernel-service import (host callbacks) and orchestrator export. +// This fixture proves that WASM guest modules can import host functions. + +package amplifier:modules@1.0.0; + +/// Kernel service interface — host-provided callbacks for guest modules. +interface kernel-service { + /// Execute a tool by name (ExecuteToolRequest proto, serialized as bytes). + /// Returns serialized result on success. + execute-tool: func(request: list<u8>) -> result<list<u8>, string>; +} + +/// Orchestrator interface — high-level agent-loop execution. +interface orchestrator { + /// Run the agent loop (OrchestratorExecuteRequest proto → OrchestratorExecuteResponse proto). 
+ execute: func(request: list<u8>) -> result<list<u8>, string>; +} + +/// Tier 2: Orchestrator module — needs kernel callbacks for the agent loop. +world orchestrator-module { + import kernel-service; + export orchestrator; +} diff --git a/tests/test_ci_workflows.py b/tests/test_ci_workflows.py index 1599f2a..9cdc180 100644 --- a/tests/test_ci_workflows.py +++ b/tests/test_ci_workflows.py @@ -73,11 +73,11 @@ def test_rust_tests_runs_cargo_test(self): run_cmds = [s.get("run", "") for s in steps] assert any("cargo test" in r for r in run_cmds) - def test_rust_tests_runs_cargo_check_workspace(self): + def test_rust_tests_runs_cargo_check(self): wf = self._load() steps = wf["jobs"]["rust-tests"]["steps"] run_cmds = [s.get("run", "") for s in steps] - assert any("cargo check" in r and "--workspace" in r for r in run_cmds) + assert any("cargo check" in r and "amplifier-core" in r for r in run_cmds) def test_rust_tests_runs_cargo_fmt_check(self): wf = self._load() @@ -244,3 +244,46 @@ def test_publish_uses_pypi_action(self): steps = wf["jobs"]["publish"]["steps"] uses_list = [s.get("uses", "") for s in steps] assert any("pypi-publish" in u for u in uses_list) + + +class TestNodeBindingsCIWorkflow: + """Node.js binding tests in CI workflow.""" + + WORKFLOW_PATH = ROOT / ".github" / "workflows" / "rust-core-ci.yml" + + def _load(self) -> dict: + return _normalize_on_key(yaml.safe_load(self.WORKFLOW_PATH.read_text())) + + def test_has_node_tests_job(self): + wf = self._load() + assert "node-tests" in wf["jobs"] + + def test_node_tests_uses_setup_node(self): + wf = self._load() + steps = wf["jobs"]["node-tests"]["steps"] + uses_list = [s.get("uses", "") for s in steps] + assert any("setup-node" in u for u in uses_list) + + def test_node_tests_uses_rust_cache(self): + wf = self._load() + steps = wf["jobs"]["node-tests"]["steps"] + uses_list = [s.get("uses", "") for s in steps] + assert any("rust-cache" in u for u in uses_list) + + def test_node_tests_runs_npm_build(self): + wf = 
self._load() + steps = wf["jobs"]["node-tests"]["steps"] + run_cmds = [s.get("run", "") for s in steps] + assert any("npm" in r and "build" in r for r in run_cmds) + + def test_node_tests_runs_vitest(self): + wf = self._load() + steps = wf["jobs"]["node-tests"]["steps"] + run_cmds = [s.get("run", "") for s in steps] + assert any("vitest" in r for r in run_cmds) + + def test_node_tests_runs_clippy_for_node_binding(self): + wf = self._load() + steps = wf["jobs"]["node-tests"]["steps"] + run_cmds = [s.get("run", "") for s in steps] + assert any("cargo clippy" in r and "amplifier-core-node" in r for r in run_cmds) diff --git a/wit/amplifier-modules.wit b/wit/amplifier-modules.wit new file mode 100644 index 0000000..ff6bdbd --- /dev/null +++ b/wit/amplifier-modules.wit @@ -0,0 +1,155 @@ +// WIT interface definitions for Amplifier WASM modules. +// +// Defines the contract between host (kernel) and guest (WASM modules). +// All complex types are serialized as protobuf bytes (list<u8>) to avoid +// duplicating the full proto schema in WIT. The canonical proto definitions +// live in proto/amplifier_module.proto. + +package amplifier:modules@1.0.0; + +// --------------------------------------------------------------------------- +// Tier 1: Pure-compute interfaces (no host imports required) +// --------------------------------------------------------------------------- + +/// Tool module interface — exposes a single tool to the kernel. +interface tool { + /// Return the tool specification (ToolSpec proto, serialized). + get-spec: func() -> list<u8>; + + /// Execute the tool with proto-serialized input (ToolExecuteRequest). + /// Returns proto-serialized ToolExecuteResponse on success. + execute: func(input: list<u8>) -> result<list<u8>, string>; +} + +/// Hook handler interface — responds to lifecycle events. +interface hook-handler { + /// Handle a lifecycle event (HookHandleRequest proto, serialized). + /// Returns proto-serialized HookResult on success. 
+ handle: func(event: list<u8>) -> result<list<u8>, string>; +} + +/// Context manager interface — owns conversation memory policy. +interface context-manager { + /// Append a message to the context (Message proto, serialized). + add-message: func(message: list<u8>) -> result<_, string>; + + /// Get all messages (raw, uncompacted). Returns GetMessagesResponse proto. + get-messages: func() -> result<list<u8>, string>; + + /// Get messages for an LLM request (compacted). Accepts + /// GetMessagesForRequestParams proto, returns GetMessagesResponse proto. + get-messages-for-request: func(params: list<u8>) -> result<list<u8>, string>; + + /// Replace the entire message list (SetMessagesRequest proto). + set-messages: func(messages: list<u8>) -> result<_, string>; + + /// Clear all messages from context. + clear: func() -> result<_, string>; +} + +/// Approval provider interface — human-in-the-loop approval gate. +interface approval-provider { + /// Request approval from the user (ApprovalRequest proto, serialized). + /// Returns proto-serialized ApprovalResponse on success. + request-approval: func(request: list<u8>) -> result<list<u8>, string>; +} + +// --------------------------------------------------------------------------- +// Tier 2: Interfaces that may need host imports or network access +// --------------------------------------------------------------------------- + +/// Provider interface — LLM completions in any language. +interface provider { + /// Return provider metadata (ProviderInfo proto, serialized). + get-info: func() -> list<u8>; + + /// List available models. Returns ListModelsResponse proto. + list-models: func() -> result<list<u8>, string>; + + /// Generate a completion (ChatRequest proto → ChatResponse proto). + complete: func(request: list<u8>) -> result<list<u8>, string>; + + /// Extract tool calls from a response (ChatResponse proto → + /// ParseToolCallsResponse proto). + parse-tool-calls: func(response: list<u8>) -> result<list<u8>, string>; +} + +/// Orchestrator interface — high-level agent-loop execution. 
+interface orchestrator { + /// Run the agent loop (OrchestratorExecuteRequest proto → + /// OrchestratorExecuteResponse proto). + execute: func(request: list<u8>) -> result<list<u8>, string>; +} + +// --------------------------------------------------------------------------- +// Host interface: kernel callbacks available to guest modules +// --------------------------------------------------------------------------- + +/// Kernel service interface — host-provided callbacks for guest modules. +/// Orchestrator and provider modules import this to call back into the kernel. +interface kernel-service { + /// Execute a tool by name (ExecuteToolRequest proto → ToolResult proto). + execute-tool: func(request: list<u8>) -> result<list<u8>, string>; + + /// Complete with a named provider (CompleteWithProviderRequest proto → + /// ChatResponse proto). + complete-with-provider: func(request: list<u8>) -> result<list<u8>, string>; + + /// Emit a hook event (EmitHookRequest proto → HookResult proto). + emit-hook: func(request: list<u8>) -> result<list<u8>, string>; + + /// Get conversation messages (GetMessagesRequest proto → + /// GetMessagesResponse proto). + get-messages: func(request: list<u8>) -> result<list<u8>, string>; + + /// Add a message to conversation (KernelAddMessageRequest proto). + add-message: func(request: list<u8>) -> result<_, string>; + + /// Look up a registered capability (GetCapabilityRequest proto → + /// GetCapabilityResponse proto). + get-capability: func(request: list<u8>) -> result<list<u8>, string>; + + /// Register a capability (RegisterCapabilityRequest proto). + register-capability: func(request: list<u8>) -> result<_, string>; +} + +// --------------------------------------------------------------------------- +// World definitions — one per module type +// --------------------------------------------------------------------------- + +/// Tier 1: Pure-compute tool module. +world tool-module { + export tool; +} + +/// Tier 1: Pure-compute hook handler module. 
+world hook-module { + export hook-handler; +} + +/// Tier 1: Pure-compute context manager module. +world context-module { + export context-manager; +} + +/// Tier 1: Pure-compute approval provider module. +world approval-module { + export approval-provider; +} + +/// Tier 2: Provider module — pure-compute for now; HTTP deferred. +/// +/// Note: `wasi:http/outgoing-handler` was previously listed here but removed +/// because `wasmtime-wasi-http` is not a dependency and the runtime linker +/// does not provide it. Provider WASM modules compiled against the HTTP +/// import would fail at instantiation. +world provider-module { + // HTTP imports deferred — requires wasmtime-wasi-http dependency (future work) + export provider; +} + +/// Tier 2: Orchestrator module — needs kernel callbacks for the agent loop. +world orchestrator-module { + import kernel-service; + export orchestrator; +} \ No newline at end of file