diff --git a/Cargo.lock b/Cargo.lock index 34b9142d2b0..b9fa392a8d9 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -120,7 +120,7 @@ version = "1.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "40c48f72fd53cd289104fc64099abca73db4166ad86ea0b4341abe65af83dadc" dependencies = [ - "windows-sys 0.60.2", + "windows-sys 0.61.2", ] [[package]] @@ -131,7 +131,7 @@ checksum = "291e6a250ff86cd4a820112fb8898808a366d8f9f58ce16d1f538353ad55747d" dependencies = [ "anstyle", "once_cell_polyfill", - "windows-sys 0.60.2", + "windows-sys 0.61.2", ] [[package]] @@ -1138,7 +1138,7 @@ version = "3.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "faf9468729b8cbcea668e36183cb69d317348c2e08e994829fb56ebfdfbaac34" dependencies = [ - "windows-sys 0.59.0", + "windows-sys 0.61.2", ] [[package]] @@ -2297,7 +2297,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "39cab71617ae0d63f51a36d69f866391735b51691dbda63cf6f96d042b63efeb" dependencies = [ "libc", - "windows-sys 0.59.0", + "windows-sys 0.61.2", ] [[package]] @@ -2484,6 +2484,16 @@ dependencies = [ "futures-core", ] +[[package]] +name = "fs2" +version = "0.4.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9564fc758e15025b46aa6643b1b77d047d1a56a1aea6e01002ac0c7026876213" +dependencies = [ + "libc", + "winapi", +] + [[package]] name = "fs_extra" version = "1.3.0" @@ -3324,7 +3334,7 @@ dependencies = [ "libc", "percent-encoding", "pin-project-lite", - "socket2 0.5.10", + "socket2 0.6.3", "system-configuration", "tokio", "tower-service", @@ -3585,7 +3595,7 @@ checksum = "3640c1c38b8e4e43584d8df18be5fc6b0aa314ce6ebf51b53313d4306cca8e46" dependencies = [ "hermit-abi", "libc", - "windows-sys 0.59.0", + "windows-sys 0.61.2", ] [[package]] @@ -4331,7 +4341,7 @@ version = "0.50.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7957b9740744892f114936ab4a57b3f487491bbeafaf8083688b16841a4240e5" 
dependencies = [ - "windows-sys 0.59.0", + "windows-sys 0.61.2", ] [[package]] @@ -4855,23 +4865,33 @@ dependencies = [ "arc-swap", "async-trait", "bimap", + "bip39", "bs58", + "dash-async", "dash-sdk", "dash-spv", "dashcore", + "dotenvy", "dpp", + "fs2", "grovedb-commitment-tree", "hex", "image", "key-wallet", "key-wallet-manager", + "parking_lot", "platform-encryption", "rand 0.8.5", + "rs-sdk-trusted-context-provider", + "serde", "serde_json", "sha2", + "simple-signer", "static_assertions", + "tempfile", "thiserror 1.0.69", "tokio", + "tokio-shared-rt", "tokio-util", "tracing", "tracing-subscriber", @@ -5133,8 +5153,8 @@ version = "0.14.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "343d3bd7056eda839b03204e68deff7d1b13aba7af2b2fd16890697274262ee7" dependencies = [ - "heck 0.4.1", - "itertools 0.10.5", + "heck 0.5.0", + "itertools 0.14.0", "log", "multimap", "petgraph", @@ -5155,7 +5175,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8a56d757972c98b346a9b766e3f02746cde6dd1cd1d1d563472929fdd74bec4d" dependencies = [ "anyhow", - "itertools 0.10.5", + "itertools 0.14.0", "proc-macro2", "quote", "syn 2.0.117", @@ -5168,7 +5188,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "27c6023962132f4b30eb4c172c91ce92d933da334c59c23cddee82358ddafb0b" dependencies = [ "anyhow", - "itertools 0.10.5", + "itertools 0.14.0", "proc-macro2", "quote", "syn 2.0.117", @@ -5298,7 +5318,7 @@ dependencies = [ "quinn-udp", "rustc-hash 2.1.2", "rustls", - "socket2 0.5.10", + "socket2 0.6.3", "thiserror 2.0.18", "tokio", "tracing", @@ -5336,7 +5356,7 @@ dependencies = [ "cfg_aliases", "libc", "once_cell", - "socket2 0.5.10", + "socket2 0.6.3", "tracing", "windows-sys 0.60.2", ] @@ -6066,7 +6086,7 @@ dependencies = [ "errno", "libc", "linux-raw-sys 0.12.1", - "windows-sys 0.59.0", + "windows-sys 0.61.2", ] [[package]] @@ -6125,7 +6145,7 @@ dependencies = [ "security-framework", 
"security-framework-sys", "webpki-root-certs", - "windows-sys 0.59.0", + "windows-sys 0.61.2", ] [[package]] @@ -6660,6 +6680,8 @@ dependencies = [ "bincode", "dpp", "hex", + "key-wallet", + "thiserror 2.0.18", "tracing", ] @@ -6715,7 +6737,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3a766e1110788c36f4fa1c2b71b387a7815aa65f88ce0229841826633d93723e" dependencies = [ "libc", - "windows-sys 0.60.2", + "windows-sys 0.61.2", ] [[package]] @@ -6948,7 +6970,7 @@ dependencies = [ "getrandom 0.4.2", "once_cell", "rustix 1.1.4", - "windows-sys 0.59.0", + "windows-sys 0.61.2", ] [[package]] @@ -7229,6 +7251,28 @@ dependencies = [ "tokio", ] +[[package]] +name = "tokio-shared-rt" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1a6bb03ec682a0bb16ce93d19301abc5b98a0d7936477175a156a213dcc47d85" +dependencies = [ + "once_cell", + "tokio", + "tokio-shared-rt-macro", +] + +[[package]] +name = "tokio-shared-rt-macro" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1fe49a94e3a984b0d0ab97343dc3dcd52baae1ee13f005bfad39faea47d051dc" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.117", +] + [[package]] name = "tokio-stream" version = "0.1.18" @@ -7276,6 +7320,7 @@ dependencies = [ "futures-core", "futures-io", "futures-sink", + "futures-util", "pin-project-lite", "tokio", ] @@ -8352,7 +8397,7 @@ version = "0.1.11" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c2a7b1c03c876122aa43f3020e6c3c3ee5c05081c9a00739faf7503aeba10d22" dependencies = [ - "windows-sys 0.59.0", + "windows-sys 0.61.2", ] [[package]] diff --git a/packages/rs-platform-wallet/Cargo.toml b/packages/rs-platform-wallet/Cargo.toml index 13d0ccb00af..513380a71cc 100644 --- a/packages/rs-platform-wallet/Cargo.toml +++ b/packages/rs-platform-wallet/Cargo.toml @@ -59,6 +59,39 @@ tracing-subscriber = { version = "0.3", features = ["env-filter"] } # the 
non-test build keeps the leaner default-feature SDK above. dash-sdk = { path = "../rs-sdk", default-features = false, features = ["dashpay-contract", "dpns-contract", "mocks"] } +# E2E test framework — see `tests/e2e/` for the integration harness +# that exercises the wallet → SDK → broadcast pipeline against a +# live testnet bank wallet. Pinned to the canonical published crate +# names; cargo normalizes dash/underscore in keys but the published +# name is the source of truth (e.g. `tokio-shared-rt`). +tokio-shared-rt = "0.1" +tempfile = "3" +dotenvy = "0.15" +bip39 = "2" +fs2 = "0.4" +serde = { version = "1", features = ["derive"] } +simple-signer = { path = "../simple-signer", features = ["derive"] } +parking_lot = "0.12" +# `dash-async::block_on` is the runtime-flavor-agnostic bridge used by +# `framework/context_provider.rs` to call `SpvRuntime`'s async API +# from the synchronous `ContextProvider` trait. Handles all three +# tokio runtime scenarios (no runtime, current-thread, multi-thread) +# without the `block_in_place` panic that `tokio::task::block_in_place` +# triggers on a current-thread runtime. +dash-async = { path = "../rs-dash-async" } +# `rt` feature gives us `CancellationToken` for the panic-hook + +# graceful-shutdown wiring described in the e2e plan. +tokio-util = { version = "0.7", features = ["rt"] } +# `TrustedHttpContextProvider` is the e2e harness's current default +# context provider. It backs `Sdk::set_context_provider` with the +# operator-trusted Quorum HTTP endpoint built into the crate (per +# network) so testnet / mainnet runs work without spinning up an +# SPV client. The SPV-backed provider lives in `framework/spv.rs` +# and `framework/context_provider.rs` and is currently disabled +# (see harness.rs) — re-enable when SPV cold-start is stable +# (Task #15). 
+rs-sdk-trusted-context-provider = { path = "../rs-sdk-trusted-context-provider" } + [features] default = ["bls", "eddsa"] diff --git a/packages/rs-platform-wallet/src/changeset/changeset.rs b/packages/rs-platform-wallet/src/changeset/changeset.rs index d1afc6fbee2..9b7fe883f69 100644 --- a/packages/rs-platform-wallet/src/changeset/changeset.rs +++ b/packages/rs-platform-wallet/src/changeset/changeset.rs @@ -582,6 +582,36 @@ pub struct PlatformAddressChangeSet { /// Last block height with recent address changes (compaction marker). /// `None` means "no change". pub last_known_recent_block: Option, + /// Lower-bound static fee estimate for the transfer that produced + /// this changeset, in credits. `0` for changesets not produced by + /// `transfer()` (e.g. sync-only changesets). See + /// [`Self::estimated_min_fee`]. + pub fee: Credits, +} + +impl PlatformAddressChangeSet { + /// Lower-bound static fee estimate for the transfer that produced + /// this changeset, in credits. + /// + /// Returns `0` for changesets that didn't originate from a + /// `transfer()` call — e.g. sync-only changesets, or changesets + /// constructed via `Default::default()`. The value is the raw + /// `AddressFundsTransferTransition::estimate_min_fee(input_count, + /// output_count, version)` result captured at submit time — it is + /// **NOT** the actual on-chain fee and is **NOT** adjusted by the + /// `fee_strategy`. + /// + /// `estimate_min_fee` only models the static + /// `state_transition_min_fees` floor; chain-time fees include + /// storage + processing costs that scale with the operation set + /// (~6.5M static vs ~14.94M observed real for 1in/1out at the time + /// of writing). Tests asserting on the actual chain-time debit + /// must read the post-broadcast balance delta directly, not this + /// value. See platform issue #3040 for the open ticket on + /// upgrading `estimate_min_fee` to a chain-time-accurate estimate. 
+ pub fn estimated_min_fee(&self) -> Credits { + self.fee + } } impl Merge for PlatformAddressChangeSet { @@ -606,6 +636,12 @@ impl Merge for PlatformAddressChangeSet { .map_or(r, |existing| existing.max(r)), ); } + // Fee: append-sum via `saturating_add`. Sync-only merges + // (`fee == 0`) are a no-op so a transfer's recorded fee + // survives untouched; merging two transfer changesets sums + // the per-operation fees so the merged total reflects the + // "total fee paid across operations in this batch" intent. + self.fee = self.fee.saturating_add(other.fee); } fn is_empty(&self) -> bool { @@ -613,6 +649,7 @@ impl Merge for PlatformAddressChangeSet { && self.sync_height.is_none() && self.sync_timestamp.is_none() && self.last_known_recent_block.is_none() + && self.fee == 0 } } diff --git a/packages/rs-platform-wallet/src/wallet/identity/state/manager/accessors.rs b/packages/rs-platform-wallet/src/wallet/identity/state/manager/accessors.rs index cfe81e52560..4e430588bb2 100644 --- a/packages/rs-platform-wallet/src/wallet/identity/state/manager/accessors.rs +++ b/packages/rs-platform-wallet/src/wallet/identity/state/manager/accessors.rs @@ -104,6 +104,20 @@ impl IdentityManager { .sum::() } + /// Snapshot of every managed identity's `Identifier` across both + /// buckets. Order is unspecified — callers that need a stable + /// order should sort the returned `Vec`. + pub fn identity_ids(&self) -> Vec { + let mut out: Vec = Vec::with_capacity(self.identity_count()); + out.extend(self.out_of_wallet_identities.keys().copied()); + for inner in self.wallet_identities.values() { + for managed in inner.values() { + out.push(managed.identity.id()); + } + } + out + } + /// `true` iff both buckets are empty. 
pub fn is_empty(&self) -> bool { self.out_of_wallet_identities.is_empty() && self.wallet_identities.is_empty() diff --git a/packages/rs-platform-wallet/src/wallet/platform_addresses/provider.rs b/packages/rs-platform-wallet/src/wallet/platform_addresses/provider.rs index d5836be9ff1..fa242d11d14 100644 --- a/packages/rs-platform-wallet/src/wallet/platform_addresses/provider.rs +++ b/packages/rs-platform-wallet/src/wallet/platform_addresses/provider.rs @@ -421,6 +421,18 @@ impl PlatformPaymentAddressProvider { self.last_known_recent_block = result.last_known_recent_block; } + /// Current `last_known_recent_block` watermark. + /// + /// Read-only mirror of the field used by the trait + /// implementation; exposed `pub` so wallet-level helpers + /// (notably [`super::wallet::PlatformAddressWallet::sync_watermark`]) + /// can return the value to callers without going through the + /// `AddressProvider` trait. Monotonic non-decreasing across + /// `sync_finished` calls. + pub fn last_known_recent_block(&self) -> u64 { + self.last_known_recent_block + } + /// Restore incremental-sync watermark from persisted state. pub(crate) fn set_stored_sync_state( &mut self, diff --git a/packages/rs-platform-wallet/src/wallet/platform_addresses/transfer.rs b/packages/rs-platform-wallet/src/wallet/platform_addresses/transfer.rs index dc2cec1c053..24aa97d74af 100644 --- a/packages/rs-platform-wallet/src/wallet/platform_addresses/transfer.rs +++ b/packages/rs-platform-wallet/src/wallet/platform_addresses/transfer.rs @@ -45,16 +45,25 @@ impl PlatformAddressWallet { let version = platform_version.unwrap_or(LATEST_PLATFORM_VERSION); - let address_infos = match input_selection { + // Capture (input_count, output_count) so we can compute the + // fee paid after broadcast for `PlatformAddressChangeSet::fee`. + // The output map is consumed by the SDK call below; the + // input map is materialized (`Auto`) or is the caller's + // (`Explicit*`). 
+ let output_count = outputs.len(); + let (address_infos, input_count) = match input_selection { InputSelection::Explicit(inputs) => { if inputs.is_empty() { return Err(PlatformWalletError::AddressOperation( "Transfer requires at least one input address".to_string(), )); } - self.sdk + let n = inputs.len(); + let infos = self + .sdk .transfer_address_funds(inputs, outputs, fee_strategy, address_signer, None) - .await? + .await?; + (infos, n) } InputSelection::ExplicitWithNonces(inputs) => { if inputs.is_empty() { @@ -62,7 +71,9 @@ impl PlatformAddressWallet { "Transfer requires at least one input address".to_string(), )); } - self.sdk + let n = inputs.len(); + let infos = self + .sdk .transfer_address_funds_with_nonce( inputs, outputs, @@ -70,7 +81,8 @@ impl PlatformAddressWallet { address_signer, None, ) - .await? + .await?; + (infos, n) } InputSelection::Auto => { // Auto-select supports `[DeductFromInput(0)]` and @@ -89,12 +101,27 @@ impl PlatformAddressWallet { let inputs = self .auto_select_inputs(account_index, &outputs, &fee_strategy, version) .await?; - self.sdk + let n = inputs.len(); + let infos = self + .sdk .transfer_address_funds(inputs, outputs, fee_strategy, address_signer, None) - .await? + .await?; + (infos, n) } }; + // Lower-bound static estimate from `estimate_min_fee` — + // captures the `state_transition_min_fees` floor only, with + // no adjustment for the chosen `fee_strategy`. This crate + // ships transfers under both `[ReduceOutput(0)]` (the + // wallet-factory default) and `[DeductFromInput(0)]`; either + // way the chain-time fee scales with storage + processing + // costs and is typically larger than this value (see + // `PlatformAddressChangeSet::estimated_min_fee` for the + // honest doc and platform issue #3040). + let fee_paid = + AddressFundsTransferTransition::estimate_min_fee(input_count, output_count, version); + // Get the cached key source from the unified provider for gap // limit maintenance. 
let key_source = { @@ -106,7 +133,10 @@ impl PlatformAddressWallet { // Update balances in the ManagedPlatformAccount. let mut wm = self.wallet_manager.write().await; - let mut cs = PlatformAddressChangeSet::default(); + let mut cs = PlatformAddressChangeSet { + fee: fee_paid, + ..Default::default() + }; if let Some(info) = wm.get_wallet_info_mut(&self.wallet_id) { if let Some(account) = info .core_wallet diff --git a/packages/rs-platform-wallet/src/wallet/platform_addresses/wallet.rs b/packages/rs-platform-wallet/src/wallet/platform_addresses/wallet.rs index 0c08fc8a425..4fbec312273 100644 --- a/packages/rs-platform-wallet/src/wallet/platform_addresses/wallet.rs +++ b/packages/rs-platform-wallet/src/wallet/platform_addresses/wallet.rs @@ -254,6 +254,28 @@ impl PlatformAddressWallet { .unwrap_or_default() } + /// Read the current incremental-sync watermark from the unified + /// platform-address provider. + /// + /// Returns `None` when the provider hasn't been initialised yet + /// (no [`Self::initialize`] call) or when the provider has no stored + /// watermark (whether restored via [`Self::apply_sync_state`] or + /// produced by a previous sync). The value is monotonic non-decreasing + /// across [`Self::sync_balances`](super::sync) calls against the + /// same chain — a later sync can only advance the watermark, never + /// roll it back. A zero-valued watermark is reported as `None` to + /// match the "no stored watermark" convention used elsewhere in + /// the wallet (see [`Self::apply_sync_state`]). + pub async fn sync_watermark(&self) -> Option { + let guard = self.provider.read().await; + let raw = guard.as_ref().map(|p| p.last_known_recent_block())?; + if raw == 0 { + None + } else { + Some(raw) + } + } + /// Get total platform credits across all addresses. /// /// Returns the sum of all cached balances. 
diff --git a/packages/rs-platform-wallet/tests/.env.example b/packages/rs-platform-wallet/tests/.env.example new file mode 100644 index 00000000000..2f690b1996f --- /dev/null +++ b/packages/rs-platform-wallet/tests/.env.example @@ -0,0 +1,49 @@ +# `rs-platform-wallet` E2E test framework — operator configuration. +# +# Copy this file to `tests/.env` (do NOT commit `.env`; the workspace +# `.gitignore` covers it) and fill in `PLATFORM_WALLET_E2E_BANK_MNEMONIC` +# with a BIP-39 seed phrase for a Platform-address wallet that already +# holds at least `PLATFORM_WALLET_E2E_MIN_BANK_CREDITS` credits. +# +# `tests/.env` is loaded automatically by `framework::config::Config::from_env` +# (anchored at `${CARGO_MANIFEST_DIR}/tests/.env`, so the path is +# deterministic regardless of the caller's CWD). Process env vars take +# precedence over `.env` values — `dotenvy::from_path` does NOT +# overwrite already-set variables. + +# REQUIRED. BIP-39 mnemonic for the bank wallet. Bank must hold +# `>= PLATFORM_WALLET_E2E_MIN_BANK_CREDITS` credits before the first +# test run; under-funded loads panic with the bank's primary receive +# address printed so the operator knows where to top up. +PLATFORM_WALLET_E2E_BANK_MNEMONIC="" + +# OPTIONAL. Network selector — `testnet` (default), `mainnet`, +# `devnet`, `regtest`/`local`. Most operators want testnet. +# PLATFORM_WALLET_E2E_NETWORK=testnet + +# OPTIONAL. Comma-separated DAPI endpoint URLs. Overrides the SDK's +# built-in seed list for the selected network. Useful when running +# against a private cluster. +# PLATFORM_WALLET_E2E_DAPI_ADDRESSES="https://my-dapi-1.example:1443,https://my-dapi-2.example:1443" + +# OPTIONAL. Minimum bank balance threshold (credits). Defaults to +# 500_000_000 (5x the ~115M per-run cost; see platform #3040). +# Bumping this gates the harness against starting with too little +# to fund several test wallets. +# PLATFORM_WALLET_E2E_MIN_BANK_CREDITS=500000000 + +# OPTIONAL. 
Workdir base path; the framework picks a slot under this +# directory and holds a `flock` for the test-process lifetime so +# concurrent runs on the same machine don't collide. Defaults to +# `${TMPDIR}/dash-platform-wallet-e2e`. +# PLATFORM_WALLET_E2E_WORKDIR=/tmp/dash-platform-wallet-e2e + +# OPTIONAL. Override URL for the trusted HTTP context provider. +# Defaults to the network-builtin endpoint baked into +# `rs-sdk-trusted-context-provider` (testnet/mainnet endpoints +# included). Required for devnet runs and any custom trust anchor. +# PLATFORM_WALLET_E2E_TRUSTED_CONTEXT_URL="https://quorums.testnet.networks.dash.org" + +# OPTIONAL. Tracing filter. Increase to `debug`/`trace` for detailed +# sync output during a test run. +# RUST_LOG=info,platform_wallet=debug diff --git a/packages/rs-platform-wallet/tests/e2e.rs b/packages/rs-platform-wallet/tests/e2e.rs new file mode 100644 index 00000000000..28186802755 --- /dev/null +++ b/packages/rs-platform-wallet/tests/e2e.rs @@ -0,0 +1,14 @@ +//! End-to-end integration tests for `rs-platform-wallet`. +//! +//! Single test binary with a process-shared `E2eContext` (bank +//! wallet, SDK, panic-safe registry). `framework/` provides the +//! harness; `cases/` hosts `#[tokio_shared_rt::test(shared)]` entries. + +#![allow(dead_code, unused_imports)] + +// `tests/e2e.rs` is the integration-test crate root; explicit +// `#[path]` keeps the on-disk layout grouped under `tests/e2e/`. +#[path = "e2e/cases/mod.rs"] +mod cases; +#[path = "e2e/framework/mod.rs"] +mod framework; diff --git a/packages/rs-platform-wallet/tests/e2e/README.md b/packages/rs-platform-wallet/tests/e2e/README.md new file mode 100644 index 00000000000..f22adb62cc8 --- /dev/null +++ b/packages/rs-platform-wallet/tests/e2e/README.md @@ -0,0 +1,380 @@ +# E2E Test Framework — `rs-platform-wallet` + +## Status + +This framework was assembled across Waves 1-18, audited by QA in Wave 5, and exercised +end-to-end against Dash testnet. 
The single `transfer_between_two_platform_addresses` +test runs green: `cargo check` / `cargo clippy` / `cargo fmt --check` pass, and the +live happy-path run has been executed successfully in this branch. Future reruns +still require a testnet bank wallet pre-funded with +`>= PLATFORM_WALLET_E2E_MIN_BANK_CREDITS` credits; once an operator provisions one +and exports `PLATFORM_WALLET_E2E_BANK_MNEMONIC` (or sets it in `tests/.env`), the +harness is ready to run again via `cargo test` (see [Running tests](#running-tests)). + +The runtime-flavor defect surfaced during the QA-001 reproduction (default +`tokio_shared_rt::test(shared)` lands on a current-thread runtime, which previously +panicked inside the SPV-backed context provider's `block_in_place` bridge) is +resolved. The harness now defaults to +[`TrustedHttpContextProvider`](#context-provider) and the retained +`SpvContextProvider` was rewritten in Wave 7 to use `dash_async::block_on`, which is +runtime-flavor agnostic. Multi-thread is therefore no longer strictly required, but +we still recommend +`#[tokio_shared_rt::test(shared, flavor = "multi_thread", worker_threads = 12)]` — +it mirrors the `dash-evo-tool/tests/backend-e2e/` precedent and gives SPV background +tasks (when re-enabled per Task #15) head-room. The canonical pattern below uses it. + +End-to-end tests that exercise the full wallet -> SDK -> broadcast pipeline against a +live Dash testnet. The framework validates platform-address credit operations through +the same `PlatformWalletManager` and `dash-sdk` layers used by production applications. + +The design is modelled on `dash-evo-tool/tests/backend-e2e/`, with one important +difference in funding strategy: where DET uses Core asset locks to move value from +Layer 1 to Platform, this framework uses a **platform-address bank wallet** that +already holds credits. This avoids the need for a funded Core UTXO wallet and an +asset-lock broadcast during test initialization. 
+ +The directory is named `e2e/` rather than `platform_e2e/` because Core-feature tests +(SPV-driven UTXO operations) will land here too once the wallet's Core SPV pipeline is +stable enough to drive from tests. See [Future Core support](#future-core-support). + +--- + +## Prerequisites + +- A **testnet bank wallet** — a BIP-39 seed phrase for a Platform address that already + holds enough credits to fund tests. You need this exactly once; subsequent runs + recover unused test-wallet funds automatically. +- Network access to Dash testnet DAPI nodes (default) or a local/devnet cluster. +- Rust toolchain (stable, matches workspace `rust-toolchain.toml`). + +Tests are gated behind `#[ignore]` so a stock `cargo test` (or workspace-wide +invocation) stays green for contributors and CI jobs that lack a funded testnet +bank wallet, live DAPI access, and the operator `.env`. To execute the live suite +once setup is in place, opt in explicitly with `--ignored`: + +```bash +cargo test --test e2e -- --ignored --nocapture +``` + +If `PLATFORM_WALLET_E2E_BANK_MNEMONIC` is unset when an opt-in run starts, the +harness panics with an actionable message naming the bank's primary receive +address — the failure is operator-actionable, not silent. An under-funded bank +wallet panics with the same "top up at <address>" pointer. + +--- + +## Environment variables + +The framework reads configuration from the process environment and from +`packages/rs-platform-wallet/tests/.env` (anchored at `${CARGO_MANIFEST_DIR}/tests/.env`, +loaded via `dotenvy::from_path`). The path is deterministic regardless of the +shell's CWD — the framework matches the convention used by `rs-sdk` and +`rs-sdk-ffi`'s integration-test harnesses. 
+ +A canonical operator template lives at `tests/.env.example` — copy it to +`tests/.env` and fill in the bank mnemonic before the first run: + +```bash +cp packages/rs-platform-wallet/tests/.env.example \ + packages/rs-platform-wallet/tests/.env +# then edit `packages/rs-platform-wallet/tests/.env` to set +# PLATFORM_WALLET_E2E_BANK_MNEMONIC +``` + +| Var | Required | Default | Purpose | +|-----|----------|---------|---------| +| `PLATFORM_WALLET_E2E_BANK_MNEMONIC` | yes | — | BIP-39 mnemonic for the bank wallet. This wallet must hold at least `PLATFORM_WALLET_E2E_MIN_BANK_CREDITS` credits before the first test runs. | +| `PLATFORM_WALLET_E2E_NETWORK` | no | `testnet` | Network to connect to: `testnet`, `devnet`, or `local`. | +| `PLATFORM_WALLET_E2E_DAPI_ADDRESSES` | no | network default | Comma-separated list of DAPI endpoint URLs. Overrides the SDK's built-in seed list for the selected network. | +| `PLATFORM_WALLET_E2E_MIN_BANK_CREDITS` | no | `500_000_000` | Minimum credit balance required in the bank wallet before initialization completes. If the bank is below this threshold the process panics with the bank's receive address so you know where to top it up. | +| `PLATFORM_WALLET_E2E_WORKDIR` | no | `${TMPDIR}/dash-platform-wallet-e2e` | Base path for the slot-locked working directory. SPV block cache, the test-wallet registry, and SDK state are stored here. | +| `PLATFORM_WALLET_E2E_TRUSTED_CONTEXT_URL` | no | network-builtin | Override URL for the trusted HTTP context provider. Leave unset to use the testnet/mainnet endpoint baked into `rs-sdk-trusted-context-provider`; required for devnet runs and any custom trust anchor. | +| `RUST_LOG` | no | `info,rs_platform_wallet=debug` | Tracing filter passed to `tracing-subscriber`. Increase to `debug` or `trace` for detailed sync output. | + +Shell-exported variables take precedence — `dotenvy::from_path` does NOT overwrite +variables already set in the process environment. 
The workspace `.gitignore` covers +`.env` files anywhere under the tree, so the operator file never gets committed. + +--- + +## Bank pre-funding (one-time) + +The bank wallet is loaded from `PLATFORM_WALLET_E2E_BANK_MNEMONIC` on the first run. +If its credit balance is below `PLATFORM_WALLET_E2E_MIN_BANK_CREDITS`, initialization +panics with a message like: + +```text +Bank wallet under-funded. + balance : 0 credits + required: 500000000 credits + top up at: yXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX + +Send testnet platform credits to the address above, then re-run the tests. +``` + +Copy the printed address and use any testnet-funded wallet to send credits to it: + +- **dash-evo-tool** — send from an existing DET identity's platform address. +- **wasm-sdk demo** — the browser demo supports platform-address transfers. +- Any other tool that can broadcast a platform-address credit transfer on testnet. + +After the transfer confirms (typically a few seconds on testnet), re-run the tests. +The bank does not need topping up again until its balance drops below the minimum, +which the startup sweep helps prevent by recovering funds from completed test wallets. + +--- + +## Running tests + +```bash +# After copying tests/.env.example -> tests/.env and filling in the bank mnemonic: +cd packages/rs-platform-wallet +cargo test --test e2e -- --nocapture +``` + +Or override the mnemonic inline if you keep multiple banks: + +```bash +PLATFORM_WALLET_E2E_BANK_MNEMONIC="..." cargo test --test e2e -- --nocapture +``` + +The first run takes **60–180 seconds**: + +- The harness installs `TrustedHttpContextProvider` against the configured DAPI + endpoints — first-run latency is dominated by the bank wallet's BLAST sync pass, + not SPV startup. Cold runs typically finish setup in 5–15 s; subsequent runs in + the same workdir slot reuse the SDK / token cache and are faster. +- The bank wallet runs a BLAST sync pass to discover its credit balances. 
+- The startup sweep recovers any wallets left over from previous panicked runs. +- Each test funds a fresh wallet, performs transfers, and tears down. + +> If the optional `SpvContextProvider` is wired in (Task #15), expect an +> additional 30–60 s on cold cache for the masternode-list sync. + +Run a single test by appending its name: + +```bash +cargo test --test e2e -- --nocapture transfer_between_two_platform_addresses +``` + +Tracing output (SPV sync events, balance polls, sweep results) is written to stderr. +`--nocapture` keeps it visible in the terminal. + +--- + +## Multi-process safety + +Multiple `cargo test` invocations running concurrently — for example, parallel CI jobs +on different branches — must not share the same bank wallet or working directory, or +they will conflict on nonces. + +The framework handles this at two levels: + +**Workdir slots** — each process tries to acquire an exclusive `flock` on the base +working directory. If that lock is already held it tries up to 10 numbered slot +directories (`-1`, `-2`, ...). A slot holds the SPV block cache, +the SDK config, and the test-wallet registry independently from every other slot. + +**Per-environment bank mnemonics** — two processes that share a mnemonic but land on +different slots will still conflict at the network level (duplicate nonces). The +correct isolation strategy is to give each CI environment its own distinct +`PLATFORM_WALLET_E2E_BANK_MNEMONIC`. The framework documents this requirement but +cannot enforce it across machines. + +Typical CI setup: + +```bash +# Branch A job +PLATFORM_WALLET_E2E_BANK_MNEMONIC="$BANK_MNEMONIC_BRANCH_A" cargo test ... + +# Branch B job (different secret) +PLATFORM_WALLET_E2E_BANK_MNEMONIC="$BANK_MNEMONIC_BRANCH_B" cargo test ... +``` + +--- + +## Panic-safe cleanup + +Every test wallet is registered in a JSON file at `/test_wallets.json` +**before** the test starts — not after. 
If a test panics, the wallet's seed remains in +the registry so the next run can recover it. + +### Happy path + +`setup_guard.teardown()` is the explicit, recommended path: + +1. Syncs the test wallet's balances. +2. Transfers any remaining credits back to the bank's primary address. +3. Removes the wallet entry from the registry and de-registers it from the manager. + +> Teardown does NOT block waiting for the bank to observe the inbound credits — the +> sweep transition is broadcast and confirmed by the chain, and the bank wallet +> re-syncs lazily on its next operation. Tests that immediately follow up with bank +> ops should call `bank.sync_balances().await` to refresh the cached view. + +### Panic path + +If `teardown()` is not called — because the test panicked or returned early — the +`SetupGuard` `Drop` implementation logs a warning: + +```text +SetupGuard dropped without explicit teardown — wallet +will be swept on next test process startup +``` + +The wallet entry stays in `test_wallets.json`. On the next run, the startup sweep +(`sweep_orphans`) iterates all registry entries, reconstructs each wallet from its +stored seed, syncs, and transfers remaining credits back to the bank. Successfully +swept wallets are removed from the registry; wallets that fail to sweep (transient +network error) are marked `Failed` and retried on the following run. + +The registry uses atomic writes (write to a temp file, then rename) to avoid +corruption from mid-write crashes. + +--- + +## Troubleshooting + +- **Bank under-funded** — Initialization panics with the bank's receive address and + the current balance. Top up the printed address from any testnet wallet and re-run. + The minimum threshold is controlled by `PLATFORM_WALLET_E2E_MIN_BANK_CREDITS` + (default 500 000 000 credits). + +- **DAPI / context-provider unreachable** — `TrustedHttpContextProvider` calls fail + if the configured DAPI endpoints are unreachable. 
Check `PLATFORM_WALLET_E2E_DAPI_ADDRESSES` + and network connectivity. Setting `RUST_LOG=debug` shows which DAPI nodes are + being contacted. (The optional SPV path adds its own ~30–60 s masternode-list + sync timeout — only relevant if `SpvContextProvider` is wired in.) + +- **Workdir slot exhausted** — If all 10 slots are locked, initialization fails with: + `no available workdir slots (tried 10 under )`. This typically means 10+ + concurrent processes are running against the same `PLATFORM_WALLET_E2E_WORKDIR` + base. Either wait for other processes to finish, remove stale lock files from + the slot directories (`rm */.lock`), or set `PLATFORM_WALLET_E2E_WORKDIR` + to a distinct path per environment. + +- **Test panicked — registry not cleared** — On the next run, the startup sweep log + will report `swept N wallets from previous panicked run`. This is expected behavior. + If the sweep itself fails (the orphaned wallet has no balance, or the network is + unavailable), the entry is marked `Failed` and retried on the following run. Entries + with a `Failed` status do not block test execution. + +--- + +## Context provider + +The harness installs +[`rs-sdk-trusted-context-provider::TrustedHttpContextProvider`](../../../rs-sdk-trusted-context-provider) +as the SDK's context provider at construction time. That provider answers quorum +public-key lookups over a trusted HTTP endpoint (testnet / mainnet defaults are +baked into the crate), which keeps e2e runs fast and reliable without spinning up +an SPV client. + +Override the endpoint via `PLATFORM_WALLET_E2E_TRUSTED_CONTEXT_URL` when running +against devnet, a custom test cluster, or any non-default trust anchor. + +```bash +PLATFORM_WALLET_E2E_TRUSTED_CONTEXT_URL="https://my-trusted-quorum.example/" \ + cargo test --test e2e -- --nocapture +``` + +--- + +## Deferred + +- **SPV-based context provider** (Task #15). 
The framework keeps the SPV plumbing + (`framework/spv.rs`, `framework/context_provider.rs`) compilable but disabled: + see the commented-out block in `framework/harness.rs::E2eContext::build`. Re-enable + by uncommenting that block once SPV cold-start is stable enough to drive from + tests; the `TrustedHttpContextProvider` swap is a single-line change. + +--- + +## Future Core support + +The directory is intentionally named `e2e/` rather than `platform_e2e/`. Once the +wallet's SPV-driven Core operations (UTXO selection, transaction broadcast, asset +locks) are stable enough to test end-to-end, Core-feature tests will live alongside +the existing platform-address tests under `tests/e2e/cases/core/`. + +When Task #15 lands, an `SpvRuntime` will run for the lifetime of the test process +and `SpvContextProvider` will be live-swapped into the SDK after mn-list sync. +Future identity and Core tests will get SPV-backed proof verification at that +point without changing the public test API. + +--- + +## Architecture quick reference + +The framework initializes once per test-binary process. All tests in `tests/e2e/` +share a single `E2eContext` via a `tokio::sync::OnceCell`. + +| Symbol | Where | What it does | +|--------|-------|-------------| +| `setup()` | `framework/mod.rs` | Initializes `E2eContext` (once), creates a fresh test wallet, registers it in the JSON registry, and returns a `SetupGuard`. | +| `SetupGuard.ctx` | `framework/wallet_factory.rs` | Reference to the shared `E2eContext` — holds the SDK, bank wallet, SPV runtime, and registry. | +| `SetupGuard.test_wallet` | `framework/wallet_factory.rs` | Fresh `TestWallet` for this test, pre-registered for panic-safe cleanup. | +| `ctx.bank().fund_address(addr, credits)` | `framework/bank.rs` | Transfers `credits` from the bank wallet to `addr`. Serialized within the process by `FUNDING_MUTEX`. 
| +| `test_wallet.transfer(outputs)` | `framework/wallet_factory.rs` | Broadcasts a platform-address credit transfer and returns a `PlatformAddressChangeSet`. | +| `wait_for_balance(wallet, addr, credits, timeout)` | `framework/wait.rs` | Polls the wallet's balance cache until `addr` holds at least `credits`, or times out. | +| `setup_guard.teardown()` | `framework/wallet_factory.rs` | Returns remaining credits to the bank, removes wallet from registry, de-registers from manager. | + +Canonical test pattern: + +```rust +use crate::framework::prelude::*; + +#[tokio_shared_rt::test(shared, flavor = "multi_thread", worker_threads = 12)] +async fn transfer_between_two_platform_addresses() { + let s = setup().await.expect("e2e setup failed"); + + let addr_1 = s.test_wallet.next_unused_address().await.unwrap(); + s.ctx.bank().fund_address(&addr_1, 100_000_000).await.unwrap(); + wait_for_balance(&s.test_wallet, &addr_1, 70_000_000, Duration::from_secs(60)) + .await + .unwrap(); + + let addr_2 = s.test_wallet.next_unused_address().await.unwrap(); + s.test_wallet + .transfer(std::iter::once((addr_2, 50_000_000)).collect()) + .await + .unwrap(); + + wait_for_balance(&s.test_wallet, &addr_2, 1_000_000, Duration::from_secs(60)) + .await + .unwrap(); + + // The production wallet does not surface a `fee_paid` accessor; + // derive it from the balance delta. `received + remaining + fee + // == funded`, so `fee = funded - received - remaining`. + let balances = s.test_wallet.balances().await; + let received = balances.get(&addr_2).copied().unwrap_or(0); + let remaining = balances.get(&addr_1).copied().unwrap_or(0); + let fee = 100_000_000_u64.saturating_sub(received).saturating_sub(remaining); + assert!(received >= 1_000_000 && received < 50_000_000); + assert!(fee > 0 && fee < 50_000_000); + + s.teardown().await.expect("teardown failed"); +} +``` + +The `shared` runtime attribute is not optional. 
SPV (when re-enabled per Task #15) +spawns background tasks bound to the runtime that created them. With `#[tokio::test]` +each test would create its own runtime; the first test's exit would drop that runtime +and kill SPV's background tasks, causing channel-closed errors in later tests. + +For deeper implementation details — module responsibilities, registry schema, signer +design, workdir slot algorithm — refer to the plan file at +`.claude/plans/ok-now-we-ll-get-prancy-biscuit.md`. + +> **Runtime flavor is recommended, not strictly required.** With the current +> `TrustedHttpContextProvider` default and the retained `SpvContextProvider`'s +> `dash_async::block_on` bridge (Wave 7), tests no longer panic on a +> current-thread runtime. We still recommend +> `flavor = "multi_thread", worker_threads = 12` to mirror the DET precedent and +> to leave head-room for SPV-backed providers and other concurrent background +> work; the canonical example uses it. + +--- + +Co-authored by [Claudius the Magnificent](https://github.com/lklimek/claudius) AI Agent diff --git a/packages/rs-platform-wallet/tests/e2e/TEST_SPEC.md b/packages/rs-platform-wallet/tests/e2e/TEST_SPEC.md new file mode 100644 index 00000000000..5ea2ed791e1 --- /dev/null +++ b/packages/rs-platform-wallet/tests/e2e/TEST_SPEC.md @@ -0,0 +1,1853 @@ +# `rs-platform-wallet` e2e — Test Case Specification + +Brain the size of a planet, and here I am cataloguing test cases. Right then. +This document enumerates the work to do; another document, somewhere, will +presumably enumerate the joy of doing it. + +--- + +## 1. Overview + +The `rs-platform-wallet` end-to-end suite lives at +`packages/rs-platform-wallet/tests/e2e/` and executes against Dash testnet via +the SDK and a pre-funded "bank" platform-address wallet. 
The harness was +introduced in PR #3549 (branch `feat/rs-platform-wallet-e2e`) and ships with a +single live case — `transfer_between_two_platform_addresses` — exercising +platform-address credit transfer between two addresses owned by the same test +wallet. + +This specification proposes a layered set of cases, grouped by feature area, +prioritised P0/P1/P2, and annotated with the harness extensions each requires. +Every case targets the production `PlatformWallet` API surface (no test-only +shims into the wallet), uses the bank-funded credit model already wired in +`framework/bank.rs`, and assumes the same network model PR #3549 ships with: +testnet by default, devnet/local by env override, no Layer-1 / Core-UTXO +assumptions for any P0/P1 case (Task #15 — SPV — is the gating dependency for +Core-feature tests). + +The spec is implementation-agnostic. Authors should consume it, not migrate it +verbatim from `dash-evo-tool` (DET) — DET parallels are cited only to anchor +intent and to surface battle-tested edge cases. The harness lives on top of +`PlatformWalletManager` and a `TrustedHttpContextProvider`, +so anything requiring SPV proofs, asset locks, shielded notes, or fresh contract +deployment is explicitly deferred (see §5). + +### 1.1 Priority scheme + +Every test case carries one of three priority levels. The priority drives both +listing order within a section and CI gating tier. + +- **P0 — Primary path.** The happy path that demonstrates the feature works. + CI-gating tier; failure blocks merge. Execute first. +- **P1 — Core variants.** Negative paths and alternate-input variants of P0 + cases that protect the primary contract. Execute alongside P0 in CI. +- **P2 — Edge cases.** Boundary, empty-input, concurrency, malformed-input, + and discovered-gap cases. Run nightly / on-demand; not gating unless an + active regression makes one of them so. Execute after P0/P1. 
+ +Within each feature-area subsection (Platform Addresses, Identity, Tokens, +DPNS, Dashpay, etc.), test cases are listed P0 first, then P1, then P2. The +suffix-letter convention (e.g. `PA-001b`, `PA-002c`) groups variant cases next +to their parent; new top-level edge cases get fresh dense IDs (e.g. `PA-009`, +`PA-010`). No existing case ID is renumbered; new cases slot in adjacent to +their parent. + +### 1.2 Mnemonic / seed source + +Mnemonics used by the harness (bank wallet, every `TestWallet`) MUST be drawn +from the BIP-39 English wordlist. Out-of-band entropy paths — raw entropy, +non-BIP-39 wordlists, or arbitrary UTF-8 strings fed as "mnemonic" — are out +of scope for this suite. Any test that generates a seed does so via the +BIP-39 mnemonic generator already used by `framework/wallet_factory.rs`. Cases +that exercise non-ASCII content (e.g. Unicode display names) do so on +downstream fields, not on the seed. + +--- + +## 2. Harness capability matrix + +Honest snapshot of what PR #3549 can drive today vs. what each test area still +needs. "Wallet API exists" reflects what `packages/rs-platform-wallet/src/` +already exposes; "Harness ready" reflects whether +`packages/rs-platform-wallet/tests/e2e/framework/` can drive it without code +changes. 
+ +| Area | Wallet API exists | Harness ready | Gaps to fill | Out of scope (and why) | +|------|-------------------|---------------|--------------|------------------------| +| Platform Addresses | yes (`platform_addresses/{transfer,sync,withdrawal,fund_from_asset_lock}`) | yes for transfer/sync; partial for withdrawal | needs `wait_for_balance_eq` (exact-equality variant), needs explicit-input transfer helper, needs withdrawal Core-balance verification stub | `withdraw` end-to-end (Layer-1 observation, blocked on Task #15); `fund_from_asset_lock` (Core UTXO needed, bank holds credits not coins) | +| Identity | yes (`identity/network/{register_from_addresses,top_up_from_addresses,registration,update,transfer,transfer_to_addresses,withdrawal}`) | no | `Signer` impl, identity-key derivation helper, `TestWallet::register_identity_from_addresses`, `wait_for_identity_balance` | asset-lock-funded register/top-up (DET territory; bank holds credits); identity withdrawal (Layer-1 observation) | +| Tokens | yes (`tokens/wallet.rs` and `identity/network/tokens/*`) | no | `Signer`, identity setup, contract-token discovery helper, `TestTokenContract` fixture pointer | fresh contract deployment (no testnet contract registry); group-action workflows that need multi-identity coordination outside one harness | +| Core / SPV | yes (`core/{wallet,balance,broadcast,balance_handler}`) | no — `spv_runtime: None` by design | enable SPV runtime (gated on Task #15), `wait_for_core_balance`, faucet helper | broadcast tests until SPV stable; tx-is-ours flag tests (DET parity, P2) | +| Asset Lock | yes (`asset_lock/{build,manager,sync,tracked,lock_notify_handler}`) | no | needs Core-UTXO funded test wallet, SPV runtime, `wait_for_asset_lock` | full path until Task #15 — bank wallet has no Core UTXOs | +| Shielded | yes (`shielded/{keys,note_selection,operations,prover,store,sync}`) | no | not a small extension — prover, viewing keys, note selection | entire surface — separate prover/keys 
complexity, defer to a dedicated suite | +| Contracts | yes (`identity/network/contract.rs::create_data_contract_with_signer`) | no | identity signer, schema fixtures (`tests/fixtures/contracts/`), `wait_for_contract_visible` | `replace`/`transfer` of an arbitrary deployed contract owned elsewhere — gated on a contract-registry strategy | +| DPNS | yes (`identity/network/dpns.rs::{register_name_with_external_signer,resolve_name,sync_dpns_names,contest_vote_state}`) | no | identity signer, name uniqueness (random suffix), `wait_for_dpns_name` | contested-name auctions (P2; multi-identity orchestration heavy) | +| Dashpay | yes (`identity/network/{profile,contact_requests,contacts,payments,dashpay_sync}`) | no | identity signer, two test identities + DPNS for one of them, `wait_for_contact_request` | full multi-step lifecycle relying on contact-request acceptance round trips beyond a single happy-path | +| Contested Names | yes (via DPNS contest API) | no | identity signer, multi-identity setup, vote orchestration | P2 only; testnet contest auctions are slow and DET already covers this end-to-end | + +Source citations for the "Wallet API exists" column are listed inline per case +(§3) using `file:line` form. + +--- + +## 3. Test cases — ranked + +### Quick index + +| ID | Title | Priority | Complexity | +|----|-------|----------|------------| +| PA-001 | Multi-output platform-address transfer | P0 | S | +| PA-002 | Partial-fund + change handling | P0 | S | +| PA-004 | Sweep-back: drain test wallet, observe bank credit | P0 | S | +| PA-003 | Fee scaling: one-output vs. 
five-output | P1 | M | +| PA-005 | Address rotation: gap-limit + observed-used cursor | P1 | M | +| PA-006 | Replay safety: same outputs, second submission rejected | P1 | M | +| PA-007 | Sync watermark idempotency | P1 | M | +| PA-008 | Concurrent funding from bank: serialised | P1 | S | +| PA-002b | Zero-change exact-equality (`Σ outputs + fee == input balance`) | P1 | S | +| PA-010 | Bank starvation: typed `BankUnderfunded` error | P1 | S | +| PA-001b | Transfer with `output_change_address: None` vs `Some(addr)` | P2 | S | +| PA-001c | Zero-credit single-output transfer | P2 | S | +| PA-004b | Sweep dust threshold boundary triplet | P2 | M | +| PA-004c | Sweep with exactly zero balance | P2 | S | +| PA-005b | `DEFAULT_GAP_LIMIT` triplet (19 / 20 / 21 unused) | P2 | M | +| PA-006b | Two concurrent broadcasts of identical ST bytes | P2 | M | +| PA-007b | Two concurrent `sync_balances` on one wallet | P2 | M | +| PA-008b | Two `TestWallet`s × three concurrent funders each | P2 | M | +| PA-008c | Observable serialisation of `FUNDING_MUTEX` | P2 | M | +| PA-009 | `min_input_amount` boundary triplet for cleanup | P2 | M | +| PA-011 | Workdir slot exhaustion at `MAX_SLOTS + 1` | P2 | M | +| PA-012 | `sync_balances` racing with `transfer` | P2 | M | +| PA-013 | Broadcast retry under transient DAPI 5xx | P2 | M | +| PA-014 | Multi-output at protocol-max output count | P2 | M | +| ID-001 | Register identity funded from platform addresses | P0 | L | +| ID-002 | Top-up identity from platform addresses | P0 | M | +| ID-003 | Identity-to-identity credit transfer | P0 | M | +| ID-004 | Identity update: add and disable a key | P1 | L | +| ID-005 | Transfer credits from identity to platform addresses | P1 | M | +| ID-006 | Refresh and load identity by index | P1 | M | +| ID-001b | `setup_with_n_identities(N)` multi-identity helper | P1 | M | +| ID-001c | Non-default `StateTransitionSettings` (`wait_for_proof = false`) | P2 | M | +| ID-003b | Concurrent identity-to-identity 
transfers serialise on identity nonce | P2 | M | +| ID-005b | `transfer_credits_to_addresses` with empty outputs | P2 | S | +| ID-006b | Identity-key derivation index boundary (`0` and `DEFAULT_GAP_LIMIT - 1`) | P2 | M | +| TK-001 | Token transfer between two identities | P1 | L | +| TK-001b | Token transfer of amount 0 | P2 | S | +| TK-002 | Token claim (perpetual / pre-programmed distribution) | P2 | L | +| TK-003 | Token mint (authorised identity) | P2 | M | +| TK-004 | Token burn | P2 | M | +| CR-001 | SPV mn-list sync readiness | P1 | M | +| CR-002 | Core wallet receive address derivation | P1 | M | +| CR-003 | Asset-lock-funded identity registration (full path) | P2 | L | +| CT-001 | Document put: deploy a fixture data contract | P1 | M | +| CT-002 | Document put / replace lifecycle | P2 | M | +| CT-003 | Contract update (add document type) | P2 | M | +| DPNS-001 | Register and resolve a `.dash` name | P0 | M | +| DPNS-001b | Name-length boundary quartet (2 / 3 / 63 / 64 chars) | P2 | M | +| DPNS-001c | DPNS name with a multibyte character | P2 | S | +| DPNS-002 | Resolve a known external name (negative-only) | P2 | S | +| DP-001 | Set DashPay profile | P1 | M | +| DP-001b | Profile with optional fields `None` vs `Some` | P2 | M | +| DP-001c | Profile `display_name` containing emoji / RTL text | P2 | S | +| DP-002 | Send and accept a contact request | P1 | L | +| DP-003 | Send a DashPay payment | P2 | L | +| CN-001 | Initiate a contested DPNS name (premium / 3-char) | P2 | L | +| CN-002 | Cast a masternode vote on a contested name | DEFERRED | — | +| Harness-G1a | Corrupted registry JSON: refuse to overwrite | P2 | M | +| Harness-G1b | Registry forward-compatible unknown field | P2 | S | +| Harness-G4 | Drop `wallet.transfer` future mid-flight, recover on next sync | P2 | L | +| Harness-ID-1 | `sweep_identities` regression: registered identities surrender credits at teardown | P0 | S | + +#### Found-bug pins + +| ID | Title | Priority | Complexity | 
+|----|-------|----------|------------| +| Found-001 | `auto_select_inputs_for_withdrawal` ignores `min_input_amount` floor | P2 | S | +| Found-002 | `auto_select_inputs_for_withdrawal` skips fee-target headroom check | P2 | M | +| Found-003 | `addresses_with_balances` and `total_credits` only see the first platform-payment account | P2 | S | +| Found-004 | `transfer` / `withdraw` / `fund_from_asset_lock` silently fall back to `address_index = 0` on lookup miss | P2 | S | +| Found-005 | `register_from_addresses` / `top_up_from_addresses` discard SDK-returned address balances and nonces | P2 | M | +| Found-006 | `top_up_identity_with_funding` ignores caller-supplied `topup_index` | P2 | S | +| Found-007 | `PlatformAddressSyncManager::start` lacks a generation guard so a fast `start()` → `stop()` → `start()` can spawn parallel sync threads | P2 | M | +| Found-008 | `LockNotifyHandler` uses `notify_waiters()` so a lock event arriving in the check / wait gap of `wait_for_proof` is dropped | P2 | M | +| Found-009 | wallet-event adapter swallows `RecvError::Lagged` events without compensating recovery | P2 | M | +| Found-010 | `PlatformAddressChangeSet::apply` ignores `funds.nonce` so persister-only nonce state can drift behind balance | P2 | S | +| Found-011 | `IdentityChangeSet::merge` documents commutativity but `insert + tombstone` for the same key resolves to "removed" regardless of submission order | P2 | S | +| Found-012 | `validate_or_upgrade_proof` and `wait_for_proof` only consult `standard_bip44_accounts`, missing CoinJoin / non-BIP-44 funding accounts | P2 | M | +| Found-013 | `recover_asset_lock_blocking` swallows every error and returns `()` — silent recovery failure | P2 | S | +| Found-014 | `transfer_credits_with_external_signer` never updates the receiver's local balance even when the receiver is wallet-owned | P2 | S | +| Found-015 | `load_from_persistor` leaves a partially registered wallet in `wallet_manager` when `wallet_id` mismatches | P2 | M | +| 
Found-016 | `remove_wallet` removes from `self.wallets` then `self.wallet_manager` non-atomically, leaving a window where readers see only one of the two | P2 | M | +| Found-017 | `register_wallet` registers wallet in memory even when persister `store` returns `Err` — vanishes on next launch | P2 | S | +| Found-018 | `PlatformAddressChangeSet::merge` documents fee semantics as "fee paid by the transfer that produced this changeset" but actually accumulates fees across merged changesets | P2 | S | + +Counts by priority: **P0: 8**, **P1: 17** (incl. 2 post-Task #15), **P2: 53** (incl. 1 post-Task #15, 1 gated, 18 Found-bug pins), **DEFERRED: 1** (79 total entries; 60 baseline + 18 Found-bug pins + 1 deferred placeholder). + +### Platform Addresses (PA) + +#### PA-001 — Multi-output platform-address transfer (one tx, N outputs) +- **Priority**: P0 +- **Status**: IMPLEMENTED — passing (testnet; gated by `cargo test -p platform-wallet --tests` plus operator env vars per `tests/e2e/README.md`). +- **Wallet feature exercised**: `wallet/platform_addresses/transfer.rs:31` (`PlatformAddressWallet::transfer`) +- **DET parallel**: `dash-evo-tool/tests/backend-e2e/wallet_tasks.rs:561` (`tc_014_wallet_platform_lifecycle`) covers a transfer; multi-output is a derivative variant. +- **Preconditions**: bank funded; `setup()` returns a fresh `TestWallet`. +- **Scenario**: + 1. Derive `addr_1` on test wallet; bank-fund with `90_000_000` credits; wait for balance. + 2. Derive `addr_2`, `addr_3` after the funding sync (two consecutive `next_unused_address` calls return distinct addresses only because the pool cursor advanced — see PA-005 for the assertion). + 3. Self-transfer `{addr_2: 20_000_000, addr_3: 30_000_000}` from `addr_1` in one call. + 4. Wait for `addr_2` and `addr_3` to each reach their target balance. 
+- **Assertions**: + - `balances[addr_2] == 20_000_000` + - `balances[addr_3] == 30_000_000` + - `total_credits == 90_000_000 - fee` (fee derived from balance delta) + - `0 < fee < 5_000_000` (fee scales sub-linearly with output count — guards regression of fee strategy). **Implementation note (post-Status update):** the active test pins `0 < fee < 30_000_000` because platform issue #3040 leaves chain-time fees ~20M for 1in/2out (vs the static `state_transition_min_fees` floor ~6.5M). The 5M ceiling is restored once #3040 lands and `calculate_min_required_fee` reflects chain-time reality. + - One observable on-chain change-set update, not two (wallet returned a single `PlatformAddressChangeSet`). +- **Negative variants**: + - Outputs total exceeds funded balance → expect `PlatformWalletError` of insufficient-funds shape. + - Empty output map → expect a typed validation error (not a panic). + - Duplicate output address (two entries with same `PlatformAddress`) → BTreeMap dedup is implicit; assert collapsed semantics. +- **Harness extensions required**: none. +- **Estimated complexity**: S +- **Rationale**: Closes the obvious gap left by `PR #3549` — the only existing case is one-input/one-output. Multi-output catches fee-scaling regressions, change-output handling, and any off-by-one on the `BTreeMap` plumbing into `transfer()`. + +#### PA-002 — Partial-fund + change handling (output < input balance) +- **Priority**: P0 +- **Status**: IMPLEMENTED — passing. +- **Wallet feature exercised**: `wallet/platform_addresses/transfer.rs:31`, `InputSelection::Auto` path (`platform_addresses/mod.rs:30`). +- **DET parallel**: `dash-evo-tool/tests/backend-e2e/wallet_tasks.rs:234` (`step_transfer_credits`). +- **Preconditions**: bank-funded test wallet. +- **Scenario**: + 1. Bank-fund `addr_1` with `60_000_000`. + 2. Transfer `5_000_000` to a fresh `addr_2`. + 3. Sync `addr_1` post-transfer. 
+- **Assertions**: + - `balances[addr_2] == 5_000_000` + - `balances[addr_1] == 60_000_000 - 5_000_000 - fee` (≈ `54_999_…`) + - `fee > 0` + - Inputs were drawn only from `addr_1` (assert `balances` over a third address `addr_3` not derived — sanity). +- **Negative variants**: + - Same scenario but with `InputSelection::Explicit({addr_2: …})` where `addr_2` has zero balance → typed insufficient-funds error. +- **Harness extensions required**: none for the happy path; the negative variant needs a thin `TestWallet::transfer_with_inputs` helper (~10 LoC). +- **Estimated complexity**: S +- **Rationale**: Confirms `Σ inputs == Σ outputs + fee` invariant — the property recently fixed in commits `aaf8be74ee` and `9ea9e7033c`. Without this case those regressions would be invisible. + +#### PA-004 — Sweep-back: drain test wallet, observe bank credit +- **Priority**: P0 +- **Status**: IMPLEMENTED — passing. +- **Wallet feature exercised**: `wallet/platform_addresses/transfer.rs:31` invoked from `framework/cleanup.rs::teardown_one`. +- **DET parallel**: implicit in DET — every test ends with bank refund. We surface it as a first-class case. +- **Preconditions**: bank-funded; test wallet seeded; baseline bank balance recorded before fund. +- **Scenario**: + 1. Record `bank_pre = bank.total_credits()`. + 2. Bank-fund `addr_1` with `40_000_000`. + 3. Wait for test wallet to observe. + 4. Call `setup_guard.teardown()` (sweep path). + 5. Wait for bank balance to reflect the inbound sweep. +- **Assertions**: + - `bank_post >= bank_pre - 40_000_000 - fund_fee - sweep_fee` + - `bank_post <= bank_pre - 40_000_000 - fund_fee + 40_000_000` (no double-credit) + - The test wallet's registry entry is removed (`registry.get(wallet_id).is_none()`). + - Total round-trip fee ≤ `1_000_000` credits (regression bound on combined cost). 
+- **Negative variants**: + - Test wallet balance below `SWEEP_DUST_THRESHOLD` (5M) → sweep is skipped, wallet still de-registered with `Skipped` status (assert `cleanup` log + final registry state). +- **Harness extensions required**: needs a `Bank::total_credits` accessor exposed to tests (already implemented at `framework/bank.rs:225`); needs `TestRegistry::get_status(wallet_id)` (~10 LoC if not already present). +- **Estimated complexity**: S +- **Rationale**: Validates the cleanup invariant the README promises in §"Panic-safe cleanup". Without this, a regression in `cleanup.rs` would silently leak credits across runs — bank slowly drains, eventually trips under-funded panic, no test ever names the cause. + +#### PA-003 — Fee scaling: one-output vs. five-output transfers +- **Priority**: P1 +- **Status**: IMPLEMENTED — passing. +- **Wallet feature exercised**: `wallet/platform_addresses/transfer.rs:31`, fee-strategy `AddressFundsFeeStrategyStep::DeductFromInput(0)` from `wallet_factory.rs:210`. +- **DET parallel**: none directly — DET tests `tc_014` lifecycle but not fee scaling explicitly. +- **Preconditions**: bank-funded test wallet with ≥ `200_000_000`. +- **Scenario**: + 1. Bank-fund `addr_1` with `100_000_000`. + 2. Transfer `5_000_000` to `addr_2` (single output). Record `fee_1`. + 3. Bank-fund `addr_3` with `100_000_000`. + 4. Transfer `1_000_000` each to `addr_4..addr_8` (five outputs). Record `fee_5`. +- **Assertions**: + - `fee_1 > 0`, `fee_5 > 0` + - `fee_5 > fee_1` (more outputs ⇒ larger byte size ⇒ larger fee) + - `fee_5 < 5 * fee_1` (sub-linear — outputs share inputs/headers) + - Documented bound: `fee_5 - fee_1 < 1_000_000` (regression guard; tighten once empirical numbers are known). +- **Negative variants**: none — this is a property test. +- **Harness extensions required**: none. +- **Estimated complexity**: M (two transfers + bookkeeping ≈ 100-150 LoC) +- **Rationale**: Encodes fee scaling as an asserted property. 
CodeRabbit fee-headroom regressions (commit `687b1f86cd`) and future fee-formula tweaks become test failures rather than silent behaviour shifts. + +#### PA-005 — Address rotation: gap-limit + observed-used cursor +- **Priority**: P1 +- **Status**: IMPLEMENTED — passing (4 of spec's 16 rounds; runtime budget compromise, sustained-rotation property at 16+ rounds untested). +- **Wallet feature exercised**: `wallet/platform_addresses/wallet.rs:180` (`next_unused_receive_address`); `provider::PerAccountPlatformAddressState`. +- **DET parallel**: `dash-evo-tool/tests/backend-e2e/wallet_tasks.rs:19` (`tc_012_generate_receive_address`). +- **Preconditions**: bank-funded test wallet; `DEFAULT_GAP_LIMIT = 20`. +- **Scenario**: + 1. Call `next_unused_address()` three times back-to-back BEFORE any sync. All three must return the same address (cursor is parked until first observed-used). + 2. Bank-fund the address; wait for balance. + 3. Call `next_unused_address()` once more. Must return a different address. + 4. Repeat steps 2-3 three more times (4 rounds total), funding each new address in turn. +- **Assertions**: + - First three calls return the same `PlatformAddress` (cursor not advanced). + - Each post-funding call advances the cursor: all 5 observed addresses (initial + 4 advances) are pairwise distinct. + - Every funded address holds at least `FUND_FLOOR` credits after a final balance sync (no misrouted funding). +- **Negative variants**: + - Derive 21+ unused addresses without funding — expect either gap-limit growth or a typed "gap exceeded" error (whichever the wallet contract defines; this case will surface that contract). +- **Harness extensions required**: none. +- **Estimated complexity**: M (bookkeeping ≈ 150 LoC; 4 funding round-trips are comfortably within P1 runtime budget). +- **Rationale**: The fix in commit `60f7850ab0` ("sort auto-select candidates by balance descending") is one of several invariants in the address provider that needs a regression test. 
PA-005 also documents the "cursor advances on observed-used" property that bit Wave 8 in PR #3549 (see `cases/transfer.rs:91-97`). The original spec called for 16 rounds (chain RTT × 16 ≈ 8 min); trimmed to 4 rounds as a P1-tier runtime compromise (QA-007). Sustained rotation through the full DIP-17 gap window remains untested at this tier — tracked for a dedicated slow-test variant. The previously listed assertion `signer.cached_key_count() >= 17` was struck (QA-008): `SimpleSigner` exposes no such accessor; the reference was to an unrelated `SeedBackedIdentitySigner` method. + +#### PA-006 — Replay safety: same outputs, second submission rejected +- **Priority**: P1 +- **Status**: IMPLEMENTED — passing. +- **Wallet feature exercised**: nonce handling inside `PutPlatformAddresses::put_with_address_funding_fetching_nonces` (re-broadcast). +- **DET parallel**: `dash-evo-tool/tests/backend-e2e/wallet_tasks.rs:234` indirectly tests nonces. +- **Preconditions**: bank-funded test wallet. +- **Scenario**: + 1. Fund `addr_1` with `50_000_000`. + 2. Capture the underlying state-transition bytes (requires exposing the changeset's `serialized_transition` — see harness extension below). + 3. Transfer `10_000_000` to `addr_2` (succeeds). + 4. Submit the captured bytes a second time via `sdk.broadcast_state_transition` directly. +- **Assertions**: + - Second submission returns a "stale nonce" / "already exists" SDK error (assert error class). + - Wallet's view of `addr_1` and `addr_2` is unchanged after the failed re-submit. +- **Negative variants**: none — this case IS the negative variant of PA-001. +- **Harness extensions required**: a `TestWallet::transfer_capturing_st_bytes` helper that returns the encoded ST alongside the change-set. ~30 LoC, plumbs through the SDK's `put_*` builder rather than `transfer()`. +- **Estimated complexity**: M (single-file, harness touch) +- **Rationale**: Closes a quiet but high-blast-radius regression class — nonce handling. 
If the SDK ever stops bumping nonces correctly, every wallet's "spam-click" UX breaks. PA-006 surfaces it deterministically. + +#### PA-007 — Sync watermark idempotency +- **Priority**: P1 +- **Status**: IMPLEMENTED — passing (positive path only). The negative variant ("disconnect from DAPI, expect typed network error, balances unchanged") is NOT covered by the current test file; it requires a per-test SDK with a swappable DAPI URL, but the harness today shares one `Sdk` across the process via `E2eContext::sdk`. Tracked as a follow-up: tightening would mean either a `TestWallet::with_sdk_override(bogus_url)` helper or a controllable DAPI proxy (sibling of PA-013). Out of scope for this PR. +- **Wallet feature exercised**: `wallet/platform_addresses/sync.rs:24` (`sync_balances`); `wallet/platform_addresses/wallet.rs:153` (`restore_sync_state`). +- **DET parallel**: implicit in DET's wallet-task lifecycle. +- **Preconditions**: bank-funded test wallet. +- **Scenario**: + 1. Bank-fund `addr_1` with `30_000_000`; wait. + 2. Call `sync_balances` three times in a row. + 3. Capture the post-sync watermark via `wallet.platform()..last_known_recent_block` (read through public state guard). +- **Assertions**: + - All three syncs succeed. + - Watermark is monotonic non-decreasing across calls. + - Cached balances are byte-equal across calls (no spurious mutation on re-sync). +- **Negative variants**: + - Disconnect from DAPI (config override to a bogus URL) and call `sync_balances` → typed network error; cached balances unchanged. +- **Harness extensions required**: an accessor on `TestWallet` to read the platform-address provider's sync state (or expose it through the existing `platform_wallet()` borrow + a public watermark getter on the provider — already on the API, just needs threading). +- **Estimated complexity**: M +- **Rationale**: Re-sync idempotency is silently load-bearing — UI clients call `sync_balances` on every refresh tick. 
A regression that double-counts on re-sync would be visually obvious in apps and silent in unit tests; PA-007 makes it explicit. + +#### PA-008 — Concurrent funding from bank: serialised by FUNDING_MUTEX +- **Priority**: P1 +- **Status**: IMPLEMENTED — passing. +- **Wallet feature exercised**: `framework/bank.rs::fund_address` and its `FUNDING_MUTEX` invariant. +- **DET parallel**: none — DET's bank model differs. +- **Preconditions**: bank-funded test wallet. +- **Scenario**: + 1. Derive `addr_1`, `addr_2`, `addr_3`. + 2. Spawn three concurrent `bank.fund_address` tasks (each `10_000_000`). + 3. Await all three. + 4. Sync. +- **Assertions**: + - All three addresses end with the funded amount (no nonce collisions, no lost funding). + - Total bank decrease == `30_000_000 + 3 * fund_fee`. + - No panic in `FUNDING_MUTEX` path. +- **Negative variants**: none — this case validates concurrency safety as a property. +- **Harness extensions required**: none. +- **Estimated complexity**: S +- **Rationale**: Encodes the FUNDING_MUTEX guarantee documented in `framework/bank.rs:39`. Without it, a future refactor that drops the mutex (or misuses it) would corrupt nonces and only surface intermittently. + +#### PA-002b — Zero-change exact-equality (`Σ outputs + fee == input balance`) +- **Priority**: P1 +- **Status**: IMPLEMENTED — passing. +- **Wallet feature exercised**: `wallet/platform_addresses/transfer.rs:31`; change-output suppression at the `Σ inputs == Σ outputs` boundary recently fixed in `aaf8be74ee` and `9ea9e7033c`. +- **DET parallel**: none — this is a regression-pinning case for our own commits. +- **Preconditions**: bank-funded test wallet. +- **Scenario**: + 1. Bank-fund `addr_1` with `60_000_000` and let it settle. Record `bal_1 = addr_1` balance. + 2. Build a one-output transfer `{addr_2: bal_1 - estimated_fee}` where `estimated_fee` is derived from the wallet's fee preview (or a calibrated PA-003 measurement). + 3. 
Tighten the output by 1 credit at a time until `Σ outputs + actual_fee == bal_1` exactly. Submit. +- **Assertions**: + - Transfer succeeds (no spurious "below dust" or change-output validation error). + - The on-wire state-transition contains exactly **one** output (the destination); no change output is materialised. + - `addr_1` post-balance == `0` exactly. Not `1`, not `dust_threshold`, not `None`. + - `balances[addr_2] == bal_1 - actual_fee` exactly. +- **Negative variants**: none (this case IS the boundary). +- **Harness extensions required**: a `TestWallet::estimate_transfer_fee(&outputs)` helper, or fall back to PA-003's empirical fee constants. +- **Estimated complexity**: S +- **Rationale**: Pins the `Σ inputs == Σ outputs + fee` invariant the wallet just shipped regressions on. Without an exact-equality boundary case, that bug-class re-emerges silently the next time the change-output predicate is touched. + +#### PA-010 — Bank starvation: typed `BankUnderfunded` error +- **Priority**: P1 +- **Status**: BLOCKED — needs harness refactor: per-test bank instance (e.g. `Bank::with_test_balance(target)`) OR injectable balance override on the singleton, plus a typed `BankError::Underfunded { available, requested }` variant on `framework/bank.rs`. The current `OnceCell`-backed singleton panics at load time and `fund_address` returns a generic `PlatformWalletError::AddressOperation` on under-fund, neither of which matches PA-010's contract. +- **Wallet feature exercised**: `framework/bank.rs::fund_address` precondition checks. +- **DET parallel**: none — operator-actionable harness contract. +- **Preconditions**: bank deliberately underfunded for the test (e.g. configure a fresh test bank with `5_000_000` total credits). +- **Scenario**: + 1. Configure the harness so `bank.total_credits()` is below the test's requested fund amount. + 2. Call `bank.fund_address(addr_1, 30_000_000)`. 
+- **Assertions**: + - `bank.fund_address` returns a typed `BankError::Underfunded { available, requested }` (or the equivalent named variant — pin whatever the code calls it). No panic, no generic `anyhow!` shape. + - Error message names the bank wallet id, the available balance, and the requested amount, so an operator can act without code-diving. + - The bank's funding mutex is released cleanly (a follow-up successful call after re-funding the bank works). + - Test wallet registry contains no half-created entry from the failed fund. +- **Negative variants**: none. +- **Harness extensions required**: a typed error variant on `framework/bank.rs` (most likely already present; confirm name); a way to construct an underfunded bank for the test (a `Bank::with_balance_for_test(...)` constructor or a fresh bank wallet pre-drained). +- **Estimated complexity**: S +- **Rationale**: Bank starvation is the single most common "weird CI failure" mode for this suite, and the failure mode shouldn't be a panic from inside `fund_address`. PA-010 makes the operator-actionable error part of the contract. + +#### PA-001b — Transfer with `output_change_address: None` vs `Some(addr)` +- **Priority**: P2 +- **Status**: BLOCKED — feature missing in production: `PlatformAddressWallet::transfer` has no `output_change_address: Option<Address>` parameter today (verified at `packages/rs-platform-wallet/src/wallet/platform_addresses/transfer.rs:31`). The drift is filed as Found-020 above; resolution is either spec realignment or a production extension. +- **Wallet feature exercised**: `wallet/platform_addresses/transfer.rs:31`; the `output_change_address: Option<Address>` argument routes change either to an auto-derived address or to an explicit one. +- **DET parallel**: none — exercises an Option-branch the existing PA cases never split. +- **Preconditions**: bank-funded test wallet. +- **Scenario**: + 1. Bank-fund `addr_1` with `60_000_000`. + 2. 
Run transfer `{addr_2: 5_000_000}` with `output_change_address: None`. Record the address that ended up holding the change. + 3. Bank-fund a fresh `addr_3` with `60_000_000`. + 4. Derive an explicit `change_addr` separately from `addr_3` (and from any output address). + 5. Run transfer `{addr_4: 5_000_000}` from `addr_3` with `output_change_address: Some(change_addr)`. +- **Assertions**: + - `None` branch: change lands on the wallet-internal documented "auto-derive change" address (likely the next unused receive address); record exactly which one and pin the rule in the assertion. + - `Some(change_addr)` branch: change balance shows up on `change_addr` exactly, and not on the source or any other address. + - In both branches `Σ inputs == Σ outputs + fee` holds. +- **Negative variants**: + - `output_change_address: Some(addr_with_existing_balance)` → assert merge-or-reject contract (whichever the wallet defines). +- **Harness extensions required**: none. +- **Estimated complexity**: S +- **Rationale**: The `Option` argument has no asserted contract today — `None` could drift into "change is silently lost" without a single test failing. + +#### PA-001c — Zero-credit single-output transfer +- **Priority**: P2 +- **Status**: IMPLEMENTED — passing. +- **Wallet feature exercised**: `wallet/platform_addresses/transfer.rs:31` boundary at output-amount zero. +- **DET parallel**: none. +- **Preconditions**: bank-funded test wallet. +- **Scenario**: + 1. Bank-fund `addr_1` with `30_000_000`. + 2. Call `transfer({addr_2: 0})` from `addr_1`. +- **Assertions**: pin one of the two contracts (whichever the wallet implements): + - **(a) Reject**: a typed validation error of "amount must be positive" shape; no state-transition broadcast; balances unchanged. + - **(b) Accept as fee-only**: transfer broadcasts; `balances[addr_2] == 0`; `addr_1` decreased by `fee` only. +- **Negative variants**: none — this case IS the zero-amount boundary. +- **Harness extensions required**: none. 
+- **Estimated complexity**: S +- **Rationale**: Zero-amount transfers are a classic boundary. The wallet's contract here is currently undocumented; whichever it is, an explicit case pins it. + +#### PA-004b — Sweep dust threshold boundary triplet +- **Priority**: P2 +- **Status**: IMPLEMENTED — passing (BELOW-gate sub-case only). The AT/JUST-ABOVE sub-cases collapse onto "broadcast attempted, broadcast failed" against the testnet fee market (chain-time fee ~`15_000_000` ≫ active gate of `100_000`); pinning them would leave a permanently-stuck testnet orphan with no recovery path. PA-004 already covers the well-above-fee path with `100_000_000`. The ACTIVE sweep gate is `min_input_amount` (`100_000`), not the `SWEEP_DUST_THRESHOLD = 5_000_000` referenced in the original scenario text — corrected at the implementation site. +- **Wallet feature exercised**: `framework/cleanup.rs` sweep gate at `min_input_amount` (active value: `100_000` credits via `PlatformVersion::latest().dpp.state_transitions.address_funds.min_input_amount`). +- **DET parallel**: none. +- **Preconditions**: bank-funded test wallet × 3 (one per boundary). +- **Scenario**: run three sub-cases independently, with wallet balance configured exactly: + 1. Balance == `SWEEP_DUST_THRESHOLD - 1` (i.e. `4_999_999`). Call cleanup. Assert sweep is **skipped** (registry status `Skipped`, no broadcast). + 2. Balance == `SWEEP_DUST_THRESHOLD` (i.e. `5_000_000`). Call cleanup. Assert sweep is **attempted** (broadcast emitted, bank credit observed minus fees). + 3. Balance == `SWEEP_DUST_THRESHOLD + 1` (i.e. `5_000_001`). Call cleanup. Assert sweep is **attempted**. +- **Assertions**: each sub-case asserts the registry status string and whether a state-transition was broadcast. The boundary at `==` must distinguish from `< threshold`. +- **Negative variants**: none. 
+- **Harness extensions required**: a way to configure a test wallet to hold an exact balance after fund + fee accounting (likely fund a slightly larger amount, then transfer the excess to a sink). May require the `TestWallet::transfer_with_inputs` helper (Wave F). +- **Estimated complexity**: M +- **Rationale**: The dust threshold is one of the few hard numeric gates in the cleanup path. Off-by-one at this boundary is the canonical bug class. + +#### PA-004c — Sweep with exactly zero balance +- **Priority**: P2 +- **Status**: IMPLEMENTED — passing with caveats. Spec asks for a `Skipped` registry status assertion but `framework/registry.rs::EntryStatus` exposes only `Active` / `Failed` (no `Skipped` variant). Spec also asks for a "no DAPI broadcast call made" counter or "absence of nonce consumption on the bank"; neither hook is wired in the harness today (broadcast counter would need an SDK instrumentation, and the test wallet — not the bank — is the one that would broadcast a sweep). Resolution: the test pins `Ok(()) + registry entry removed`, which together with `total_credits == 0` precondition is the strongest contract observable on the current harness; tightening to a positive "no broadcast" proof requires an SDK-level instrumentation hook that's out of scope for this PR. +- **Wallet feature exercised**: `framework/cleanup.rs` sweep path with empty inputs. +- **DET parallel**: none. +- **Preconditions**: bank-funded harness; test wallet seeded but never funded (or fully drained before cleanup). +- **Scenario**: + 1. Create a fresh `TestWallet`. Do not fund it. + 2. Call `setup_guard.teardown()`. +- **Assertions**: + - Cleanup returns `Ok(())`. + - Registry entry is removed after teardown (the dust-gate skip path completes the lifecycle even though the sweep isn't broadcast). The fictional `Skipped` registry status is a spec drift — see Status above. 
+ - No broadcast attempted — observable today via the wallet's `total_credits == 0` precondition (combined with `cleanup.rs:171-178`'s explicit "skipping platform sweep" branch when total < dust_gate). A direct broadcast-counter assertion would require an SDK instrumentation hook. +- **Negative variants**: none. +- **Harness extensions required**: a "did we broadcast?" hook on the harness SDK, or a registry status accessor. +- **Estimated complexity**: S +- **Rationale**: A no-op cleanup must not throw. Without this case a refactor that moves the empty-input check could regress to `Err(InsufficientFunds)` and the test suite would never notice. + +#### PA-005b — `DEFAULT_GAP_LIMIT` triplet (19 / 20 / 21 unused) +- **Priority**: P2 +- **Status**: BLOCKED — needs production API: `PlatformAddressWallet::next_unused_receive_addresses(count)` wrapping `key_wallet::AddressPool::next_unused_multiple`. The current `next_unused_receive_address` parks on the lowest-unused index until observed-used; the 21-fund-and-derive workaround takes ~10 min runtime per sub-case (~30 s × 21 rounds × 3 sub-cases) and is operationally noisy. +- **Wallet feature exercised**: `wallet/platform_addresses/wallet.rs:180` gap-limit enforcement at `DEFAULT_GAP_LIMIT = 20`. +- **DET parallel**: none direct; PA-005 covers cursor rotation but not the gap-limit boundary. +- **Preconditions**: bank-funded test wallet. +- **Scenario**: three sub-cases run on separate `TestWallet` instances: + 1. Derive **19** unused addresses (no funding). Then derive a 20th. Assert all 20 are returned without error or gap-limit growth event. + 2. Derive **20** unused addresses (no funding). Then derive a 21st. Pin the contract: either the wallet returns a typed `GapLimitExceeded` error, or it grows the limit (assert a `GapLimitGrown` event, or whatever the wallet exposes). + 3. Derive **21** unused addresses by request, asserting the same contract as (2). 
+- **Assertions**: each sub-case nails the wallet's contract at the `DEFAULT_GAP_LIMIT` boundary. +- **Negative variants**: none — this case is the boundary. +- **Harness extensions required**: a way to derive without funding (already supported via `next_unused_address` repeatedly; confirm cursor doesn't auto-park). +- **Estimated complexity**: M +- **Rationale**: PA-005's "21+ unused addresses" line is exploratory; PA-005b promotes it to an asserted boundary on each side of `DEFAULT_GAP_LIMIT`. + +#### PA-006b — Two concurrent broadcasts of identical ST bytes +- **Priority**: P2 +- **Status**: IMPLEMENTED — passing. +- **Wallet feature exercised**: nonce / replay-protection at the SDK / DAPI boundary. +- **DET parallel**: none. +- **Preconditions**: bank-funded test wallet; PA-006's `transfer_capturing_st_bytes` helper. +- **Scenario**: + 1. Fund `addr_1` and capture the encoded ST bytes for a transfer (do not broadcast yet). + 2. Spawn two concurrent `tokio::spawn` tasks each calling `sdk.broadcast_state_transition(captured_bytes)`. + 3. Await both. +- **Assertions**: + - Exactly one of the two futures returns success; the other returns the documented stale-nonce / already-exists / duplicate-broadcast error class. + - Final wallet state matches a single applied transfer (no double-debit). +- **Negative variants**: none. +- **Harness extensions required**: PA-006's `transfer_capturing_st_bytes`. +- **Estimated complexity**: M +- **Rationale**: PA-006 covers sequential replay; the race-condition variant is materially different code path inside the SDK / DAPI mempool. + +#### PA-007b — Two concurrent `sync_balances` on one wallet +- **Priority**: P2 +- **Status**: IMPLEMENTED — passing. +- **Wallet feature exercised**: `wallet/platform_addresses/sync.rs:24` reentrancy / internal locking. +- **DET parallel**: none. +- **Preconditions**: bank-funded test wallet. +- **Scenario**: + 1. Fund `addr_1` with `30_000_000`; wait for visibility. + 2. 
Spawn two concurrent `sync_balances()` futures on the same `TestWallet` handle. + 3. Await both. +- **Assertions**: + - Both futures return `Ok(())`. + - Post-state cached balance equals on-chain truth (not 2× — no double-counting). + - Sync watermark advanced exactly once net (no spurious double-bump). +- **Negative variants**: none. +- **Harness extensions required**: same accessor PA-007 already requires. +- **Estimated complexity**: M +- **Rationale**: PA-007 is sequential; double-counting under concurrent re-sync is a UI-tier hazard worth pinning. + +#### PA-008b — Two `TestWallet`s × three concurrent funders each +- **Priority**: P2 +- **Status**: IMPLEMENTED — passing. +- **Wallet feature exercised**: `framework/bank.rs::fund_address` cross-wallet contention. +- **DET parallel**: none. +- **Preconditions**: bank with `≥ 70_000_000 + 6 * fund_fee` credits. +- **Scenario**: + 1. Spin up two independent `TestWallet` instances, A and B. + 2. Derive `a1, a2, a3` on A and `b1, b2, b3` on B. + 3. Spawn six concurrent `bank.fund_address` calls (three on A's addresses, three on B's, each `10_000_000`). + 4. Await all six. +- **Assertions**: + - All six addresses end with the funded amount (no nonce collision across wallet boundaries). + - Total bank decrease == `60_000_000 + 6 * fund_fee`. + - No panic, no missing balances on any sub-set after sync. +- **Negative variants**: none. +- **Harness extensions required**: helper to instantiate two independent `TestWallet`s in one harness setup. +- **Estimated complexity**: M +- **Rationale**: PA-008 keeps contention inside one `TestWallet`; PA-008b proves the bank's serialisation works under cross-wallet contention too — the realistic CI shape. + +#### PA-008c — Observable serialisation of `FUNDING_MUTEX` +- **Priority**: P2 +- **Status**: IMPLEMENTED — passing. 
Harness instrumentation lives in `framework/bank.rs` (`FundingMutexHistoryEntry`, `BankWallet::funding_mutex_history`); each `fund_address` call records `(seq, entry_ns, exit_ns)` under the lock so the test asserts pairwise non-overlap of the critical sections. +- **Wallet feature exercised**: `framework/bank.rs::FUNDING_MUTEX` invariant. +- **DET parallel**: none. +- **Preconditions**: bank-funded test wallet; instrumentation hook on `FUNDING_MUTEX` (entry/exit timestamps or per-call sequence number). +- **Scenario**: + 1. Spawn three concurrent `bank.fund_address` tasks. + 2. Each task records its mutex-entry timestamp and mutex-exit timestamp via a test-only instrumentation hook. + 3. Await all three. +- **Assertions**: + - The three intervals `[entry_i, exit_i]` are pairwise non-overlapping (proves serialisation, not just correctness). + - Equivalently / additionally: the bank's funding-tx nonces are strictly monotonic in the same order as the mutex entries. +- **Negative variants**: none. +- **Harness extensions required**: an instrumentation hook on `framework/bank.rs` (test-only `cfg(test)` accessor for the mutex's last-entry sequence, or a `parking_lot::Mutex` instrumentation wrapper). +- **Estimated complexity**: M +- **Rationale**: PA-008 tests "all three calls succeed" — a future refactor that drops the mutex but happens to win the race in CI would still pass. PA-008c asserts the *mechanism* observably, so a silent removal of the mutex fails the test deterministically. + +#### PA-009 — `min_input_amount` boundary triplet for cleanup +- **Priority**: P2 +- **Status**: IMPLEMENTED — passing (BELOW-gate sub-case + version-source assertion). The unique contribution vs PA-004b is the version-source pin: the cleanup gate value equals `PlatformVersion::latest().dpp.state_transitions.address_funds.min_input_amount`, and the gate is positive. AT/JUST-ABOVE sub-cases are degenerate against the testnet fee market — see PA-004b status. 
+- **Wallet feature exercised**: `framework/cleanup.rs::min_input_amount`, sourced from `platform_version.dpp.state_transitions.address_funds.min_input_amount`. Test reads it via the new `framework/cleanup.rs::cleanup_dust_gate` accessor. +- **DET parallel**: none. +- **Preconditions**: bank-funded harness; test wallet × 3, each with a precisely tuned balance. +- **Scenario**: read `min` = `platform_version.dpp.state_transitions.address_funds.min_input_amount`. Run three sub-cases: + 1. Balance == `min - 1`. Call cleanup. Assert `Skipped` (cleanup must not attempt sweep). + 2. Balance == `min`. Call cleanup. Assert sweep is attempted (broadcast emitted; or fails with the documented "fee pushes below threshold" typed error). + 3. Balance == `min + 1`. Call cleanup. Assert sweep is attempted and succeeds. +- **Assertions**: each sub-case pins the cleanup status (`Skipped` vs attempted) and the typed error if the attempt fails. +- **Negative variants**: none. +- **Harness extensions required**: PA-004b's exact-balance setup helper; a way to read `min_input_amount` from the active `PlatformVersion` inside the test. +- **Estimated complexity**: M +- **Rationale**: `min_input_amount` is currently entirely uncovered. A protocol-version bump that changes the value would silently shift cleanup behaviour, with no failing test to flag the shift. + +#### PA-011 — Workdir slot exhaustion at `MAX_SLOTS + 1` +- **Priority**: P2 +- **Status**: STUB — placeholder for follow-up PR (no test file in `tests/e2e/cases/` yet; needs sub-process orchestration or in-process `flock` simulation). +- **Wallet feature exercised**: `framework/workdir.rs` `flock`-based slot allocation; `MAX_SLOTS = 10`. +- **DET parallel**: none — operator-actionable harness contract. +- **Preconditions**: a clean workdir base path with no held slots. +- **Scenario**: + 1. Spawn `MAX_SLOTS` sub-processes (or `MAX_SLOTS` concurrent harness contexts within one process) that each acquire and hold a workdir slot. 
+ 2. Spawn one additional (i.e. the 11th) harness context attempting to acquire a slot. +- **Assertions**: + - The first `MAX_SLOTS` acquisitions succeed and land on distinct slot indices. + - The 11th returns a typed `WorkdirError::NoAvailableSlots { tried, base_path }` (pin the variant name) within a bounded time — no silent infinite wait. + - Cleanup releases all slots; a subsequent acquisition succeeds. +- **Negative variants**: none. +- **Harness extensions required**: a typed error variant on `framework/workdir.rs` (likely already there; confirm name); a way to spawn sub-processes for the test, or simulate slot holders within one process via held `flock` guards. +- **Estimated complexity**: M +- **Rationale**: Slot exhaustion is the second most common "weird CI failure" mode after bank starvation. PA-011 makes its failure mode explicit. + +#### PA-012 — `sync_balances` racing with `transfer` +- **Priority**: P2 +- **Status**: STUB — placeholder for follow-up PR (no test file in `tests/e2e/cases/` yet). +- **Wallet feature exercised**: internal locking between `wallet/platform_addresses/sync.rs:24` and `wallet/platform_addresses/transfer.rs:31`. +- **DET parallel**: none. +- **Preconditions**: bank-funded test wallet. +- **Scenario**: + 1. Bank-fund `addr_1` with `40_000_000`; wait. + 2. Spawn two concurrent tasks: `wallet.sync_balances()` and `wallet.transfer({addr_2: 5_000_000})`. + 3. Await both. +- **Assertions**: + - Both return `Ok(...)`. + - Final state is consistent with sequential execution: `balances[addr_2] == 5_000_000`, `balances[addr_1] == 40_000_000 - 5_000_000 - fee`. No "fee charged twice", no "in-flight transfer double-counted". + - The transfer's fee was computed against a non-stale balance view (i.e. no `InsufficientFunds` because `sync_balances` clobbered the cache mid-build). +- **Negative variants**: none. +- **Harness extensions required**: none beyond what PA-002 / PA-007 already need. 
+- **Estimated complexity**: M +- **Rationale**: Mobile clients call `sync_balances` aggressively while the user is typing into a transfer form. A regression where these two paths race silently produces wrong fees or stale balances; PA-012 pins the contract. + +#### PA-013 — Broadcast retry under transient DAPI 5xx +- **Priority**: P2 +- **Status**: BLOCKED — needs harness refactor: a controllable test DAPI proxy (httpmock-style) able to inject transient 5xx on `/broadcastStateTransition`. No test file yet. +- **Wallet feature exercised**: SDK retry policy on `broadcast_state_transition` under transient HTTP 5xx; downstream wallet state-finalisation on partial success. +- **DET parallel**: none direct; PA-007's negative variant covers a permanently-bogus URL only. +- **Preconditions**: a test-only DAPI proxy (or a `httpmock`-based DAPI stub) that returns `503 Service Unavailable` on the first call to `/broadcastStateTransition` and succeeds thereafter. +- **Scenario**: + 1. Bank-fund `addr_1`. + 2. Configure the harness SDK to point at the proxy. + 3. Issue a transfer. +- **Assertions**: + - Wallet returns `Ok(...)` despite the transient 5xx (assuming policy is to retry; if the policy is "fail fast and surface to caller", invert the assertion and document that contract). + - Final on-chain state shows the transfer applied exactly once (proxy's request log shows two POSTs — one 503, one 200; chain shows one ST). + - On the proof-fetch failure variant (DAPI succeeds on broadcast, 5xx on proof fetch): wallet either retries proof fetch, or returns a `BroadcastedAwaitingProof` typed result (whichever the contract defines). +- **Negative variants**: + - DAPI returns 5xx persistently → typed `NetworkError` after exhausted retries; cached wallet state unchanged. +- **Harness extensions required**: a controllable test DAPI proxy (Wave F-adjacent). This is non-trivial; mark as "blocked on test-DAPI-proxy infra" if unavailable. 
+- **Estimated complexity**: M +- **Rationale**: Transient 5xx is the most common production failure mode for thin-client SDKs. Without a deterministic test, retry policy drifts between "broken" and "infinite loop" and nobody notices until users complain. + +#### PA-014 — Multi-output at protocol-max output count +- **Priority**: P2 +- **Status**: STUB — placeholder for follow-up PR (no test file yet; trivial once the `max_outputs` constant is read off `PlatformVersion`). +- **Wallet feature exercised**: `wallet/platform_addresses/transfer.rs:31` at the protocol max-output boundary; payload-size limits in DPP / Drive. +- **DET parallel**: none. +- **Preconditions**: bank-funded test wallet with sufficient credits to fund N outputs (where N is the protocol max for `address_funds` outputs). +- **Scenario**: + 1. Discover the protocol-max output count from `platform_version.dpp.state_transitions.address_funds.max_outputs` (or the equivalent constant). + 2. Bank-fund `addr_1` with enough credits to cover N outputs of `100_000` each plus fees. + 3. Construct a transfer with exactly `max_outputs` destinations; submit. Record the result. + 4. Construct a transfer with `max_outputs + 1` destinations; submit. +- **Assertions**: + - At `max_outputs`: transfer succeeds; all N destinations reach the expected balance. + - At `max_outputs + 1`: wallet returns a typed `PayloadTooLarge` / `TooManyOutputs` validation error before broadcast (or, if the wallet attempts and DAPI rejects, the SDK error class is mapped to a typed wallet error). Pin which side enforces. +- **Negative variants**: none. +- **Harness extensions required**: ability to read `max_outputs` from the active platform version; a pool of `max_outputs + 1` distinct destination addresses (likely already available via `next_unused_address` on a fresh wallet). +- **Estimated complexity**: M +- **Rationale**: The wallet's only multi-output coverage today is "5 outputs". 
The actual upper limit is unmeasured; a protocol-version bump that changes `max_outputs` would silently shift behaviour, with regressions surfacing only in production state-transitions that are mysteriously rejected. + +### Identity (ID) + +#### ID-001 — Register identity funded from platform addresses +- **Priority**: P0 +- **Status**: Pass — `tests/e2e/cases/id_001_register_identity_from_addresses.rs` (drives `register_identity_from_addresses` and pins on-chain key count + balance bounds + post-fee residual). +- **Wallet feature exercised**: `wallet/identity/network/register_from_addresses.rs:65` (`IdentityWallet::register_from_addresses`). +- **DET parallel**: `dash-evo-tool/tests/backend-e2e/identity_create.rs:13` (`test_create_identity`) — DET uses asset-lock; we use the address-funded variant explicitly. +- **Preconditions**: bank-funded test wallet; identity-signer harness extension landed. +- **Scenario**: + 1. Derive `addr_1`, bank-fund with `60_000_000`, wait for balance. + 2. Build a placeholder `Identity` with one `MASTER` ECDSA key and one `HIGH` ECDSA key derived via DIP-9 (identity index `0`). + 3. Call `IdentityWallet::register_from_addresses(identity, {addr_1: 50_000_000}, output: None, identity_index: 0, identity_signer, address_signer, settings: None)`. + 4. Wait for the identity to appear on-chain by `sdk.fetch::<Identity>(identity.id())`. +- **Assertions**: + - Returned `Identity::id()` is non-zero and equals the on-chain fetched identity. + - On-chain identity public-keys count == 2. + - Identity balance == `50_000_000 - identity_create_fee` (`identity_create_fee > 0`). + - `addr_1` residual balance == `60_000_000 - 50_000_000 - tx_fee`. + - `IdentityManager::known_identities()` lists exactly this identity. +- **Negative variants**: + - `inputs` is empty → wallet returns `PlatformWalletError::InvalidIdentityData("At least one input address is required")` (already enforced at `register_from_addresses.rs:78`; assert exact message stability). 
+ - Insufficient funds in input → SDK error class. + - Placeholder `Identity` with zero keys → identity-create transition rejection. +- **Harness extensions required**: + - `Signer` impl — Wave A (see §4). + - `TestWallet::register_identity_from_addresses(funding: Credits) -> Identity` helper that wraps the placeholder build + call. + - `wait_for_identity_balance(identity_id, expected, timeout)` helper. +- **Estimated complexity**: L (multi-file harness extension) +- **Rationale**: Highest-leverage Identity test. The address-funded path is currently exercised by no test anywhere in the workspace — FFI binds the asset-lock variant only. ID-001 is the gateway: every other Identity case (ID-002+) inherits the placeholder-Identity setup it builds. + +#### ID-001b — `setup_with_n_identities(N)` multi-identity helper +- **Priority**: P1 +- **Wallet feature exercised**: harness helper `setup_with_n_identities(n, funding_per)` chained over `IdentityWallet::register_from_addresses` for `n` consecutive DIP-9 identity indices. +- **DET parallel**: none direct. +- **Preconditions**: ID-001 helper landed; bank funded for `n × (funding_per + register_fee_headroom)`. +- **Scenario**: + 1. `let guard = setup_with_n_identities(3, 30_000_000).await?;` + 2. For each `i` in `0..3`, fetch `Identity::fetch(sdk, guard.identities[i].id)`. +- **Assertions**: + - The three `Identifier`s are pairwise distinct. + - The three `identity_index` values are `0`, `1`, `2` in registration order. + - Each fetched identity has `balance >= funding_per / 2` (post-fee threshold). + - The three identities' MASTER public keys are pairwise distinct (DIP-9 fan-out, not a copy-paste of slot 0). + - Bank's `total_credits()` decreased by `[n × funding_per, n × funding_per + n × fund_fee_upper_bound]`. +- **Negative variants**: + - `n == 0` → typed validation error. +- **Harness extensions required**: Wave A only. 
+- **Estimated complexity**: M +- **Rationale**: Multi-identity setup is the gateway for ID-003 / ID-008 and any future contact-graph or DashPay test. Pins the helper's nonce-discipline against `register_from_addresses`'s nonce-cache TODO regressing. + +#### ID-002 — Top-up identity from platform addresses +- **Priority**: P0 +- **Status**: Pass — `tests/e2e/cases/id_002_top_up_identity.rs` (post-top-up identity balance fetched on-chain, fee derived from delta, second-address residual asserted). +- **Wallet feature exercised**: `wallet/identity/network/top_up_from_addresses.rs:37`. +- **DET parallel**: `dash-evo-tool/tests/backend-e2e/identity_tasks.rs:63` (`step_top_up_from_platform_addresses`). +- **Preconditions**: ID-001 setup helper; identity registered with starting balance. +- **Scenario**: + 1. Register identity per ID-001 (helper). + 2. Capture `pre_balance = identity.balance()` (post-registration). + 3. Bank-fund `addr_2` (a freshly derived address) with `30_000_000`. + 4. Call `top_up_from_addresses({addr_2: 25_000_000}, identity_id, …)`. + 5. Sync identity. +- **Assertions**: + - `post_balance == pre_balance + 25_000_000 - top_up_fee` + - `top_up_fee > 0` + - `addr_2` residual == `30_000_000 - 25_000_000 - tx_fee`. +- **Negative variants**: + - Top-up to non-existent identity id → typed error. + - Top-up with empty `inputs` map → typed validation error. +- **Harness extensions required**: same as ID-001 — Wave A. +- **Estimated complexity**: M +- **Rationale**: Validates the partner of ID-001. Together they cover the entire address-funded identity lifecycle entry surface. + +#### ID-003 — Identity-to-identity credit transfer +- **Priority**: P0 +- **Status**: Pass — `tests/e2e/cases/id_003_identity_to_identity_transfer.rs` (uses `setup_with_n_identities(2, …)`; pins receiver-side exact gain + sender-side loss > amount + non-zero fee). +- **Wallet feature exercised**: `wallet/identity/network/transfer.rs:74` (`transfer_credits_with_external_signer`). 
+- **DET parallel**: `dash-evo-tool/tests/backend-e2e/identity_tasks.rs:238` (`step_transfer_credits`). +- **Preconditions**: ID-001 helper × 2 (two registered identities, both funded from same test wallet). +- **Scenario**: + 1. Register `identity_a` and `identity_b` (sequential ID-001 invocations on different addresses). + 2. Capture pre-balances. + 3. Transfer `10_000_000` credits from `identity_a` to `identity_b`. +- **Assertions**: + - `post_a == pre_a - 10_000_000 - transfer_fee`, `transfer_fee > 0` + - `post_b == pre_b + 10_000_000` + - `IdentityManager` reflects both new balances after sync. +- **Negative variants**: + - Transfer amount exceeds sender balance → typed error. + - Transfer to self (`identity_a -> identity_a`) → typed error. +- **Harness extensions required**: Wave A only (everything inherits ID-001). +- **Estimated complexity**: M +- **Rationale**: Confirms identity-balance bookkeeping in `ManagedIdentity` is bidirectional and idempotent. Pairs with ID-002 to cover the symmetric "credit increase" + "credit decrease" code paths. + +#### ID-003b — Concurrent identity-to-identity transfers serialise on identity nonce +- **Priority**: P2 +- **Wallet feature exercised**: `transfer_credits_with_external_signer` under concurrent invocation from the same source identity. +- **DET parallel**: none. +- **Preconditions**: ID-001b helper (multi-identity setup). +- **Scenario**: + 1. `let guard = setup_with_n_identities(3, 60_000_000).await?;` + 2. Spawn two `tokio::spawn` tasks from `guard.identities[0]` — task 1 transfers `5_000_000` to `guard.identities[1]`; task 2 transfers `7_000_000` to `guard.identities[2]`. + 3. `tokio::join!` on both. Record each task's `Result`. +- **Assertions**: + - Either both tasks succeed, OR exactly one task succeeds and the other returns a typed nonce-collision error from DAPI. Pin which contract the wallet implements. + - `post_sender == pre_sender - successful_amounts_total - successful_fees_total`. 
+ - Sender identity revision is monotonic: `post_revision == pre_revision + count(successful transfers)` (no skipped, no duplicate). +- **Negative variants**: foreign signer signing for `sender`'s transition is covered by QA-001's regression test in `signer.rs`. +- **Harness extensions required**: Wave A; ID-001b helper. +- **Estimated complexity**: M +- **Rationale**: The identity-side parallel of PA-008b. Surface-discovery: pins whichever serialisation contract the wallet exposes today rather than asserting an aspirational one. + +#### ID-004 — Identity update: add and disable a key +- **Priority**: P1 +- **Status**: STUB — deferred to a follow-up PR. The harness's `SeedBackedIdentitySigner` only pre-derives keys for `key_index ∈ 0..DEFAULT_GAP_LIMIT`; signing the next transition with a freshly-issued key needs a `derive_identity_key`-driven cache-injection helper that does not exist yet (mirrors the `ID-flow-009` Blocked entry). +- **Wallet feature exercised**: `wallet/identity/network/update.rs:89` (`update_identity_with_external_signer`). +- **DET parallel**: `dash-evo-tool/tests/backend-e2e/identity_tasks.rs:188` (`step_add_key`) and `tc_020_identity_mutation_lifecycle`. +- **Preconditions**: ID-001 helper. +- **Scenario**: + 1. Register identity with MASTER + HIGH keys (purpose AUTHENTICATION). + 2. Build a new HIGH ECDSA key (purpose AUTHENTICATION) — derive via identity-key derivation Wave A helper. + 3. Issue an `IdentityUpdateTransition` adding the new key. + 4. Issue a second update disabling the original HIGH key. + 5. Refresh identity from chain. +- **Assertions**: + - After step 3: identity has 3 keys, the new key is `is_disabled == false`. + - After step 4: original HIGH key has `disabled_at != None`; new HIGH key still active. + - MASTER key is untouched. +- **Negative variants**: + - Disable last MASTER key → typed error (CRITICAL/MASTER class invariant). + - Add key signed by non-MASTER → typed error. 
+- **Harness extensions required**: Wave A; plus a `derive_identity_key(identity_index, key_index, purpose, security_level)` test helper. +- **Estimated complexity**: L +- **Rationale**: Identity-update pathways have multiple silent failure modes (key-class restrictions, MASTER signing requirements). Recent commit `844eef74e8` ("token transitions require a CRITICAL signing key") shows this surface is actively changing — coverage prevents future regressions. + +#### ID-005 — Transfer credits from identity to platform addresses +- **Priority**: P1 +- **Status**: Pass — `tests/e2e/cases/id_005_identity_to_addresses_transfer.rs` (pins exact destination-address gain + identity loss > amount + on-chain post-balance equals wallet-returned `Credits`). +- **Wallet feature exercised**: `wallet/identity/network/transfer_to_addresses.rs:66`. +- **DET parallel**: `dash-evo-tool/tests/backend-e2e/identity_tasks.rs:291` (`step_transfer_to_addresses`). +- **Preconditions**: ID-001 helper. +- **Scenario**: + 1. Register identity with `≥ 60_000_000` credits (ID-001 with larger funding). + 2. Derive `dest_addr` on the test wallet. + 3. Call `transfer_credits_to_addresses_with_external_signer(identity_id, {dest_addr: 20_000_000}, signer, settings: None)`. + 4. Sync test wallet balances. +- **Assertions**: + - `balances[dest_addr] == 20_000_000` + - Identity balance decreased by `20_000_000 + transfer_fee`. + - Returned `Credits` value equals on-chain transferred amount (the wallet returns the post-fee `Credits` — assert matches `20_000_000`). +- **Negative variants**: + - Transfer to malformed `PlatformAddress` (P2SH that the harness cannot sign for is fine here — it's the destination, not the source) → SDK accepts it; assert balance shows up. + - Insufficient identity balance → typed error. +- **Harness extensions required**: Wave A only. 
+- **Estimated complexity**: M
+- **Rationale**: Closes the ID surface — ID-002 (addresses → identity) and ID-005 (identity → addresses) together exercise the full money-flow loop that wallets actually need to demo.
+
+#### ID-006 — Refresh and load identity by index
+- **Priority**: P1
+- **Status**: STUB — deferred to a follow-up PR. The "rebuild a fresh `TestWallet` from the same seed and run discovery" path needs a `TestWallet::from_seed_bytes` helper that does not exist today; `load_identity_by_index` itself is exercised by the orphan-recovery branch of `cleanup::sweep_identities_with_seed` but not by a dedicated assertion-bearing test.
+- **Wallet feature exercised**: `wallet/identity/network/loading.rs:28` (`load_identity_by_index`); `loading.rs:162` (`refresh_identity`); `discovery.rs:79` (`discover`).
+- **DET parallel**: `dash-evo-tool/tests/backend-e2e/identity_tasks.rs:350` (`tc_025_refresh_identity`); `identity_tasks.rs:420` (`tc_027_load_identity`); `identity_tasks.rs:585` (`tc_031_incremental_address_discovery`).
+- **Preconditions**: ID-001 helper.
+- **Scenario**:
+ 1. Register identity via ID-001 at `identity_index = 0`.
+ 2. Drop the test-wallet handle; rebuild a fresh `TestWallet` from the same seed.
+ 3. Call `discover()` to walk identity indices 0..n until none found.
+ 4. Call `load_identity_by_index(0)`.
+ 5. Mutate something out-of-band (e.g. issue a top-up via ID-002) and call `refresh_identity`.
+- **Assertions**:
+ - `discover()` returns exactly the registered identity.
+ - `load_identity_by_index(0)` populates the local `IdentityManager` with id, balance, and key set matching the on-chain identity.
+ - Post-`refresh_identity`, the cached balance reflects the top-up.
+- **Negative variants**:
+ - `load_identity_by_index(1)` for a non-existent identity at that index → returns `Ok(None)` (assert) or typed `NotFound` (whichever the contract specifies — this case will surface that contract).
+- **Harness extensions required**: Wave A; helper to rebuild a `TestWallet` from a stored seed (the registry already stores `seed_hex`).
+- **Estimated complexity**: M
+- **Rationale**: Wallet restart / identity rediscovery is the most-hit path in mobile apps and the most-broken-by-protocol-bumps. ID-006 catches discovery regressions deterministically.
+
+#### ID-001c — Non-default `StateTransitionSettings`
+- **Priority**: P2
+- **Status**: STUB — P2 deferred. The harness has no "did we wait for proof?" hook today; ID-001c is the right place to add one but lands after the P0/P1 bring-up.
+- **Wallet feature exercised**: `wallet/identity/network/register_from_addresses.rs:65`'s `settings: Option<StateTransitionSettings>` argument; non-default values (e.g. `wait_for_proof = false`, fee multiplier override, signing-key override).
+- **DET parallel**: none.
+- **Preconditions**: ID-001 helper.
+- **Scenario**: register an identity exactly as ID-001 except pass a non-default `StateTransitionSettings`. Run two sub-cases:
+ 1. `settings: Some(StateTransitionSettings { wait_for_proof: false, .. })`. Expect the call to return as soon as broadcast succeeds, without blocking on proof.
+ 2. `settings: Some(StateTransitionSettings { fee_multiplier: 2, .. })`. Expect the on-chain fee to scale by the configured multiplier.
+- **Assertions**:
+ - Sub-case (1): the call's wall-clock duration is bounded below by network RTT and above by a `proof_wait_timeout` it should not have hit; cached identity is "broadcasted, awaiting proof"; on next sync the proof is observed and the change-set finalised.
+ - Sub-case (2): observed on-chain fee scales as documented (within rounding).
+- **Negative variants**: none.
+- **Harness extensions required**: Wave A; a "did we wait for proof?" hook on the harness SDK (or a wall-clock-bound check).
+- **Estimated complexity**: M
+- **Rationale**: Every existing Identity / DPNS / DashPay test passes `settings: None`.
The `Some` branch is entirely uncovered; without ID-001c, settings-related fields can be silently misrouted. + +#### ID-005b — `transfer_credits_to_addresses` with empty outputs +- **Priority**: P2 +- **Status**: STUB — P2 deferred; pins the empty-`outputs` validation error message after the P0/P1 cohort lands. +- **Wallet feature exercised**: `wallet/identity/network/transfer_to_addresses.rs:66` validation gate. +- **DET parallel**: none. +- **Preconditions**: ID-001 helper; identity with non-zero balance. +- **Scenario**: + 1. Register an identity per ID-001 with starting balance `≥ 50_000_000`. + 2. Call `transfer_credits_to_addresses_with_external_signer(identity_id, {}, signer, None)` — empty output map. +- **Assertions**: + - Returns a typed validation error of "at least one output is required" shape (mirror the ID-001 negative-variant message style; pin the exact variant or message). + - No state-transition broadcast. + - Identity balance unchanged. +- **Negative variants**: none — this case IS the empty-input variant. +- **Harness extensions required**: Wave A only. +- **Estimated complexity**: S +- **Rationale**: ID-001 already pins the empty-`inputs` error message exactly. ID-005b mirrors that pin on the empty-`outputs` side, which is currently uncovered. + +#### ID-006b — Identity-key derivation index boundary +- **Priority**: P2 +- **Status**: STUB — P2 deferred; needs the `derive_identity_key` helper exposure for `key_index` (sibling of ID-004's blocked helper). +- **Wallet feature exercised**: identity-key derivation under `wallet/identity/network/identity_handle.rs::derive_ecdsa_identity_auth_keypair_from_master` at `key_index` boundaries. +- **DET parallel**: none direct. +- **Preconditions**: ID-001 helper. +- **Scenario**: + 1. Register an identity with `key_index = 0`. Verify on-chain that the registered HIGH key matches `derive_identity_key(.., key_index = 0, ..)`. + 2. 
Register a second identity (or `update_identity` add-key on the same identity) with `key_index = DEFAULT_GAP_LIMIT - 1`. Verify the registered key matches the corresponding derivation. + 3. Optionally: attempt `key_index = DEFAULT_GAP_LIMIT` and pin the contract (rejected vs gap grown). +- **Assertions**: each sub-case asserts that the on-chain key bytes match the off-chain DIP-9 derivation at the boundary index. +- **Negative variants**: none. +- **Harness extensions required**: Wave A's `derive_identity_key` helper exposed for `key_index` (in addition to `identity_index`). +- **Estimated complexity**: M +- **Rationale**: ID-006 covers `identity_index` boundaries; `key_index` is the parallel axis and currently uncovered. + +### Tokens (TK) + +The wallet has token operations on the API surface +(`wallet/tokens/wallet.rs` + `wallet/identity/network/tokens/*`). They all +require an existing on-testnet token contract and an authorised identity. +Without a contract-registry strategy, only TK-001/TK-002 (operations on +existing balances) are achievable in P0/P1. + +#### TK-001 — Token transfer between two identities +- **Priority**: P1 +- **Status**: STUB — placeholder for follow-up PR (Wave A + Wave D — token contract operator config). +- **Wallet feature exercised**: `wallet/identity/network/tokens/transfer.rs:21` (`token_transfer_with_signer`). +- **DET parallel**: `dash-evo-tool/tests/backend-e2e/token_tasks.rs:359` (`step_transfer`). +- **Preconditions**: ID-001 helper; **a known testnet token contract** (env-driven `PLATFORM_WALLET_E2E_TOKEN_CONTRACT_ID` + `_TOKEN_POSITION`); the registered identity must already hold a non-zero balance of that token (operator pre-funds via the same flow used to fund the bank). +- **Scenario**: + 1. Register `identity_a` and `identity_b` per ID-001. + 2. Pre-condition: operator pre-funds `identity_a` with `≥ 100` tokens of the configured contract (one-time setup, similar to bank funding). + 3. 
Call `token_transfer_with_signer(identity_a, contract_id, token_position, identity_b, amount=50)`. + 4. Sync token balances on both. +- **Assertions**: + - `identity_a` token balance decreased by exactly `50`. + - `identity_b` token balance increased by exactly `50`. + - `identity_a` credit balance decreased by `transfer_fee` (token transfer pays in credits, not in tokens). +- **Negative variants**: + - Transfer amount exceeds sender token balance → typed error. + - Transfer with wrong `token_position` → contract-validation error. +- **Harness extensions required**: + - Wave A (Identity signer). + - `Config::token_contract_id` + `token_position` env vars. + - `TestWallet::token_balance(identity_id, contract_id, token_pos)` helper. + - Operator documentation: how to pre-fund tokens (one-time, sibling of bank pre-funding). +- **Estimated complexity**: L +- **Rationale**: Most-used token op. Catches token-amount underflow bugs and credit-fee accounting bugs in one shot. + +#### TK-001b — Token transfer of amount 0 +- **Priority**: P2 +- **Status**: STUB — placeholder for follow-up PR (Wave A + Wave D). +- **Wallet feature exercised**: `wallet/identity/network/tokens/transfer.rs:21` zero-amount boundary. +- **DET parallel**: none. +- **Preconditions**: TK-001 setup (two identities with non-zero token balance on `identity_a`). +- **Scenario**: call `token_transfer_with_signer(identity_a, contract_id, token_position, identity_b, amount=0)`. +- **Assertions**: pin one contract: + - **(a) Reject**: typed validation error of "amount must be positive" shape; no broadcast; balances unchanged. + - **(b) Accept**: broadcast succeeds; both token balances unchanged; only `identity_a` credit balance decreased by `transfer_fee`. +- **Negative variants**: none. +- **Harness extensions required**: TK-001 extensions. +- **Estimated complexity**: S +- **Rationale**: Zero-amount transfers may be valid no-ops or invalid per contract. Either contract needs an asserted test. 
+ +#### TK-002 — Token claim (perpetual / pre-programmed distribution) +- **Priority**: P2 +- **Status**: STUB — placeholder for follow-up PR (Wave A + Wave D). +- **Wallet feature exercised**: `wallet/identity/network/tokens/claim.rs:18` (`token_claim_with_signer`). +- **DET parallel**: `dash-evo-tool/tests/backend-e2e/token_tasks.rs:702` (`tc_064_estimate_perpetual_rewards`) and `step_*` token lifecycle. +- **Preconditions**: TK-001 setup + a token contract that grants the registered identity claim rights. +- **Scenario**: + 1. Register identity per ID-001. + 2. Wait for the perpetual-distribution interval to advance. + 3. Call `token_claim_with_signer`. +- **Assertions**: + - Token balance increases by the documented per-interval claim amount (operator-supplied env `PLATFORM_WALLET_E2E_TOKEN_CLAIM_AMOUNT`). + - Second claim within the same interval returns a typed "already claimed" error. +- **Negative variants**: claim with no rights → typed error. +- **Harness extensions required**: TK-001 extensions + interval-aware sleep helper (10–60 s). +- **Estimated complexity**: L +- **Rationale**: Perpetual-distribution bugs are silent — balance just doesn't increase. Adding claim coverage is the only way to surface those. + +#### TK-003 — Token mint (authorised identity) +- **Priority**: P2 (gated) +- **Status**: STUB — placeholder for follow-up PR (Wave A + Wave D; gated on a token contract whose mint authorisation can be assigned to a test identity). +- **Wallet feature exercised**: `wallet/identity/network/tokens/mint.rs:19`. +- **DET parallel**: `dash-evo-tool/tests/backend-e2e/token_tasks.rs:305` (`step_mint`). +- **Preconditions**: TK-001 setup + the registered identity is on the contract's mint allow-list. +- **Scenario**: mint `100` of token to self; sync. +- **Assertions**: identity token balance increased by `100`; total supply increased. 
+- **Negative variants**: mint without authority (TK-001's `identity_b`) → unauthorised error (DET parallel: `tc_065_mint_unauthorized` at `token_tasks.rs:756`). +- **Harness extensions required**: TK-001 extensions. +- **Estimated complexity**: M +- **Rationale**: Mint-without-authority is the canonical token authz failure mode. + +#### TK-004 — Token burn +- **Priority**: P2 +- **Status**: STUB — placeholder for follow-up PR (Wave A + Wave D). +- **Wallet feature exercised**: `wallet/identity/network/tokens/burn.rs` (mod-level fn at `tokens/mod.rs`). +- **DET parallel**: `token_tasks.rs:330` (`step_burn`). +- **Preconditions**: TK-001 setup with non-zero balance. +- **Scenario**: burn `25` tokens; sync. +- **Assertions**: identity token balance decreased by `25`; total supply decreased. +- **Negative variants**: burn more than balance → typed error. +- **Harness extensions required**: TK-001 extensions. +- **Estimated complexity**: M +- **Rationale**: Symmetric partner of TK-003; together they validate supply bookkeeping. + +### Core / SPV (CR) + +All Core cases are gated on Task #15 (SPV stabilisation). They are spec'd here +so that when SPV lands, the test bodies can be written without further design. + +#### CR-001 — SPV mn-list sync readiness +- **Priority**: P1 (post-Task #15) +- **Status**: BLOCKED — needs harness refactor: SPV runtime re-enablement (Task #15). The harness currently runs with `spv_runtime: None` and a `TrustedHttpContextProvider` (see `harness.rs:148`). +- **Wallet feature exercised**: `manager::accessors::spv()` returning a started `SpvRuntime`; mn-list sync internals. +- **DET parallel**: `dash-evo-tool/tests/backend-e2e/spv_wallet.rs:14` (`test_spv_sync_and_create_wallet`). +- **Preconditions**: SPV enabled in `harness::E2eContext::build` (uncomment block at `harness.rs:200-218`). +- **Scenario**: + 1. Wait `<= 180s` for `spv::wait_for_mn_list_synced` to return. + 2. Read mn-list height. 
+- **Assertions**: mn-list height > 0; SPV runtime reports `Ready` state. +- **Negative variants**: zero peers reachable → harness fails fast with explicit error (not a silent infinite wait). +- **Harness extensions required**: re-enable `SpvContextProvider` swap; add a `SpvHealth::status() -> Enum` accessor to the manager. +- **Estimated complexity**: M +- **Rationale**: Foundation for every other Core test — guarantees the SPV layer is alive before any Core operation runs. + +#### CR-002 — Core wallet receive address derivation +- **Priority**: P1 (post-Task #15) +- **Status**: BLOCKED — needs harness refactor: SPV runtime re-enablement (Task #15). +- **Wallet feature exercised**: `wallet/core/wallet.rs:59` (`next_receive_address_for_account`). +- **DET parallel**: `dash-evo-tool/tests/backend-e2e/core_tasks.rs:14` (`test_tc001_refresh_wallet_info_core_only`). +- **Preconditions**: CR-001 ready. +- **Scenario**: derive 5 receive addresses on account `0`; assert distinctness; assert `network() == bank.network()`. +- **Assertions**: 5 distinct `Address`es; consistent network prefix. +- **Negative variants**: derive on non-existent account → typed error. +- **Harness extensions required**: SPV-backed `TestCoreWallet` helper. +- **Estimated complexity**: M +- **Rationale**: Catches Core-account derivation regressions independently of broadcast/sync. + +#### CR-003 — Asset-lock-funded identity registration (full path) +- **Priority**: P2 (post-Task #15) +- **Status**: BLOCKED — needs harness refactor: SPV runtime + Core-UTXO funded test wallet (Task #15). Bank wallet today holds platform credits, not Core coins. +- **Wallet feature exercised**: `wallet/asset_lock/build.rs:39` + `wallet/identity/network/registration.rs:240` (`register_identity_with_signer`). +- **DET parallel**: `dash-evo-tool/tests/backend-e2e/core_tasks.rs:132` (`test_tc004_create_registration_asset_lock`). +- **Preconditions**: CR-001 + a Core-funded test wallet (operator funds via testnet faucet). 
+- **Scenario**: build asset-lock tx; wait for instant-lock; register identity.
+- **Assertions**: identity exists on-chain; asset-lock recorded in `tracked_asset_locks`; Core balance decreased by lock amount + fee.
+- **Negative variants**: insufficient Core balance; chain re-org of asset-lock tx (P2 — manual).
+- **Harness extensions required**: faucet adapter; Core-funded wallet helper.
+- **Estimated complexity**: L
+- **Rationale**: Mirrors DET's existing canonical Identity-create coverage. Lower priority than ID-001 because address-funded is the path with no other coverage in the workspace.
+
+### Contracts (CT)
+
+#### CT-001 — Document put: deploy a fixture data contract
+- **Priority**: P1
+- **Status**: STUB — placeholder for follow-up PR (Wave A + Wave C — contract fixture loader).
+- **Wallet feature exercised**: `wallet/identity/network/contract.rs:124` (`create_data_contract_with_signer`).
+- **DET parallel**: `dash-evo-tool/tests/backend-e2e/fetch_contract.rs` (read side); DET writes via `register_contract.rs` backend task.
+- **Preconditions**: ID-001 helper; fixture contract JSON at `tests/fixtures/contracts/minimal.json`.
+- **Scenario**:
+ 1. Register identity per ID-001.
+ 2. Load contract JSON (one document type, two scalar fields).
+ 3. Call `create_data_contract_with_signer(contract, identity_id, signer)`.
+ 4. Fetch contract via `sdk.fetch::<DataContract>(contract.id())`.
+- **Assertions**:
+ - On-chain contract id matches local id.
+ - Document-type schema round-trips byte-equal (canonical CBOR).
+ - Identity credit balance decreased by `contract_create_fee > 0`.
+- **Negative variants**: re-deploy the same contract → typed "already exists" error.
+- **Harness extensions required**: Wave A; `tests/fixtures/contracts/minimal.json`.
+- **Estimated complexity**: M
+- **Rationale**: Establishes the contract-fixture pattern. CT-002/003 build on it.
+ +#### CT-002 — Document put / replace lifecycle +- **Priority**: P2 +- **Status**: STUB — placeholder for follow-up PR (Wave A + Wave C). +- **Wallet feature exercised**: `dash_sdk::platform::Document::{put,replace}` invoked via the SDK directly (the wallet doesn't wrap document put). +- **DET parallel**: DET's `backend_task::document.rs`. +- **Preconditions**: CT-001 contract deployed; identity from ID-001. +- **Scenario**: put a document; mutate one field; replace; fetch. +- **Assertions**: replaced document version increments; field value matches. +- **Negative variants**: replace with wrong revision → typed error. +- **Harness extensions required**: thin SDK-direct helper (no wallet API). +- **Estimated complexity**: M +- **Rationale**: Documents are the actual user-facing primitive — coverage of put/replace catches schema-validation regressions in DPP. + +#### CT-003 — Contract update (add document type) +- **Priority**: P2 +- **Status**: STUB — placeholder for follow-up PR (Wave A + Wave C). +- **Wallet feature exercised**: `update_data_contract` flow via SDK + identity signer. +- **DET parallel**: DET's `backend_task::update_data_contract.rs`. +- **Preconditions**: CT-001 contract deployed. +- **Scenario**: update contract to add a second document type; fetch and verify. +- **Assertions**: contract version incremented; new document type queryable. +- **Negative variants**: incompatible schema change (remove required field) → typed validation error. +- **Harness extensions required**: contract-update SDK helper. +- **Estimated complexity**: M +- **Rationale**: Contract-update validation is a known sharp edge — explicit coverage prevents subtle DPP changes from breaking deployed contracts silently. + +### DPNS + +#### DPNS-001 — Register and resolve a `.dash` name +- **Priority**: P0 +- **Status**: STUB — placeholder for follow-up PR (Wave A + DPNS helpers). 
+- **Wallet feature exercised**: `wallet/identity/network/dpns.rs:176` (`register_name_with_external_signer`); `dpns.rs:281` (`resolve_name`). +- **DET parallel**: `dash-evo-tool/tests/backend-e2e/register_dpns.rs:14` (`test_register_dpns_name`). +- **Preconditions**: ID-001 helper; identity has `≥ 100_000_000` credits (DPNS register fee + headroom). +- **Scenario**: + 1. Register identity with sufficient balance. + 2. Generate random name `e2e-<8 random hex>.dash`. + 3. Call `register_name_with_external_signer(name, identity_id, signer, settings: None)`. + 4. Wait for `resolve_name(name)` to return `Some(identity_id)`. +- **Assertions**: + - `resolve_name` returns the registering identity's id. + - `sync_dpns_names()` lists the name on the identity. + - Identity credit balance decreased by `dpns_fee > 0`. +- **Negative variants**: + - Re-register the same name → typed `AlreadyExists` error. + - Register a name not ending in `.dash` → typed validation error. + - Register a name shorter than 3 chars or longer than 63 → typed validation error. +- **Harness extensions required**: Wave A; random-name helper (cryptographic RNG, lower-case alphanumeric). +- **Estimated complexity**: M +- **Rationale**: DPNS register is the most user-visible Platform feature after Identity. DPNS-001 is also the gateway to Dashpay (DP-001 needs a DPNS name). + +#### DPNS-001b — Name-length boundary quartet (2 / 3 / 63 / 64 chars) +- **Priority**: P2 +- **Status**: STUB — placeholder for follow-up PR (Wave A + DPNS helpers). +- **Wallet feature exercised**: DPNS name-length validation at `wallet/identity/network/dpns.rs:176`. +- **DET parallel**: none. +- **Preconditions**: ID-001 helper; identity with sufficient credits to register a DPNS name. +- **Scenario**: four sub-cases, each with a fresh DPNS-eligible identity (or the same identity if the wallet permits multiple names): + 1. Name length **2** chars (`xy.dash` — 2-char label). Expect typed validation error. + 2. 
Name length **3** chars (`xyz.dash`). Expect contested-name flow OR success (depends on protocol; pin which). + 3. Name length **63** chars (max-allowed label, all alphanumeric). Expect success. + 4. Name length **64** chars. Expect typed validation error. +- **Assertions**: each sub-case nails accept/reject and the typed error variant on rejection. +- **Negative variants**: none — this case IS the boundary set. +- **Harness extensions required**: Wave A; the random-name helper extended to take an explicit length. +- **Estimated complexity**: M +- **Rationale**: DPNS-001's negative variants list "shorter than 3 or longer than 63" but never pin the exact boundaries. Off-by-one at name-length is the canonical DPNS bug class. + +#### DPNS-001c — DPNS name with a multibyte character +- **Priority**: P2 +- **Status**: STUB — placeholder for follow-up PR (Wave A + DPNS helpers). +- **Wallet feature exercised**: DPNS name validation / canonicalisation at `wallet/identity/network/dpns.rs:176`. +- **DET parallel**: none. +- **Preconditions**: ID-001 helper; identity with sufficient credits. +- **Scenario**: register a name containing a multibyte character (e.g. `naive.dash` with `i` replaced by `ï`, or `cafe.dash` with `e` → `é`). Submit. Pin the contract: + - **(a) Accept-and-canonicalise**: name normalised to ASCII (e.g. via Punycode / IDN-ASCII); subsequent `resolve_name` returns the canonical form. + - **(b) Reject**: typed validation error of "ASCII-only" / "invalid character" shape. +- **Assertions**: nail one of (a) or (b). If (a), assert the canonical form matches the documented rule; if (b), assert the error variant. +- **Negative variants**: none. +- **Harness extensions required**: Wave A. +- **Estimated complexity**: S +- **Rationale**: Whichever contract the wallet implements, an explicit pin prevents future protocol-version drift from silently flipping it. 
+
+#### DPNS-002 — Resolve a known external name (negative-only assertion)
+- **Priority**: P2
+- **Status**: STUB — placeholder for follow-up PR (no identity needed; resolver-only). Trivial once a DPNS resolution helper lands.
+- **Wallet feature exercised**: `dpns.rs:281` (`resolve_name`).
+- **DET parallel**: `register_dpns.rs` resolve-side.
+- **Preconditions**: none beyond network reachability.
+- **Scenario**: resolve a fixed never-registered name `definitely-does-not-exist.dash`.
+- **Assertions**: returns `None` (not an error).
+- **Negative variants**: malformed name (no `.dash` suffix) → typed validation error.
+- **Harness extensions required**: none (DPNS-001's signer setup not required here).
+- **Estimated complexity**: S
+- **Rationale**: Confirms DPNS resolve handles the "name doesn't exist" path without surfacing it as a hard error — easy to regress when DPNS schema evolves.
+
+### Dashpay (DP)
+
+#### DP-001 — Set DashPay profile
+- **Priority**: P1
+- **Status**: STUB — placeholder for follow-up PR (Wave A).
+- **Wallet feature exercised**: `wallet/identity/network/profile.rs:237` (`create_profile_with_external_signer`).
+- **DET parallel**: `dash-evo-tool/tests/backend-e2e/dashpay_tasks.rs:48` (`tc_032_update_profile`).
+- **Preconditions**: ID-001 + DPNS-001 (identity has a DPNS name).
+- **Scenario**: create profile with `display_name = "Marvin"` and `public_message`; sync profile back.
+- **Assertions**: profile fetched from chain has matching `display_name` and `public_message`; profile timestamp non-zero.
+- **Negative variants**: profile `display_name` exceeding length limit → typed validation error.
+- **Harness extensions required**: Wave A.
+- **Estimated complexity**: M
+- **Rationale**: Profile is the simplest DashPay write — establishes the pattern other DashPay operations (DP-002, DP-003) reuse.
+ +#### DP-001b — Profile with optional fields `None` vs `Some` +- **Priority**: P2 +- **Status**: STUB — placeholder for follow-up PR (Wave A). +- **Wallet feature exercised**: `wallet/identity/network/profile.rs:237` partial-profile semantics. +- **DET parallel**: none direct. +- **Preconditions**: ID-001 + DPNS-001. +- **Scenario**: two sub-cases on the same identity (or on two identities if the wallet enforces single-profile-per-identity): + 1. Create profile with `display_name = None, public_message = Some("hello")`. Sync; fetch. + 2. Create profile with `display_name = Some("Marvin"), public_message = None`. Sync; fetch. +- **Assertions**: + - Fetched profile preserves the `None`/`Some` distinction byte-for-byte (a `None` field comes back as absent, not as empty string `""`). + - Sub-case (1) post-sync: `display_name == None`, `public_message == Some("hello")`. + - Sub-case (2) post-sync: `display_name == Some("Marvin")`, `public_message == None`. +- **Negative variants**: none. +- **Harness extensions required**: Wave A. +- **Estimated complexity**: M +- **Rationale**: DashPay profile is a partial-update primitive in production; conflating `None` with `Some("")` would silently break all clients that use either default presentation. + +#### DP-001c — Profile `display_name` containing emoji / RTL text +- **Priority**: P2 +- **Status**: STUB — placeholder for follow-up PR (Wave A). +- **Wallet feature exercised**: `wallet/identity/network/profile.rs:237` UTF-8 round-trip. +- **DET parallel**: none. +- **Preconditions**: ID-001 + DPNS-001. +- **Scenario**: create a profile with `display_name = "Marvin 🤖"` (emoji) and an additional sub-case with an RTL string (e.g. Hebrew or Arabic text). Sync; fetch. +- **Assertions**: + - Fetched `display_name` is byte-equal to the input (including the emoji code-points and any RTL embedding marks). + - No silent normalisation that loses information. 
+ - Length validation operates on grapheme clusters or bytes (whichever the contract specifies); pin which. +- **Negative variants**: none. +- **Harness extensions required**: Wave A. +- **Estimated complexity**: S +- **Rationale**: UTF-8 round-trip in user-displayed fields is a quiet hazard — losing emoji or RTL marks bricks user-presented identity strings without surfacing as an error. + +#### DP-002 — Send and accept a contact request +- **Priority**: P1 +- **Status**: STUB — placeholder for follow-up PR (Wave A + Wave B for two identities). +- **Wallet feature exercised**: `contact_requests.rs:91` (`send_contact_request_with_external_signer`); `contact_requests.rs:466` (`accept_contact_request_with_external_signer`). +- **DET parallel**: `dashpay_tasks.rs:546` (`tc_037_dashpay_contact_lifecycle`). +- **Preconditions**: two registered identities (ID-001 × 2); DPNS names on both (DPNS-001 × 2); both have profiles (DP-001 × 2). +- **Scenario**: + 1. From `identity_a`: send contact request to `identity_b`. + 2. From `identity_b`: list contact requests; accept the inbound request. + 3. Sync established contacts on both sides. +- **Assertions**: + - `identity_a.sent_contact_requests()` lists the request. + - `identity_b.sync_contact_requests()` returns the inbound request. + - After acceptance, `established_contacts()` on both identities includes the other. +- **Negative variants**: + - Send contact request to non-existent identity → typed error. + - Accept already-accepted request → typed `AlreadyExists` or idempotent success (assert which contract the wallet defines). + - Send self-contact request → typed validation error. +- **Harness extensions required**: Wave A; helper to spin up two identities in one `setup()`. +- **Estimated complexity**: L +- **Rationale**: Most non-trivial multi-identity flow on the wallet. Catches handshake regressions in `contact_requests.rs` end-to-end. 
+
+#### DP-003 — Send a DashPay payment
+- **Priority**: P2
+- **Status**: STUB — placeholder for follow-up PR (Wave A + Wave B).
+- **Wallet feature exercised**: `wallet/identity/network/payments.rs:92` (`send_payment`).
+- **DET parallel**: covered indirectly by `dashpay_tasks.rs::tc_041_load_payment_history_empty` and DET's payment broadcast tests.
+- **Preconditions**: DP-002 (two contacts established).
+- **Scenario**: send a DashPay payment from `identity_a` to `identity_b`'s contact-derived address; sync `identity_b`.
+- **Assertions**: `identity_b.try_record_incoming_payment(...)` returns `Some` for the corresponding tx; payment amount matches sent.
+- **Negative variants**: payment to a stranger (no contact relationship) → typed error.
+- **Harness extensions required**: DP-002 setup; Wave A.
+- **Estimated complexity**: L
+- **Rationale**: End-to-end DashPay payment flow. Without this, payment-derivation regressions only surface in production.
+
+### Contested Names (CN)
+
+Contested-name auctions span minutes-to-hours on testnet and require multiple
+identities voting in lockstep. Both factors push them into P2 (or "deferred to
+DET parity") rather than P0/P1. Two cases are stubbed for completeness.
+
+#### CN-001 — Initiate a contested DPNS name (premium / 3-char)
+- **Priority**: P2
+- **Status**: STUB — placeholder for follow-up PR (Wave A + DPNS contest helpers).
+- **Wallet feature exercised**: `dpns.rs:176` register pathway with a contested name; `dpns.rs:425` (`contest_vote_state`).
+- **DET parallel**: DET `backend_task::contested_names`.
+- **Preconditions**: DPNS-001 + identity with extra credits.
+- **Scenario**: register a 3-character name (`xyz.dash` — the label itself must be 3 characters); query `contest_vote_state`; assert state is `Active` with the registering identity as a contender.
+- **Assertions**: contest state is `Active`; registering identity present in contender list. 
+- **Negative variants**: query `contest_vote_state` on a non-contested name → returns `None` / `Closed`. +- **Harness extensions required**: Wave A; long-timeout polling helper. +- **Estimated complexity**: L +- **Rationale**: Smoke-tests the contest entry point without committing to the full multi-day auction flow. + +#### CN-002 — Cast a masternode vote on a contested name (DEFERRED) +- **Priority**: P2 (out-of-scope today) +- **Status**: BLOCKED — needs harness refactor: masternode signer + operator-controlled mn-list participation. Re-evaluate once a regtest-with-masternodes harness is in scope. +- **Reason for deferral**: requires a masternode signer and operator-controlled mn-list participation; harness has no way to drive that today. +- **Action**: keep this row as a placeholder; revisit when a regtest-with-masternodes harness is in scope. + +### Harness self-tests (Harness) + +Cases in this subsection exercise the test harness itself (registry +serialisation, async cancellation safety, workdir isolation), not the wallet. +They live here because their failures masquerade as wallet bugs and the only +sane place to pin the harness contract is alongside the wallet contract. + +#### Harness-G1a — Corrupted registry JSON: refuse to overwrite +- **Priority**: P2 +- **Status**: STUB — placeholder for follow-up PR (pure-harness unit test on `framework/registry.rs`; no chain access required). +- **Wallet feature exercised**: `framework/registry.rs` parse + lock-file flow. +- **DET parallel**: none. +- **Preconditions**: clean workdir; ability to seed the registry file with arbitrary bytes before harness startup. +- **Scenario**: + 1. Pre-seed `registry.json` with valid JSON for one entry, followed by trailing garbage (`\n}}}`). + 2. Start the harness (e.g. invoke `setup()`). +- **Assertions**: + - Harness returns a typed `RegistryError::ParseError { path, byte_offset }` (pin the variant; `byte_offset` should be near the trailing garbage). 
+ - Harness does **not** overwrite the on-disk registry file (preserve user data; assert file bytes unchanged after the failed start).
+ - The lock-file (`.lock`) is released cleanly so a subsequent run that fixes the file can proceed.
+- **Negative variants**: none.
+- **Harness extensions required**: a typed parse-error variant on `framework/registry.rs` (likely already there; confirm name); a test setup that seeds the registry file before harness start.
+- **Estimated complexity**: M
+- **Rationale**: When the registry serialisation format changes, stale registry files in CI shouldn't silently corrupt user data. Harness-G1a pins refuse-to-overwrite as the contract.
+
+#### Harness-G1b — Registry forward-compatible unknown field
+- **Priority**: P2
+- **Status**: STUB — placeholder for follow-up PR (pure-harness unit test on `framework/registry.rs`).
+- **Wallet feature exercised**: `framework/registry.rs` deserialisation tolerance.
+- **DET parallel**: none.
+- **Preconditions**: clean workdir; ability to pre-seed registry contents.
+- **Scenario**:
+ 1. Pre-seed `registry.json` with a valid entry that includes a future-version field (e.g. `"unknown_field": "future-value"`).
+ 2. Start the harness; let it perform a normal write that round-trips the registry.
+- **Assertions**:
+ - Harness loads the registry without error.
+ - On rewrite, the `unknown_field` is preserved byte-equal (forward-compatible: don't strip fields the current code doesn't understand).
+ - Tests that depend on the entry continue to operate.
+- **Negative variants**: none.
+- **Harness extensions required**: registry serde must round-trip unknown keys via a `#[serde(flatten)]` catch-all map field (note `#[serde(other)]` applies only to enum variants and *discards* the unknown content, so it cannot provide this), or otherwise preserve unknown keys. Confirm or implement.
+- **Estimated complexity**: S
+- **Rationale**: Without forward-compat, the moment two CI workers run different versions of the harness against a shared registry, fields get silently stripped. 
+ +#### Harness-G4 — Drop `wallet.transfer` future mid-flight, recover on next sync +- **Priority**: P2 +- **Status**: STUB — placeholder for follow-up PR (cancellation-safety probe; needs structured `select!`-based cancellation harness). +- **Wallet feature exercised**: cancellation safety of `wallet/platform_addresses/transfer.rs:31`; on-next-sync recovery in `wallet/platform_addresses/sync.rs:24`. +- **DET parallel**: none. +- **Preconditions**: bank-funded test wallet. +- **Scenario**: + 1. Bank-fund `addr_1` with `40_000_000`. + 2. Wrap `wallet.transfer({addr_2: 5_000_000})` in a `tokio::select!` against a controllable cancellation token. + 3. Trigger cancellation **after** the broadcast call returns (i.e. ST hit DAPI) but **before** the proof-fetch completes. Confirm the future is dropped via the cancellation token. + 4. Call `wallet.sync_balances()`. +- **Assertions**: + - Internal wallet state is consistent after the drop: no half-applied change-set, no orphaned in-flight marker that would block the next call. + - Post-`sync_balances`, the wallet observes the broadcasted transfer and records the change-set correctly: `balances[addr_2] == 5_000_000`, `addr_1` decreased by `5_000_000 + fee`. + - A subsequent `wallet.transfer({addr_3: 1_000_000})` succeeds — no duplicate broadcast of the previous transfer, no nonce collision. +- **Negative variants**: + - Cancellation **before** broadcast: assert no broadcast occurred and balances unchanged. +- **Harness extensions required**: a way to inject a cancellation point between broadcast and proof-fetch (likely a test-only hook on the harness SDK or a `select!` wrapper on the wallet call). This is the most invasive of the Harness-G cases; mark as "blocked on cancellation hook" if not yet plumbed. +- **Estimated complexity**: L +- **Rationale**: `tokio::select!` cancellation safety is a documented Tokio footgun. Without an asserted contract, the wallet may corrupt internal state on user-initiated cancellation (e.g. 
mobile app foregrounding/backgrounding) and only surface as "wallet shows wrong balance after I closed the app".
+
+#### Harness-ID-1 — `sweep_identities` regression: registered identities surrender credits at teardown
+- **Priority**: P0
+- **Wallet feature exercised**: `tests/e2e/framework/cleanup.rs::sweep_identities` (was a no-op stub on `feat/rs-platform-wallet-e2e-cases`; implementation lands on the identity-tests-and-sweep branch).
+- **DET parallel**: none.
+- **Preconditions**: ID-001 helper available; bank identity configured for the sweep destination (per `bank_identity` env-var contract).
+- **Scenario**:
+ 1. Capture `bank_pre` *before* setup, directly from the configured bank identity (e.g. `Identity::fetch(bank_id).balance()` using the `bank_identity` env-var contract — `guard` does not exist yet at this point, so it cannot be read via `guard.base.ctx`; the pre-funding value is what the balance assertion below requires).
+ 2. `let guard = setup_with_n_identities(2, 30_000_000).await?;`
+ 3. Do not issue any extra transfers. Capture `identity_a_pre` / `identity_b_pre` balances.
+ 4. `guard.teardown().await?`.
+- **Assertions**:
+ - For each registered identity, post-teardown `Identity::fetch(...).balance()` is `0` or below `min_input_amount` (pin whichever shape the `sweep_identities` implementation adopts; document the choice in the test comment).
+ - `bank_post >= bank_pre - 2 * 30_000_000 - register_fees - sweep_fees - slack` (sweep recovers most of what was funded; no double-credit).
+ - The persistent test-wallet registry has no entry for `guard.base.test_wallet.id()` after teardown.
+- **Negative variants**:
+ - Bank identity not configured → typed `IdentitySweepNoBank` error from teardown; registry entry retained for next-startup retry.
+- **Harness extensions required**: `sweep_identities` lands on a sibling branch (this PR); this entry pins its contract on merge.
+- **Estimated complexity**: S
+- **Rationale**: Without a regression pin, a future refactor that reverts `sweep_identities` to `Ok(())` would slip past CI and identity credits would leak across runs until the bank starves. 
+ +### Found-bug pins (Found-NNN) + +Bug-pin cases discovered during a QA-mindset audit of `packages/rs-platform-wallet/src/`. +Each entry names the contract violation, the proof shape that would catch it, +and what the fix should look like. The author of the production fix is a +separate concern; these entries pin the expected behaviour so the regression +becomes a test failure rather than a silent drift. + +#### Found-001 — `auto_select_inputs_for_withdrawal` ignores `min_input_amount` floor +- **Priority**: P2 (bug pin — failure is the proof) +- **Wallet feature exercised**: `wallet/platform_addresses/withdrawal.rs:170` (`auto_select_inputs_for_withdrawal`). +- **Suspected bug**: The withdrawal-side auto-selector iterates every funded address (`balance > 0`) and inserts each into the selected map. Unlike `transfer.rs::auto_select_inputs` (which filters out balances `< min_input_amount`), the withdrawal helper has no `min_input_amount` floor. An address holding fewer credits than the protocol's per-input minimum will be selected, and the resulting transition trips `InputBelowMinimumError` at `validate_structure` time. +- **Preconditions**: a platform payment account holds at least one address with balance `> 0` but `< min_input_amount` (e.g. an address that absorbed dust on a prior partial sync). +- **Scenario**: + 1. Seed account with two funded addresses: `addr_A.balance = 100_000_000`, `addr_B.balance = min_input_amount - 1`. + 2. Call `withdraw(account_index, InputSelection::Auto, ..., DeductFromInput(0))`. +- **Assertions** (the proof shape): + - The selector returns an `Err(PlatformWalletError::AddressOperation(_))` whose message references `min_input_amount`, OR the selector returns `Ok(map)` where every value is `>= min_input_amount`. + - In NEITHER case does it return `Ok(map)` containing `addr_B → (min_input_amount - 1)`. 
+- **Expected** (after fix): mirror the transfer-side filter — exclude candidates below `min_input_amount` before constructing the input map; if the survivors don't cover the requested fee, error with a descriptive message. +- **Actual** (current code): the function selects `addr_B` unconditionally; the broadcast then fails with a generic protocol-validation error that doesn't name the cause. +- **Severity**: HIGH (per-input minimum is a hard protocol gate; user gets an opaque rejection instead of a clear wallet-side error) +- **Harness extensions required**: `auto_select_inputs_for_withdrawal` is a private helper; the test exercises it indirectly via `withdraw(InputSelection::Auto, ...)` and seeded balances. Needs a way to seed individual platform-payment addresses with a sub-minimum balance — likely via direct `set_address_credit_balance` on `ManagedPlatformAccount` for the test setup. +- **Estimated complexity**: S +- **Rationale**: The transfer path was hardened against this exact failure mode (see `auto_select_inputs` filter). Withdrawal silently drifted out of parity. Real-world trigger: a dust-tier address arrives mid-sync and the user attempts an "auto-select" withdrawal — the wallet builds an unspendable transition. + +#### Found-002 — `auto_select_inputs_for_withdrawal` skips fee-target headroom check +- **Priority**: P2 (bug pin — failure is the proof) +- **Wallet feature exercised**: `wallet/platform_addresses/withdrawal.rs:170-235`. +- **Suspected bug**: The transfer-side `select_inputs_deduct_from_input` performs an explicit "fee target retains ≥ estimated_fee" check (Phase 3) before returning. The withdrawal-side helper checks only the aggregate `accumulated < estimated_fee` — i.e. that the *sum* of all inputs covers the fee. 
Under `[DeductFromInput(0)]` the fee is taken from the lex-smallest input's *remaining balance*, not the aggregate, so a selection where the lex-smallest input is fully consumed but other inputs cover the difference passes the helper's gate yet fails on chain — the same failure pattern PA-002b / commits `9ea9e7033c` and `687b1f86cd` pinned for transfer. +- **Preconditions**: a withdrawal account with at least one small input that becomes the lex-smallest "fee target" after BTreeMap insertion. +- **Scenario**: + 1. Seed account with `addr_A` (lex-smallest, balance == small amount equal to its own consumption with no fee headroom) and `addr_B` (large balance covering the rest). + 2. Call `withdraw(..., InputSelection::Auto, ..., DeductFromInput(0))`. +- **Assertions** (the proof shape): + - The selector errors with a "fee headroom" message, OR after broadcast `validate_fees_of_event` would return `fee_fully_covered = false` (provable in a unit test by feeding the helper output to `deduct_fee_from_outputs_or_remaining_balance_of_inputs` exactly as PA-006 does for transfer). +- **Expected** (after fix): adopt the transfer helper's Phase-3 headroom check — confirm `lex-smallest-input.balance - lex-smallest-input.consumed >= estimated_fee` before returning. +- **Actual** (current code): the helper performs only an aggregate check; the chain-time deduction misdirects to an empty-remaining input. +- **Severity**: HIGH (drives users into the same chain-time `AddressesNotEnoughFundsError` class as platform #3040) +- **Harness extensions required**: same as Found-001 — fine-grained seeding of platform-payment account balances. A protocol-level reproduction (analogous to `pre_fix_buggy_selector_output_is_rejected_by_protocol_fee_deduction` in transfer's tests) is the simplest proof shape. +- **Estimated complexity**: M +- **Rationale**: Withdrawal lags transfer's hardening; the same regression class will silently re-emerge in withdrawal until the contract is pinned. 
+ +#### Found-003 — `addresses_with_balances` and `total_credits` only see the first platform-payment account +- **Priority**: P2 (bug pin — failure is the proof) +- **Wallet feature exercised**: `wallet/platform_addresses/wallet.rs:233` (`addresses_with_balances`), `wallet/platform_addresses/wallet.rs:271` (`total_credits`). +- **Suspected bug**: Both methods reach for `first_platform_payment_managed_account()` and return data from that single account. The doc comments make no mention of the "first account only" restriction (`addresses_with_balances` says "all platform addresses", `total_credits` says "total platform credits across all addresses"). Wallets with multiple platform-payment accounts (DIP-17 supports this) silently undercount. +- **Preconditions**: a wallet with two or more `PlatformPayment` accounts, each holding a non-zero balance on at least one address. +- **Scenario**: + 1. Construct a wallet with `WalletAccountCreationOptions` that yields two PlatformPayment accounts (account `0` and account `1`). + 2. Fund one address on account `0` with `40_000_000`; fund one address on account `1` with `60_000_000`. + 3. Read `wallet.platform().addresses_with_balances().await` and `wallet.platform().total_credits().await`. +- **Assertions** (the proof shape): + - `addresses_with_balances` returns at least two entries (one from each account). + - `total_credits == 100_000_000` (sum across both accounts). +- **Expected** (after fix): iterate `core_wallet.platform_payment_managed_accounts()` (or equivalent multi-account accessor) and aggregate. +- **Actual** (current code): returns only account-0 data; second account's `60_000_000` is invisible from these accessors. +- **Severity**: MEDIUM (UI-facing; the user sees a "wrong balance" without any error indication) +- **Harness extensions required**: a test wallet builder that requests multiple PlatformPayment accounts at creation. 
The existing `wallet_factory` defaults to one; a `WalletAccountCreationOptions` variant or test-only setup is needed. +- **Estimated complexity**: S +- **Rationale**: The "first account only" restriction is a load-bearing implicit assumption that nothing in the public API surface tells callers about. Multi-account support is documented at the wallet-creation layer; the readback must match. + +#### Found-004 — `transfer` / `withdraw` / `fund_from_asset_lock` silently fall back to `address_index = 0` on lookup miss +- **Priority**: P2 (bug pin — failure is the proof) +- **Wallet feature exercised**: `wallet/platform_addresses/transfer.rs:157-167`, `wallet/platform_addresses/withdrawal.rs:142-152`, `wallet/platform_addresses/fund_from_asset_lock.rs:130-140`. +- **Suspected bug**: All three call sites build a `PlatformAddressBalanceEntry` whose `address_index` is computed via a `find_map(...).unwrap_or(0)` over the account's address pool. If the address truly is not in the pool (defensive case — e.g. caller passed an address that doesn't belong to the account), the entry persists with `address_index = 0`, mis-attributing the balance update to whichever address actually sits at index 0. The persister then writes the wrong row. +- **Preconditions**: an account containing at least one address at index `0`. A subsequent operation references an address NOT in the pool (e.g. via `Explicit` input that's foreign to this account). +- **Scenario**: + 1. Build account `A` with addresses `addr_at_0`, `addr_at_1`, `addr_at_2`. + 2. Construct a transfer / withdrawal / fund call referencing a `PlatformAddress` that is NOT in any of the account's pools but is otherwise well-formed. + 3. Inspect the returned `PlatformAddressChangeSet`. +- **Assertions** (the proof shape): + - The changeset must NOT contain an entry with `(address: foreign_addr, address_index: 0)` — that's a corrupted persistence row. 
+ - Either the operation rejects with a typed error before producing a changeset entry, OR the entry omits the foreign address entirely. +- **Expected** (after fix): on `find_map(...) == None`, log + skip the entry instead of attributing it to index 0; or fail the call with a typed error pointing at the unknown address. +- **Actual** (current code): the entry is attributed to index 0 and written to the persister. +- **Severity**: MEDIUM (silent data corruption in the persister's address table; downstream readers think `addr_at_0`'s balance is whatever the SDK reported for the foreign address) +- **Harness extensions required**: a way to drive the call site with a foreign `PlatformAddress`. The transfer / fund paths accept `Explicit*` input maps so this is straightforward; the withdrawal path is per-account so requires a similar input-construction helper. +- **Estimated complexity**: S +- **Rationale**: `unwrap_or(0)` on a derivation-index lookup is the canonical "should have been a typed error" pattern. With three call sites identical, the regression class is broad. + +#### Found-005 — `register_from_addresses` / `top_up_from_addresses` discard SDK-returned address balances and nonces +- **Priority**: P2 (bug pin — failure is the proof) +- **Wallet feature exercised**: `wallet/identity/network/register_from_addresses.rs:87-122`, `wallet/identity/network/top_up_from_addresses.rs:58`. +- **Suspected bug**: Both call sites pattern-match the SDK return as `(_address_infos, ...)` and drop the address-info map. `transfer()` and `withdraw()` (in `platform_addresses/`) consume this same map to update local balances + nonces. The TODO comment in `register_from_addresses.rs:139-143` admits the gap. As a result, addresses' cached `(balance, nonce)` go stale immediately after these calls — until the next BLAST sync round resolves them. A second operation against the same address before the sync uses a stale nonce and is rejected. 
+- **Preconditions**: a platform-funded address with a known nonce. Run two consecutive operations against it. +- **Scenario**: + 1. Fund `addr_A` on test wallet with `60_000_000`. Note the address's nonce (post-funding). + 2. Call `register_from_addresses({addr_A: 30_000_000}, ...)` — this consumes part of addr_A's balance and bumps its nonce on chain. + 3. Without an intervening BLAST sync, immediately call a second operation against `addr_A` (e.g. another `register_from_addresses` or a `transfer`). +- **Assertions** (the proof shape): + - After step 2, `wallet.platform().addresses_with_balances()` reflects `addr_A`'s post-call balance (i.e. NOT the pre-call `60_000_000`). + - The cached nonce for `addr_A` matches the chain-time nonce post-step-2. + - Step 3 succeeds (would fail with a stale-nonce error today). +- **Expected** (after fix): mirror the `transfer()` pattern — walk `address_infos` and update each address's cached `AddressFunds` + emit a `PlatformAddressChangeSet` so the persister sees the updated nonce. +- **Actual** (current code): the map is dropped; local cache stays at pre-call values. +- **Severity**: MEDIUM (causes "spam-click" failures and surprises power users; not silent corruption but slow-to-recover staleness) +- **Harness extensions required**: a way to issue two back-to-back operations against the same input address with no sync between them. +- **Estimated complexity**: M (needs identity-signer + DPNS-style identity setup, then two consecutive identity-funding calls) +- **Rationale**: The TODO comment in the source admits the gap; a test pins it so the comment doesn't outlive the next refactor that touches these files. + +#### Found-006 — `top_up_identity_with_funding` ignores caller-supplied `topup_index` +- **Priority**: P2 (bug pin — failure is the proof) +- **Wallet feature exercised**: `wallet/identity/network/top_up.rs:60-106`. 
+- **Suspected bug**: The method's doc says `topup_index` is "An incrementing index distinguishing successive top-ups for the same identity". The implementation prefixes the parameter with `_` and the function body derives the funding key path from `identity_index` alone (with a `TODO(platform-wallet)` comment confirming the parameter is unused). Two consecutive top-ups for the same identity therefore derive from the same `(IdentityTopUp, identity_index)` path — yielding the same one-time key address, the same outpoint candidate, and a likely-duplicate asset-lock transaction or nonce collision on the same address. +- **Preconditions**: an identity registered on testnet via the wallet. +- **Scenario**: + 1. Register identity `I` via `register_identity_with_funding_external_signer`. + 2. Call `top_up_identity(&I.id, topup_index=0, amount_duffs=A0, ...)`. + 3. Call `top_up_identity(&I.id, topup_index=1, amount_duffs=A1, ...)` — same identity, fresh `topup_index`. +- **Assertions** (the proof shape): + - The two top-up calls produce DIFFERENT funding-output addresses (re-derived from different paths). + - The two asset-lock transactions have different txids. + - The doc claim about "successive top-ups for the same identity" is honoured — both calls succeed and credit the identity by `A0 + A1` total. +- **Expected** (after fix): wire `topup_index` into the derivation path (or remove the parameter and document the constraint). +- **Actual** (current code): two consecutive top-ups for the same identity share the same derivation context; the second is liable to collide with the first depending on caller behaviour. +- **Severity**: HIGH (the public API has a parameter that does nothing; callers relying on the doc-stated semantics produce broken transactions) +- **Harness extensions required**: identity setup; access to the asset-lock transaction details (currently inside `AssetLockManager`). 
+- **Estimated complexity**: M +- **Rationale**: A parameter that's documented as load-bearing but discarded by the implementation is a contract violation that no test currently catches. The TODO in the source admits the gap; a test makes it actionable. + +#### Found-007 — `PlatformAddressSyncManager::start` lacks a generation guard so a fast `start()` → `stop()` → `start()` can spawn parallel sync threads +- **Priority**: P2 (bug pin — failure is the proof) +- **Wallet feature exercised**: `manager/platform_address_sync.rs:189-224` (`start`). +- **Suspected bug**: `start()` checks `guard.is_some()` and bails early, then installs a fresh cancel token. On loop exit the spawned thread unconditionally writes `*guard = None;`. There is no generation counter (unlike `IdentitySyncManager::start`, which does have one). Trace: `start()` spawns thread A → `stop()` cancels A → `start()` spawns thread B (guard now Some(B)) → thread A's loop finally exits and overwrites `guard = None`. Thread B is still running, but `is_running()` reports `false` and a third `start()` will spawn thread C. Multiple sync threads can run concurrently against the same `wallets` map, each issuing GRPC calls to DAPI. +- **Preconditions**: a manager whose `start()` returns quickly enough to interleave a `stop()` and another `start()` before the original thread observes cancellation. +- **Scenario**: + 1. Build a manager with one registered wallet and a reachable DAPI endpoint. + 2. Call `start()`. + 3. Immediately call `stop()`. + 4. Immediately call `start()` again (before thread A's first sync round completes). + 5. Wait for thread A to observe its cancel token (it will, eventually) and clean up. + 6. Inspect `is_running()` and the actual thread count. +- **Assertions** (the proof shape): + - At every moment after step 4, AT MOST one platform-address-sync thread is running. + - `is_running() == true` for the entire window between step 4 and a later `stop()`. 
+ - After thread A exits in step 5, `is_running()` does NOT drop to `false` (because thread B is still active). +- **Expected** (after fix): adopt `IdentitySyncManager`'s generation-counter pattern — the spawned thread only clears the guard if its own generation matches the latest installed one. +- **Actual** (current code): thread A unconditionally clears the guard on exit, masking thread B's existence to `is_running()`. +- **Severity**: MEDIUM (parallel sync threads cause duplicate DAPI calls, write contention on the wallet manager lock, and inflated rate-limit usage; not data corruption but operationally noisy) +- **Harness extensions required**: a way to count active "platform-address-sync" threads (`std::thread::Builder::name`) or to wedge a sync iteration so cancellation is observable but slow. The simplest proof shape is a counter that the sync routine increments per pass; if two threads run concurrently the counter advances faster than the interval. +- **Estimated complexity**: M +- **Rationale**: `IdentitySyncManager` already has the right pattern. The asymmetry between the two managers is the bug. + +#### Found-008 — `LockNotifyHandler` uses `notify_waiters()` so a lock event arriving in the check / wait gap of `wait_for_proof` is dropped +- **Priority**: P2 (bug pin — failure is the proof) +- **Wallet feature exercised**: `wallet/asset_lock/lock_notify_handler.rs:30` (`notify_waiters()`); `wallet/asset_lock/sync/proof.rs:287-337` (`wait_for_proof`'s check-then-await loop). +- **Suspected bug**: `LockNotifyHandler::on_sync_event` calls `Notify::notify_waiters()`, which wakes only currently-registered waiters and produces no permit. `wait_for_proof` runs a check-then-await loop: read state under a read lock, drop the lock, then call `lock_notify.notified().await`. 
If a lock event fires in the gap between the state check and the registration of the next `notified()` future, no waiter is currently registered, the notification is discarded, and the waiter sleeps until the next event or the timeout. +- **Preconditions**: SPV emits exactly one `InstantLockReceived` for the watched outpoint at a precise moment. +- **Scenario**: + 1. Tracked asset lock `OL` is in `Broadcast` state. + 2. Test thread calls `wait_for_proof(&OL.out_point, timeout=300s)`. + 3. The sequence (deterministic for the test): + - Wait for `wait_for_proof` to enter the loop and complete its first state check (no proof yet, still `Broadcast`). + - BEFORE `wait_for_proof` reaches `lock_notify.notified()`, drive `LockNotifyHandler::on_sync_event(InstantLockReceived(OL))` exactly once. + - Update the underlying `TransactionContext` to `InstantSend(lock)` AT THE SAME TIME (so a re-check would succeed). +- **Assertions** (the proof shape): + - `wait_for_proof` returns `Ok(InstantAssetLockProof(...))` within `1s` (i.e. without waiting for the timeout). + - Counter-assertion if buggy: it sleeps until either a follow-up notify or `FinalityTimeout`. +- **Expected** (after fix): use `Notify::notify_one()` (which keeps a permit if no waiter is registered) or call `notified()` BEFORE the state check (so the future is registered before the check happens, per Tokio's documented "intended use"). +- **Actual** (current code): a single missed notification stalls the waiter. +- **Severity**: HIGH (asset-lock proof flow is on the critical path of identity registration / top-up; a stalled wait surfaces as long timeouts followed by spurious "asset lock expired" errors) +- **Harness extensions required**: a test handle on `LockNotifyHandler` (it's already constructed with an `Arc`); a way to drive the handler synchronously with a controlled state mutation. 
The wait-for-proof check uses `wallet_manager`, so the test must mutate the tracked record's `TransactionContext` before re-driving the handler. +- **Estimated complexity**: M +- **Rationale**: This is the textbook `Notify` footgun — `notify_waiters` doesn't store a permit, so check-then-await is a missed-wakeup. The asset-lock flow is exactly the place where one missed wakeup turns a 5-second proof wait into a 5-minute hang. + +#### Found-009 — wallet-event adapter swallows `RecvError::Lagged` events without compensating recovery +- **Priority**: P2 (bug pin — failure is the proof) +- **Wallet feature exercised**: `changeset/core_bridge.rs:71-115` (the `tokio::select!` loop in `spawn_wallet_event_adapter`). +- **Suspected bug**: On `Err(RecvError::Lagged(n))` the loop logs a warning and continues. The dropped events are gone — `WalletEvent::TransactionDetected`, `BlockProcessed`, etc. that the broadcast channel discarded never reach the persister. Persisted state then lags reality, and there's no compensating mechanism to refetch them. +- **Preconditions**: the broadcast channel's capacity is exceeded (many events fired in a tight burst, e.g. an SPV catch-up with a lot of UTXO changes). +- **Scenario**: + 1. Configure the persister to record every `store(..., cs)` it sees. + 2. Drive the upstream broadcast channel with `(channel_capacity + 10)` distinct events in a tight burst, each with a unique `wallet_id` or `txid` so the persister can tell them apart. + 3. Wait for the loop to drain. +- **Assertions** (the proof shape): + - The persister observes ALL injected events. Or, equivalently, at least one of: (a) the loop's recovery mechanism re-emits the dropped events (e.g. by walking `wallet_manager` state and emitting a synthetic catch-up changeset), (b) the loop returns / signals an error to the caller so the application can react. Today neither happens. 
+- **Expected** (after fix): on `Lagged(n)`, either re-subscribe and emit a "full state snapshot" changeset, or escalate the error (e.g. via a status channel) so the operator can issue an explicit re-sync. Silent loss is not OK because the persister diverges from chain reality with no signal. +- **Actual** (current code): events are gone, only a warning log remains. +- **Severity**: MEDIUM (losing core-wallet events causes the persister's stored state to diverge silently from the in-memory `WalletManager` state) +- **Harness extensions required**: a way to construct a small-capacity `tokio::sync::broadcast::Sender` and inject events directly; or an instrumented wallet manager that exposes the broadcast for tests. +- **Estimated complexity**: M +- **Rationale**: `Lagged` is rare but not impossible. When it happens, the wallet's persisted state silently goes wrong. Documenting the contract one way or the other (re-emit / escalate / accept loss) is the minimum bar. + +#### Found-010 — `PlatformAddressChangeSet::apply` ignores `funds.nonce` so persister-only nonce state can drift behind balance +- **Priority**: P2 (bug pin — failure is the proof) +- **Wallet feature exercised**: `wallet/apply.rs:259-273` (the `platform_addresses` apply branch). +- **Suspected bug**: The apply path walks `addr_cs.addresses` and writes only `entry.funds.balance` via `set_address_credit_balance`. The `nonce` field on `entry.funds` is dropped — the comment at line 266-270 admits this and points at "evo-tool's platform_address_balances table" as the alleged consumer of the nonce. But that consumption only happens via the FFI persister callback; pure in-memory replay (e.g. tests, restart-into-memory) loses the nonce and a subsequent operation against the same address will use a stale value. +- **Preconditions**: a persister round-trip whose only consumer is `apply_changeset` (no FFI sidecar). +- **Scenario**: + 1. Source `PlatformWalletInfo` `A` has `addr_X` with `(balance=50, nonce=7)`. 
+ 2. Snapshot `A` into a `PlatformAddressChangeSet` and apply it to a fresh `PlatformWalletInfo` `B`. + 3. Read `B`'s cached state for `addr_X`. +- **Assertions** (the proof shape): + - `B`'s cached nonce for `addr_X == 7`. + - Counter-assertion if buggy: `B`'s nonce reads back as `0` (the default) because apply never wrote it. +- **Expected** (after fix): persist + apply the nonce alongside the balance — extend `set_address_credit_balance` to also accept the nonce, or add a sibling write. +- **Actual** (current code): apply discards the nonce. Test harnesses replaying a changeset see balance-only state. +- **Severity**: MEDIUM (only bites pure-Rust persisters and tests; FFI consumers are unaffected because they read the changeset directly) +- **Harness extensions required**: ability to read back per-address nonce from `ManagedPlatformAccount`. If no such accessor exists today, the test would need a new one. +- **Estimated complexity**: S +- **Rationale**: The contract is "apply replays the changeset onto state". Replaying balance only is a partial replay; the silent-drop of nonce is a documentation gap that masquerades as design. + +#### Found-011 — `IdentityChangeSet::merge` documents commutativity but `insert + tombstone` for the same key resolves to "removed" regardless of submission order +- **Priority**: P2 (bug pin — failure is the proof) +- **Wallet feature exercised**: `changeset/changeset.rs:336-421` (`IdentityChangeSet::merge`); `wallet/apply.rs:127-143` (the apply order: insert then remove). +- **Suspected bug**: The `Merge` trait's docstring says changesets are "commutative and associative". `IdentityChangeSet::merge` extends `identities` (inserts) and `removed` (tombstones) independently with no insert-vs-tombstone resolution. The apply order is "insert first, then remove", so a merged changeset that contains BOTH an insert and a tombstone for identity `id_X` always resolves to "removed", regardless of which side was passed first to `merge`. 
The latent contract violation: for the case `A = {insert id_X}`, `B = {tombstone id_X}`, `A.merge(B)` then apply and `B.merge(A)` then apply both collapse to the same "removed" outcome — so commutativity holds only vacuously, and the merger has no way to express "the insert wins because it came later". The docstring on the changeset itself acknowledges the hazard ("Merge ordering hazard"); the trait-level docstring still claims commutativity. One of the two is wrong. +- **Preconditions**: two changesets that disagree on a single identity (one inserts, one removes). +- **Scenario**: + 1. Build `cs_insert` containing `identities: {id_X → entry}` only. + 2. Build `cs_remove` containing `removed: {id_X}` only. + 3. Compute state_AB by merging cs_insert into a copy, then merging cs_remove, then applying. + 4. Compute state_BA by merging cs_remove into a copy, then merging cs_insert, then applying. +- **Assertions** (the proof shape): + - If commutativity is the contract: state_AB == state_BA AND for at least one of them id_X is present (non-vacuous). Today both end up "removed", so the contract is "tombstone wins". State the rule in the docstring. + - If "tombstone wins" is the contract: docstring on the `Merge` trait must say so explicitly; the test pins the ordering. +- **Expected** (after fix): pick one — either `merge` resolves the conflict by last-seen (A.merge(B) ⇒ tombstone wins because it came later in `B`; B.merge(A) ⇒ insert wins because it came later in `A`), or document "tombstone always wins regardless of merge order" and remove the commutativity claim. +- **Actual** (current code): tombstone always wins and the docstring claims commutativity; one of the two is misleading. +- **Severity**: LOW (no current emitter produces both insert and tombstone for the same key in one mutation, per the in-source comment, but the latent footgun is documented as if it isn't a footgun) +- **Harness extensions required**: none — pure unit-test-shaped.
+- **Estimated complexity**: S +- **Rationale**: A "commutative" claim that doesn't hold for the simplest counter-example is a documentation bug that misleads future emitters. Pinning the actual semantics in a test forces the doc to match reality. + +#### Found-012 — `validate_or_upgrade_proof` and `wait_for_proof` only consult `standard_bip44_accounts`, missing CoinJoin / non-BIP-44 funding accounts +- **Priority**: P2 (bug pin — failure is the proof) +- **Wallet feature exercised**: `wallet/asset_lock/sync/proof.rs:43-54` (`validate_or_upgrade_proof`); `wallet/asset_lock/sync/proof.rs:289-322` (`wait_for_proof`); `wallet/asset_lock/sync/recovery.rs:104-110` (`resolve_status_from_info`). +- **Suspected bug**: All three lookups walk `info.core_wallet.accounts.standard_bip44_accounts.get(&account_index)` and bail with "Transaction not found" if the BIP-44 lookup misses. But `account_index` on the tracked lock can refer to a CoinJoin account, an identity account, or any non-BIP-44 funding source. A real CoinJoin-funded asset lock would have its tx in `coinjoin_accounts` (or wherever), not `standard_bip44_accounts`. The wallet then can't resolve the chain status, can't upgrade IS to CL, and `wait_for_proof` returns "transaction not found" even though the chain has the tx. +- **Preconditions**: an asset lock funded from a non-BIP-44 account. +- **Scenario**: + 1. Track a `TrackedAssetLock` whose `account_index` corresponds to a non-BIP-44 account containing the asset-lock tx. + 2. Call `wait_for_proof(&out_point, timeout=10s)`. +- **Assertions** (the proof shape): + - `wait_for_proof` returns `Ok(_)` (the proof) within the timeout, OR errors with a CLEAR account-type-mismatch message — never a generic "Transaction not found in account N" message that masks the real cause. +- **Expected** (after fix): walk every account collection, not just `standard_bip44_accounts`; or carry the account *kind* alongside `account_index` on `TrackedAssetLock`. 
+- **Actual** (current code): non-BIP-44 funded asset locks silently fail proof discovery. +- **Severity**: MEDIUM (impacts CoinJoin / shielded users; the failure mode is "asset lock never resolves" with a misleading error) +- **Harness extensions required**: ability to register a CoinJoin or non-BIP-44 account on the test wallet and seed a tx into its `transactions` map. +- **Estimated complexity**: M +- **Rationale**: Hardcoding `standard_bip44_accounts` in three places means the bug class spans the entire asset-lock proof pipeline. Pinning the contract on at least the proof-wait path catches a future shielded / CoinJoin asset-lock effort. + +#### Found-013 — `recover_asset_lock_blocking` swallows every error and returns `()` — silent recovery failure +- **Priority**: P2 (bug pin — failure is the proof) +- **Wallet feature exercised**: `wallet/asset_lock/sync/recovery.rs:36-88` (`recover_asset_lock_blocking`). +- **Suspected bug**: The function returns `()`; every failure path is a silent `return`: `wallet_id` not in manager → silent return; lock already tracked → silent return; persister `store` failure → logged and discarded inside `queue_asset_lock_changeset`. There is no signal to the caller that recovery either ran successfully or failed — the doc neither mentions success/failure nor offers a query path to check whether the lock is now tracked. +- **Preconditions**: a recovery attempt against a wallet that doesn't exist in the manager. +- **Scenario**: + 1. Construct an `AssetLockManager` whose `wallet_id` was deliberately removed from the wallet manager. + 2. Call `recover_asset_lock_blocking(...)`. +- **Assertions** (the proof shape): + - The caller can detect the failure — either via a `Result<(), _>` return type, or a follow-up `is_tracked` check that reflects "no, the recovery did not land". + - Today: the function returns `()`; the caller has no way to distinguish "recovery succeeded" from "wallet was missing". 
+- **Expected** (after fix): change the signature to `Result<(), PlatformWalletError>` (matching the rest of this module's surface), or document explicitly that the function is best-effort and provide a sibling `is_tracked` accessor for confirmation. +- **Actual** (current code): silent failure on `wallet_id` miss; the test harness can't distinguish a successful recovery from a no-op. +- **Severity**: LOW (a recovery failure should be loud; silent swallow is poor ergonomics rather than data corruption — but evo-tool / DET-style callers may rely on this contract) +- **Harness extensions required**: an `is_tracked` query on `AssetLockManager` (likely already exists via `list_tracked_locks`). +- **Estimated complexity**: S +- **Rationale**: `pub fn ... -> ()` on an operation that has multiple distinct failure modes is a documentation bug; pin the contract one way or the other. + +#### Found-014 — `transfer_credits_with_external_signer` never updates the receiver's local balance even when the receiver is wallet-owned +- **Priority**: P2 (bug pin — failure is the proof) +- **Wallet feature exercised**: `wallet/identity/network/transfer.rs:74-138`. +- **Suspected bug**: The SDK call returns `(sender_balance, receiver_balance)`; the wallet uses only `sender_balance` and pattern-matches the receiver as `_receiver_balance`. If the receiver identity is also owned by this wallet (a wallet hosting two identities is the canonical case), its local cached balance falls out of sync until the next identity sync round. +- **Preconditions**: a wallet hosting two identities `I_send` and `I_recv`. Both are managed by the local `IdentityManager`. +- **Scenario**: + 1. Register both `I_send` and `I_recv` against the same wallet. + 2. Record both identities' cached balances pre-transfer. + 3. Call `transfer_credits_with_external_signer(I_send, I_recv, amount, ...)`. + 4. Read both cached balances post-call (no intervening sync). 
+- **Assertions** (the proof shape): + - `I_send.cached_balance` decreased by `amount + fee` (call returns `sender_balance`, so this side updates). + - `I_recv.cached_balance` increased by `amount` exactly. + - Counter-assertion if buggy: `I_recv.cached_balance` is unchanged from its pre-call value. +- **Expected** (after fix): if `I_recv` is in the local `IdentityManager`, write `set_balance(receiver_balance)` for it too and emit a snapshot changeset. +- **Actual** (current code): receiver-side cache is stale until the next sync; UI reads show the wrong balance for the receiver. +- **Severity**: MEDIUM (UI staleness for self-transfers; not data corruption, but a contract violation since the SDK explicitly reports the receiver balance and the wallet has it on hand) +- **Harness extensions required**: identity setup with two wallet-owned identities (Wave A blocker). +- **Estimated complexity**: S +- **Rationale**: The SDK pattern-binds the receiver balance specifically so the wallet can use it. Discarding it via `_receiver_balance` is a small but precise contract miss. + +#### Found-015 — `load_from_persistor` leaves a partially registered wallet in `wallet_manager` when `wallet_id` mismatches +- **Priority**: P2 (bug pin — failure is the proof) +- **Wallet feature exercised**: `manager/load.rs:69-85`. +- **Suspected bug**: The load loop calls `wm.insert_wallet(wallet, platform_info)` which yields an internally-recomputed `wallet_id`. Immediately afterwards the code compares against `expected_wallet_id` and returns an `Err` if they differ. But by that point the wallet has already been inserted into `self.wallet_manager`. The error-return short-circuits any subsequent rollback, so the manager ends up holding a wallet whose id doesn't match the persisted record — and the `self.wallets` map (the public registry) doesn't have it. Subsequent reads via `wallets.get(...)` return `None` while sync paths see the stale entry. 
+- **Preconditions**: a persister whose load returns a `(expected_wallet_id, wallet_state)` pair where `expected_wallet_id` != `Wallet::compute_id(wallet_state.wallet)`. (Trivially constructible in tests.) +- **Scenario**: + 1. Build a `ClientStartState` with `wallets[expected_id] = state` where `state.wallet`'s recomputed id is `actual_id != expected_id`. + 2. Call `manager.load_from_persistor()` and observe the error. + 3. Inspect `manager.wallet_manager` (count of wallets) and `manager.wallets` (count of public-registered wallets). +- **Assertions** (the proof shape): + - On error from `load_from_persistor`, both `wallet_manager` and `self.wallets` contain ZERO wallets — neither was partially populated. + - Counter-assertion if buggy: `wallet_manager` contains ONE wallet (the partial insert) while `self.wallets` is empty. +- **Expected** (after fix): roll back the `wm.insert_wallet` (call `wm.remove_wallet(wallet_id)`) before returning the error, or perform the id check BEFORE inserting. +- **Actual** (current code): the manager is left in a half-loaded state where the inner manager and the outer registry disagree. +- **Severity**: MEDIUM (only triggered by corrupted persisted state, but when it triggers the wallet manager is operationally inconsistent) +- **Harness extensions required**: a stub persister that returns a malformed `ClientStartState`. +- **Estimated complexity**: M +- **Rationale**: Half-loaded states lead to the worst class of bug — the manager's internal invariant ("every entry in `wallet_manager` has a matching `Arc` in `self.wallets`") is silently broken. + +#### Found-016 — `remove_wallet` removes from `self.wallets` then `self.wallet_manager` non-atomically, leaving a window where readers see only one of the two +- **Priority**: P2 (bug pin — failure is the proof) +- **Wallet feature exercised**: `manager/wallet_lifecycle.rs:322-337`. 
+- **Suspected bug**: The function takes the `self.wallets` write lock, removes the wallet, drops the lock, then takes the `self.wallet_manager` write lock and removes from there. Between the two operations, a concurrent task can read `self.wallet_manager` (via e.g. a sync routine) and find the wallet still present, while `self.wallets` no longer has it. The sync routine then queries provider state for a wallet it can't find via the public registry — which manifests as `WalletNotFound` deep inside an unrelated callsite. +- **Preconditions**: at least one concurrent reader on `self.wallet_manager` while `remove_wallet` is in progress. +- **Scenario**: + 1. Register a wallet `W` with the manager. + 2. Spawn task `T1`: in a tight loop, take `wallet_manager.read()` and check whether `W` is present; record both that result and the result of `self.wallets.read()` for the same wallet. + 3. From the main task, call `manager.remove_wallet(&W.id)`. + 4. Stop `T1`. +- **Assertions** (the proof shape): + - For every observation `T1` made: either both registries report present, or both report absent. Never one-of-two. + - Counter-assertion if buggy: at least one observation shows `wallet_manager` present, `self.wallets` absent. +- **Expected** (after fix): perform both removes under a coordinated lock or document the transient inconsistency window. Operations that depend on cross-registry consistency must guard against it. +- **Actual** (current code): a small but real window of inconsistency. +- **Severity**: MEDIUM (race window is small but the resulting `WalletNotFound` errors look like spontaneous failures at unrelated call sites) +- **Harness extensions required**: a way to wedge a concurrent reader with deterministic interleaving (e.g. a `tokio::sync::Barrier` injected for tests). +- **Estimated complexity**: M +- **Rationale**: Two-registry models (here, the inner `WalletManager` plus the outer `Arc` registry) are a classic source of inconsistency windows. 
The fix is invariant-driven; the test pins the invariant. + +#### Found-017 — `register_wallet` registers wallet in memory even when persister `store` returns `Err` — vanishes on next launch +- **Priority**: P2 (bug pin — failure is the proof) +- **Wallet feature exercised**: `manager/wallet_lifecycle.rs:238-244`, `manager/wallet_lifecycle.rs:296-298`. +- **Suspected bug**: The persister is invoked to store the registration changeset (metadata + per-account specs + per-pool snapshots). On failure the code logs and proceeds to insert the wallet into `self.wallets`. The wallet is fully usable in the current process but on next launch the persister has no record of it — the user-visible effect is "I imported my wallet, used it, restarted the app, and the wallet is gone". +- **Preconditions**: a persister whose `store` returns an error for the registration round. +- **Scenario**: + 1. Build a manager with a stub persister that fails (`store(...) → Err(_)`) on its first call. + 2. Call `create_wallet_from_mnemonic(...)`. + 3. Inspect the result and the manager state. +- **Assertions** (the proof shape): + - EITHER `create_wallet_from_mnemonic` returns `Err(_)` so the caller knows the wallet won't survive a restart, AND the manager state is rolled back (no entry in `self.wallets`, no entry in `self.wallet_manager`). + - OR the function succeeds AND the persister failure is exposed via a status / event channel the caller can subscribe to. A silent log isn't sufficient. +- **Expected** (after fix): treat the registration `store` as load-bearing — fail the registration and roll back the in-memory state on persister error. +- **Actual** (current code): the registration silently proceeds; the user discovers the loss only on next launch. +- **Severity**: HIGH (data loss class — a successful-looking wallet import that doesn't survive restart) +- **Harness extensions required**: a stub persister with a configurable failure mode. 
+- **Estimated complexity**: S +- **Rationale**: The current code path assumes the persister is "best-effort". For the registration-round changeset specifically, this assumption is wrong — without that record, the wallet is unrecoverable. + +#### Found-018 — `PlatformAddressChangeSet::merge` documents fee semantics as "fee paid by the transfer that produced this changeset" but actually accumulates fees across merged changesets +- **Priority**: P2 (bug pin — failure is the proof) +- **Wallet feature exercised**: `changeset/changeset.rs:586-635` (`PlatformAddressChangeSet::fee_paid`, `Merge::merge`). +- **Suspected bug**: The `fee` field's docstring says "Fee paid by the transfer that produced this changeset, in credits." (singular). `fee_paid()` returns `self.fee`. But `merge` does `self.fee = self.fee.saturating_add(other.fee)` — so a merged changeset's `fee_paid()` returns the sum of fees across multiple transfers. A consumer that calls `fee_paid()` on a merged changeset and expects "the fee for ONE transfer" gets a misleading number with no way to tell. +- **Preconditions**: two changesets, each with a non-zero `fee`. +- **Scenario**: + 1. Build `cs_a` with `fee = 100_000`. + 2. Build `cs_b` with `fee = 200_000`. + 3. Compute `cs_a.merge(cs_b)`. + 4. Read `cs_a.fee_paid()`. +- **Assertions** (the proof shape): + - Pick one — and document the choice: + - (a) `fee_paid()` on a merged changeset is the sum: `300_000`. Then rename / re-document the field to "total fee paid across operations in this batch". + - (b) `fee_paid()` is the fee of a single transfer; `merge` should preserve it via last-write-wins or refuse to merge non-zero fees. Then document and enforce. + - Today: `fee_paid()` returns `300_000` while the docstring says "fee paid by the transfer that produced this changeset" — internally inconsistent. +- **Expected** (after fix): rename the docstring or change the merge policy. The two are at war. 
+- **Actual** (current code): consumers reading `fee_paid()` on a merged changeset can mis-count the per-transfer fee. +- **Severity**: LOW (only callers reading the fee accessor on a merged changeset are affected; the changeset is mostly consumed pre-merge) +- **Harness extensions required**: none — pure unit-test. +- **Estimated complexity**: S +- **Rationale**: Two facts in the source disagree (docstring vs merge behaviour). One of them is wrong. A test pins which. + +--- + +### Found-bug pins (Found-NNN) + +Bug-pin cases discovered during a QA-mindset audit of `packages/rs-platform-wallet/`. +Each entry names the contract violation, the proof shape that would catch it, +and what the fix should look like. The author of the production fix is a +separate concern; these entries pin the expected behaviour so the regression +becomes a test failure rather than a silent drift. + +> Found-001..Found-018 live on a sibling branch (`feat/rs-platform-wallet-e2e-cases` → +> commit `5015e658e8`) and will rejoin this branch at the consolidation step. The +> entry below is filed against the present branch (`feat/rs-platform-wallet-e2e-cases-pa`) +> because the audit target — the harness's `SeedBackedIdentitySigner` — was added on this +> stack and was not yet present when Found-001..018 were drafted. + +#### Found-019 — `SeedBackedIdentitySigner` re-hashes `ECDSA_HASH160` keys, double-hashing the lookup so any `ECDSA_HASH160`-typed `IdentityPublicKey` silently misses +- **Priority**: P2 (bug pin — failure is the proof) +- **Severity**: HIGH (signer-side correctness bug; identity-key sign / can_sign_with paths fail for one of two key types the impl claims to support) +- **Wallet feature exercised**: `tests/e2e/framework/signer.rs:114-122` (`can_sign_with`), `tests/e2e/framework/signer.rs:128-143` (`lookup_identity_secret`). +- **Suspected bug**: Both lookup paths compute `let pkh = ripemd160_sha256(key.data().as_slice())` and probe `inner.address_private_keys` with the result. 
The cache itself was populated at construction in `SimpleSigner::from_seed_for_identity` (`packages/simple-signer/src/signer.rs:235`) keyed by `ripemd160_sha256(&pubkey.serialize())` — i.e. RIPEMD160(SHA256(raw 33-byte secp256k1 pubkey)). For `KeyType::ECDSA_SECP256K1` the lookup matches: `key.data()` is the raw 33-byte pubkey, hashing it once yields the cache key. For `KeyType::ECDSA_HASH160` the lookup does NOT match: `key.data()` is already a 20-byte `ripemd160_sha256(pubkey)` per `KeyType::public_key_data_from_private_key_data` and `KeyType::default_size` (`packages/rs-dpp/src/identity/identity_public_key/key_type.rs:59,244`). The impl hashes that 20-byte hash *again*, producing `ripemd160_sha256(ripemd160_sha256(pubkey))` ≠ stored key. The match arms at lines 90 and 116 explicitly admit `ECDSA_HASH160` as supported, so the type signature lies — every call against an `ECDSA_HASH160` key returns `can_sign_with == false` and `sign(..) == Err(ProtocolError::Generic("identity key {hex} not in pre-derived gap window"))` regardless of whether the underlying secret is in the cache. +- **Preconditions**: an `IdentityPublicKey` with `key_type == ECDSA_HASH160` whose `data` is `ripemd160_sha256(pubkey)` for a pubkey derived at one of the pre-cached gap-window slots `(identity_index, key_index ∈ 0..DEFAULT_GAP_LIMIT)`. +- **Scenario** (pure unit test on the harness signer — no chain required): + 1. Build a seed (e.g. `[0x42; 64]`) and `let signer = SeedBackedIdentitySigner::new(&seed, Network::Testnet, identity_index = 0)?`. + 2. Derive the secp256k1 pubkey for `(identity_index = 0, key_index = 0)` via `derive_ecdsa_identity_auth_keypair_from_master` (the same path `from_seed_for_identity` walks). + 3. Compute `let h160 = ripemd160_sha256(&pubkey)`. + 4. Build two `IdentityPublicKey`s for that derivation slot: + - `key_secp = IdentityPublicKey::V0(IdentityPublicKeyV0 { key_type: KeyType::ECDSA_SECP256K1, data: BinaryData::new(pubkey.to_vec()), .. 
})` + - `key_h160 = IdentityPublicKey::V0(IdentityPublicKeyV0 { key_type: KeyType::ECDSA_HASH160, data: BinaryData::new(h160.to_vec()), .. })` + 5. Probe both: + - `signer.can_sign_with(&key_secp)` and `signer.sign(&key_secp, b"msg").await` + - `signer.can_sign_with(&key_h160)` and `signer.sign(&key_h160, b"msg").await` +- **Assertions** (the proof shape): + - `signer.can_sign_with(&key_secp) == true` AND `signer.sign(&key_secp, b"msg").await.is_ok()` (sanity baseline — proves the cache IS populated for this slot). + - `signer.can_sign_with(&key_h160) == true` AND `signer.sign(&key_h160, b"msg").await.is_ok()` (the contract — `ECDSA_HASH160` is whitelisted by both match arms, so it must round-trip). + - Counter-assertion if buggy (today's behaviour): `signer.can_sign_with(&key_h160) == false` AND `signer.sign(&key_h160, b"msg").await` returns `Err(ProtocolError::Generic(msg))` where `msg.contains("not in pre-derived gap window")`. +- **Expected** (after fix): branch on `key.key_type()` before computing the cache key — for `ECDSA_HASH160` the lookup key is `key.data()` *as-is* (it's already the 20-byte hash); for `ECDSA_SECP256K1` it remains `ripemd160_sha256(key.data())`. Mirror the same fix in both `lookup_identity_secret` and `can_sign_with`. Equivalent fix: reject `ECDSA_HASH160` with a clear `unsupported key type` error and remove it from the match arms — the harness only ever produces `ECDSA_SECP256K1` keys via `derive_identity_key`, so `ECDSA_HASH160` support is currently aspirational dead code. +- **Actual** (current code): the harness signer claims to support `ECDSA_HASH160` (match arms at signer.rs:90 and signer.rs:116) but the lookup hashes the already-hashed `data` and fails every probe. The bug never triggers in *current* harness usage because `derive_identity_key` (signer.rs:182-191) hard-codes `key_type = ECDSA_SECP256K1` — but any future test that registers an identity with a hash-typed key, or any production caller that re-uses this signer (e.g. 
an SDK example wired to a chain identity that was registered by another wallet with an `ECDSA_HASH160` key), trips it. +- **Harness extensions required**: none — pure unit test on `SeedBackedIdentitySigner`. `derive_ecdsa_identity_auth_keypair_from_master` is already exposed via `platform_wallet::wallet::identity::network` (used by `derive_identity_key`). +- **Estimated complexity**: S +- **Rationale**: This is a "the type signature lies" bug. The match arms admit two key types; one of them silently never works. Either fix the lookup or shrink the match. Without a pin, the discrepancy survives until a real consumer hits it — and that consumer's failure mode is a confusing `not in pre-derived gap window` error on a key that demonstrably *is* in the gap window. The hash-level confusion (raw pubkey vs `ripemd160_sha256(pubkey)` vs `ripemd160_sha256(ripemd160_sha256(pubkey))`) is exactly the class of bug a pure-data unit test pins cheaply. + +#### Found-020 — PA-001b spec/impl drift: `output_change_address` parameter never landed in production +- **Priority**: P2 (spec-vs-impl pin — the missing feature is the bug) +- **Severity**: LOW (the wallet works; the spec describes a feature that does not exist, which is misleading documentation rather than a runtime bug) +- **Wallet feature exercised**: `wallet/platform_addresses/transfer.rs:31` (`PlatformAddressWallet::transfer`); the surrounding `InputSelection` API at `wallet/platform_addresses/mod.rs:30`. +- **Suspected bug**: TEST_SPEC.md PA-001b describes driving `transfer(...)` with an `output_change_address: Option<Address>` argument routing residual ("change") credits either to a wallet-derived default (`None`) or to an explicit address (`Some(addr)`). That parameter does not appear anywhere in the production signature — confirmed by `grep -rn 'output_change_address\|change_address' packages/rs-platform-wallet/src/`, which surfaces only Layer-1 (core) `next_change_address_for_account` paths.
The current production change-output semantics are implicit: + - `InputSelection::Auto`: the auto-selector consumes `Σ outputs` exactly under the post-fix `Σ inputs == Σ outputs` invariant (commits `aaf8be74ee`, `9ea9e7033c`); residual stays on the selected input addresses, no separate change output. + - `InputSelection::Explicit(map)`: caller declares the consumed amount per input directly; residual stays on the input. + Neither branch surfaces an `output_change_address` parameter. +- **Preconditions**: none — this is a documentation / API-shape contract pin. +- **Scenario** (test as documentation drift assertion): + 1. Confirm by reflection (rustdoc / `syn` parse) that `PlatformAddressWallet::transfer`'s signature does NOT include an `output_change_address` parameter today. +- **Assertions** (the proof shape, two valid resolutions): + - **(a) Spec realignment**: TEST_SPEC.md PA-001b is rewritten to match the implicit-change semantics above, OR removed with a deletion-note. The Found-020 entry itself can then be removed alongside. + - **(b) Production extension**: `PlatformAddressWallet::transfer` gains an `output_change_address: Option<Address>` parameter wired through the auto-select path so PA-001b's two-branch behaviour becomes implementable. +- **Expected** (after resolution): the spec and the production API agree. Either the spec describes what the wallet does, or the wallet does what the spec describes. +- **Actual** (current state): PA-001b stays `#[ignore]`'d as `BLOCKED — feature missing in production`; the spec entry is preserved with a `**Status**:` flag so a human reviewer sees the drift at a glance, rather than discovering it by reading the test.
+- **Rationale**: The spec is one of the harness's load-bearing documents — test authors trust it as a description of the production API. A spec entry that describes a non-existent parameter erodes that trust. Filing the drift as Found-020 (and surfacing it via the PA-001b status field) makes the gap visible without forcing an immediate spec rewrite — the resolution can wait for a coordinated PA-001b implementation pass. + +--- + +## 4. Harness extension roadmap + +Aggregating "Harness extensions required" across §3 and proposing a build +order. Each wave unlocks the cases listed. + +### Wave A — Identity signer + identity setup helpers +- Add `SeedBackedIdentitySigner` implementing `Signer` in `framework/signer.rs` (DIP-9 derivation per `derive_ecdsa_identity_auth_keypair_from_master` at `wallet/identity/network/identity_handle.rs:143`). +- Add `derive_identity_key(seed_bytes, network, identity_index, key_index, purpose, security_level) -> IdentityPublicKey` test helper. +- Add `TestWallet::register_identity_from_addresses(funding: Credits) -> Identity` helper that builds the placeholder, calls `register_from_addresses`, and waits for on-chain visibility. +- Add `wait_for_identity_balance(identity_id, expected, timeout)` in `framework/wait.rs`. +- **Unlocks**: ID-001, ID-001c, ID-002, ID-003, ID-004, ID-005, ID-005b, ID-006, ID-006b, DPNS-001, DPNS-001b, DPNS-001c, DPNS-002 (partial), CT-001, DP-001, DP-001b, DP-001c, DP-002, DP-003, TK-001, TK-001b, TK-002, TK-003, TK-004, CN-001. + +### Wave B — Multi-identity per setup +- Extend `setup()` to accept `setup_with_n_identities(n: u32) -> SetupGuard { test_wallet, identities: Vec }`. +- **Unlocks**: ID-003, DP-002, DP-003. +- **Cost**: Wave A pre-requisite; ~150 LoC. + +### Wave C — Contract fixture loader +- `tests/fixtures/contracts/` directory + `framework::fixtures::load_contract(name)` helper. +- One canonical `minimal.json` (one doc type, two scalar fields). +- **Unlocks**: CT-001, CT-002, CT-003. 
+ +### Wave D — Token contract operator config +- `Config::token_contract_id`, `Config::token_position`, optional `Config::token_claim_amount`. +- Operator pre-funds tokens to the bank-derived identity (one-time, README'd next to bank pre-funding). +- **Unlocks**: TK-001, TK-001b, TK-002, TK-003, TK-004. + +### Wave E — SPV re-enablement (Task #15) +- Uncomment SPV block in `harness.rs:200-218`; swap `TrustedHttpContextProvider` → `SpvContextProvider`. +- Add `SpvHealth::status()` accessor to manager. +- Add Core-funded test wallet helper (faucet integration). +- **Unlocks**: CR-001, CR-002, CR-003. + +### Wave F — Test-only utility helpers +- `TestWallet::transfer_with_inputs` (PA-002 negative variant; PA-004b exact-balance setup). +- `TestWallet::transfer_capturing_st_bytes` (PA-006, PA-006b). +- `TestWallet::estimate_transfer_fee` (PA-002b). +- `Bank::total_credits` accessor exposed (already exists, just lift to public re-export if not). +- `Bank::with_balance_for_test` constructor (PA-010). +- `TestRegistry::get_status(wallet_id)` (PA-004). +- `FUNDING_MUTEX` instrumentation hook (PA-008c). +- "Did we broadcast?" hook on the harness SDK (PA-004c, PA-013). +- Cancellation-point hook between broadcast and proof-fetch (Harness-G4). +- Test DAPI proxy / `httpmock` adapter (PA-013). +- **Unlocks**: PA-002 (negative), PA-002b, PA-004 (full assertions), PA-004b, PA-004c, PA-006, PA-006b, PA-008c, PA-009, PA-010, PA-011, PA-012, PA-013, Harness-G1a, Harness-G1b, Harness-G4. +- **Cost**: ~200-400 LoC across multiple commits; the test-DAPI-proxy and cancellation-hook items are non-trivial and can land late. + +**Recommended build order**: Wave A first (highest leverage — unblocks 25+ cases), then Wave F's cheap helpers (estimate-fee, transfer-with-inputs, registry status, FUNDING_MUTEX hook) which unblock most P2 PA cases, then Wave C, then Wave B as ID-003/DP-002 land. 
Wave F's expensive items (test DAPI proxy, cancellation hook) and Waves D/E are independent and can run in parallel with the others once a champion is assigned. + +### Wallet-API gap notes (follow-up issues) + +While drafting §3 the following minor public-API gaps were noted. None block +the spec but each would simplify a test if filed as a follow-up issue: + +1. **No `PlatformWallet::fee_paid` accessor** — every PA case derives the fee from `Σ funded - Σ received - Σ remaining`. A first-class `last_transfer_fee()` (or a `fee` field on `PlatformAddressChangeSet`) would let assertions read the fee directly. Currently noted as a comment in `cases/transfer.rs:142-147`. +2. **No public sync-watermark getter on `PlatformAddressWallet`** — PA-007 needs to read the provider's `last_known_recent_block` to assert monotonicity. The field is internal; exposing a `pub fn sync_watermark() -> Option` would unblock cleanly. +3. **`IdentityManager::known_identities()` shape** — needed by ID-001's "exactly one identity registered" assertion. If the manager exposes only `BTreeMap` without a length convenience, the test must pull internals; a `.len()` / `.identity_ids()` helper would be cleaner. +4. **Token-balance accessor by `(identity, contract, position)`** — `wallet/tokens/wallet.rs:248` already has `balance(...)`; confirm signature matches what TK-001 needs (`balance_for(identity_id, contract_id, position)`) and add the convenience if not. +5. **DPNS `register_name_with_external_signer` lacks a "wait for visibility" partner** — Wave A would benefit from a `wait_for_dpns_name_visible(name, timeout)` helper, ideally co-located with `wait_for_balance` in `framework/wait.rs`. +6. **No protocol-version accessor for `min_input_amount` / `max_outputs`** — PA-009 and PA-014 need to read these from the active `PlatformVersion`; expose a thin test-friendly getter. + +--- + +## 5. Out-of-scope register + +Explicit list of what this suite WILL NOT cover, with reasons. 
Each entry +prevents future scope creep arguments. + +1. **Shielded transfers** — entire `wallet/shielded/` surface. Reason: prover, viewing-key derivation, and note-selection are a parallel system; coverage belongs in a dedicated suite. Re-evaluate when shielded ships to mainnet. +2. **Credit withdrawals** (`wallet/identity/network/withdrawal.rs`, `wallet/platform_addresses/withdrawal.rs`) — withdrawal verification requires Layer-1 observation of the withdrawal tx. Blocked on Task #15 (SPV stabilisation). Defer. +3. **Token contract deployment** — no testnet contract registry; the suite assumes pre-deployed contracts via env config (Wave D). +4. **Asset-lock-funded identity registration** — the bank holds Platform credits, not Core UTXOs. The address-funded variant (ID-001) covers this need from the wallet's perspective; full asset-lock coverage stays with DET (`dash-evo-tool/tests/backend-e2e/identity_create.rs`). +5. **DAPI Core path** (`tx_is_ours`, mn-list diffs, peer behaviour) — DET territory; this suite tests the wallet against DAPI, not DAPI itself. +6. **Cross-process bank concurrency** — README §"Multi-process safety" documents the operator-side requirement; not a test concern. +7. **Mainnet runs** — config supports `network=mainnet` but the suite's bank-funded model is testnet-by-policy. Mainnet runs require an explicit operator review; out-of-scope for automation. +8. **CN-002 (masternode voting)** — needs a regtest-with-masternodes harness that doesn't exist today. +9. **Non-BIP-39 mnemonic / seed sources** — see §1.2. Mnemonics must be drawn from the BIP-39 English wordlist; raw-entropy and arbitrary-UTF-8 paths are out of scope. +10. **Clock-skew / wall-clock-dependent assertions** — testnet runners are assumed to have NTP. Tests that rely on chain timestamps assume the runner's wall clock is within a few seconds of chain time. Cases that need to assert behaviour under arbitrary skew belong in a unit-test layer below this suite. + +--- + +## 6. 
Open questions for product owner + +Each question's answer changes the spec; numbered for reference. + +1. **Token contract registry** — do we maintain one canonical testnet token contract for TK-001..TK-004, or do we rely on operators to provide their own via env? (Answer changes Wave D scope.) +2. **Contested-name coverage** — should CN-001 be promoted to P1, or do we accept DET parity and leave it P2/deferred? +3. **Long-running tests** — PA-005 (16 funding round-trips, ~3 min) is borderline. Do we accept multi-minute tests in the default `cargo test --test e2e` run, or gate them behind a `slow-tests` cargo feature? +4. **Identity withdrawal coverage** — once SPV (Task #15) lands, do we want withdrawal coverage here, or is that DET's exclusive territory? +5. **Mainnet smoke** — should the suite ever support a single, opt-in mainnet smoke case (e.g. PA-001 with a tiny `1_000`-credit transfer) for release-gate validation? +6. **Fee-bound numbers** — PA-003 asserts `fee_5 - fee_1 < 1_000_000`. Should we baseline empirical fee numbers and tighten these bounds in a follow-up, or keep them loose and rely on protocol-version bumps to reset them? +7. **Deterministic fixture network** — testnet is shared and noisy. Is there appetite to maintain a regtest-with-Drive cluster for CI exclusively, or do we accept testnet flakiness as the operating constraint? +8. **Test DAPI proxy infra** — PA-013 and the broadcast-retry contract require a controllable test DAPI proxy. Build it bespoke (`httpmock`-based), reuse an existing harness from elsewhere in the workspace, or defer the case until the proxy lands? +9. **Cancellation-hook plumbing** — Harness-G4 needs a test-only injection point between broadcast and proof-fetch. Acceptable to add a `cfg(test)` hook on the wallet, or must this stay external (wrap the future in a `select!` from the test side and accept coarser cancellation granularity)? 
+ +--- + +Catalogued by Marvin (QA), with the resigned competence of someone who has read every line of this code twice. Edge-case expansion by Trillian, who knows that the difference between "tested" and "tested at the boundary" is the difference between "ships" and "ships back". diff --git a/packages/rs-platform-wallet/tests/e2e/cases/mod.rs b/packages/rs-platform-wallet/tests/e2e/cases/mod.rs new file mode 100644 index 00000000000..0f33d0b2d1b --- /dev/null +++ b/packages/rs-platform-wallet/tests/e2e/cases/mod.rs @@ -0,0 +1,5 @@ +//! End-to-end test cases. Each submodule hosts +//! `#[tokio_shared_rt::test(shared)]` entries that share the +//! process-wide [`super::framework::E2eContext`]. + +pub mod transfer; diff --git a/packages/rs-platform-wallet/tests/e2e/cases/transfer.rs b/packages/rs-platform-wallet/tests/e2e/cases/transfer.rs new file mode 100644 index 00000000000..d76bfb5b208 --- /dev/null +++ b/packages/rs-platform-wallet/tests/e2e/cases/transfer.rs @@ -0,0 +1,198 @@ +//! Self-transfer of credits between two platform-payment addresses +//! owned by the same test wallet. +//! +//! Gated behind `#[ignore]` so a stock `cargo test -p platform-wallet` +//! (or workspace-wide invocation) stays green for contributors and CI +//! jobs that lack a funded testnet bank wallet, live DAPI access, and +//! the operator `.env`. Operator setup lives in `tests/.env` +//! (template: `tests/.env.example`); a missing +//! `PLATFORM_WALLET_E2E_BANK_MNEMONIC` would otherwise surface as a +//! [`FrameworkError::Bank`](crate::framework::FrameworkError::Bank) +//! during context init, escalated to a panic by `setup().expect(..)`. +//! +//! ```bash +//! cp packages/rs-platform-wallet/tests/.env.example \ +//! packages/rs-platform-wallet/tests/.env +//! # edit tests/.env to set PLATFORM_WALLET_E2E_BANK_MNEMONIC +//! cargo test --test e2e -- --ignored --nocapture +//! 
``` + +use std::collections::BTreeMap; +use std::time::Duration; + +use crate::framework::prelude::*; + +// Sized to dodge platform #3040 — AddressFundsTransferTransition's +// `calculate_min_required_fee` returns the static +// `state_transition_min_fees` floor (~6.5M for 1in/1out) but Drive's +// chain-time fee includes storage + processing costs that scale with +// the operation set (~14.94M empirically for the same shape). With +// `[ReduceOutput(0)]`, `output[0]` absorbs the fee at chain time; +// if it's smaller than the realistic fee the broadcast fails with +// `AddressesNotEnoughFundsError`. Picking output amounts well above +// the empirical chain-time ceiling sidesteps the bug until #3040 +// lands at the dpp layer. + +/// Credits the bank delivers to `addr_1`. The bank uses +/// `[DeductFromInput(0)]`, so addr_1 receives this exact amount; +/// the bank's fee is absorbed by the bank's own input. Sized well +/// above the chain-time fee (~15M empirically) so addr_1 has +/// enough headroom for the self-transfer (see #3040 comment above). +const FUNDING_CREDITS: u64 = 100_000_000; + +/// Safety floor for the addr_1 wait. Under `[DeductFromInput(0)]` +/// addr_1 receives FUNDING_CREDITS exactly; the floor is kept as a +/// guard against an empty/stale observation slipping through. +const FUNDING_FLOOR: u64 = 70_000_000; + +/// Gross credits the test wallet submits in its self-transfer to +/// `addr_2`. Same `[ReduceOutput(0)]` semantics — addr_2 receives +/// `TRANSFER_CREDITS − transfer_fee`. Sized well above the +/// empirical chain-time fee (~15M) to avoid #3040. +const TRANSFER_CREDITS: u64 = 50_000_000; + +/// Lower bound on what addr_2 must receive before the assertions +/// run. A non-zero floor prevents an empty observation from +/// passing the wait. +const TRANSFER_FLOOR: u64 = 1_000_000; + +/// Per-step deadline for balance observations. 
+const STEP_TIMEOUT: Duration = Duration::from_secs(60); + +#[tokio_shared_rt::test(shared)] +#[ignore = "requires PLATFORM_WALLET_E2E_BANK_MNEMONIC and live testnet access; run with `cargo test -- --ignored`"] +async fn transfer_between_two_platform_addresses() { + let _ = tracing_subscriber::fmt() + .with_env_filter( + tracing_subscriber::EnvFilter::try_from_default_env() + .unwrap_or_else(|_| "info,platform_wallet=debug".into()), + ) + .with_test_writer() + .try_init(); + + let s = setup().await.expect("e2e setup failed"); + + // `next_unused_receive_address` advances the pool only once an + // address is observed used; derive `addr_2` AFTER `addr_1` is + // funded so the cursor lands on a fresh slot. + let addr_1 = s + .test_wallet + .next_unused_address() + .await + .expect("derive addr_1"); + + // Snapshot bank balance before funding so we can derive the fee + // the bank's input actually paid (invisible to the test wallet). + let bank_pre = s.ctx.bank().total_credits().await; + + s.ctx + .bank() + .fund_address(&addr_1, FUNDING_CREDITS) + .await + .expect("bank.fund_address"); + + // Bank uses `[DeductFromInput(0)]`: addr_1 receives FUNDING_CREDITS + // exactly. Wait on the safety floor; the exact-amount assertion + // follows after the test wallet syncs. + wait_for_balance(&s.test_wallet, &addr_1, FUNDING_FLOOR, STEP_TIMEOUT) + .await + .expect("addr_1 funding never observed"); + + let addr_2 = s + .test_wallet + .next_unused_address() + .await + .expect("derive addr_2"); + assert_ne!( + addr_1, addr_2, + "wallet must hand out a fresh address once addr_1 is observed used" + ); + + let outputs: BTreeMap<_, _> = std::iter::once((addr_2, TRANSFER_CREDITS)).collect(); + s.test_wallet + .transfer(outputs) + .await + .expect("self-transfer"); + + // addr_2 receives `TRANSFER_CREDITS − transfer_fee` (also + // `[ReduceOutput(0)]`). Wait on the post-fee floor. 
+ wait_for_balance(&s.test_wallet, &addr_2, TRANSFER_FLOOR, STEP_TIMEOUT) + .await + .expect("addr_2 transfer never observed"); + + // Re-sync test wallet so the cached view reflects post-transfer + // state across BOTH addresses. + s.test_wallet + .sync_balances() + .await + .expect("post-transfer sync"); + let balances = s.test_wallet.balances().await; + let received = balances.get(&addr_2).copied().unwrap_or(0); + let remaining = balances.get(&addr_1).copied().unwrap_or(0); + // The transfer fee is the share TRANSFER_CREDITS lost while + // crossing addr_1 -> addr_2 via `[ReduceOutput(0)]`. + let transfer_fee = TRANSFER_CREDITS.saturating_sub(received); + + // Resync the bank to get its post-funding balance, then derive + // the fee the bank's input absorbed under `[DeductFromInput(0)]`. + s.ctx + .bank() + .sync_balances() + .await + .expect("bank post-funding sync"); + let bank_post = s.ctx.bank().total_credits().await; + // bank_pre - bank_post = FUNDING_CREDITS + bank_fee + let bank_fee = bank_pre + .saturating_sub(bank_post) + .saturating_sub(FUNDING_CREDITS); + + tracing::info!( + target: "platform_wallet::e2e::cases::transfer", + ?addr_1, + ?addr_2, + bank_pre, + bank_post, + funded = FUNDING_CREDITS, + received, + remaining, + bank_fee, + transfer_fee, + "post-transfer balance snapshot" + ); + + // Under [ReduceOutput(0)], the protocol deducts the transfer fee + // from output[0] — addr_2's received amount — not from addr_1's + // residual. So addr_1 retains FUNDING_CREDITS - TRANSFER_CREDITS + // and addr_2 receives TRANSFER_CREDITS - transfer_fee. + assert_eq!( + remaining, + FUNDING_CREDITS - TRANSFER_CREDITS, + "addr_1 must retain FUNDING_CREDITS - TRANSFER_CREDITS \ + (transfer_fee is deducted from addr_2's amount, not addr_1's residual). 
\ + observed remaining={remaining} expected={}", + FUNDING_CREDITS - TRANSFER_CREDITS, + ); + assert_eq!( + received, + TRANSFER_CREDITS - transfer_fee, + "addr_2 must receive TRANSFER_CREDITS minus the transfer fee \ + (ReduceOutput(0) deducts fee from the transferred amount). \ + observed received={received} expected={}", + TRANSFER_CREDITS - transfer_fee, + ); + assert!( + transfer_fee > 0, + "self-transfer must charge a non-zero fee (received={received})" + ); + assert!( + transfer_fee < TRANSFER_CREDITS, + "transfer fee implausibly high: {transfer_fee} >= TRANSFER_CREDITS ({TRANSFER_CREDITS})" + ); + assert!( + bank_fee > 0, + "bank funding must charge a non-zero fee to its own input \ + (bank_pre={bank_pre} bank_post={bank_post} funded={FUNDING_CREDITS})" + ); + + s.teardown().await.expect("teardown"); +} diff --git a/packages/rs-platform-wallet/tests/e2e/framework/bank.rs b/packages/rs-platform-wallet/tests/e2e/framework/bank.rs new file mode 100644 index 00000000000..c953e18d13d --- /dev/null +++ b/packages/rs-platform-wallet/tests/e2e/framework/bank.rs @@ -0,0 +1,244 @@ +//! Pre-funded bank wallet — funding source for every test wallet. +//! +//! Loaded from `PLATFORM_WALLET_E2E_BANK_MNEMONIC` at +//! `E2eContext::init` time. `fund_address` serialises in-process +//! calls on [`FUNDING_MUTEX`] so concurrent tests don't race nonces; +//! cross-process isolation is the operator's concern (distinct +//! mnemonic per environment, distinct workdir slot per process). 
+ +use std::collections::BTreeMap; +use std::sync::Arc; + +use bip39::Mnemonic as Bip39Mnemonic; +use dpp::address_funds::PlatformAddress; +use dpp::fee::Credits; +use dpp::util::hash::ripemd160_sha256; +use dpp::version::PlatformVersion; +use key_wallet::{AccountType, ChildNumber, Network}; +use platform_wallet::wallet::persister::NoPlatformPersistence; +use platform_wallet::wallet::platform_addresses::InputSelection; +use platform_wallet::{ + PlatformAddressChangeSet, PlatformWallet, PlatformWalletError, PlatformWalletManager, +}; +use tokio::sync::Mutex as AsyncMutex; + +use simple_signer::signer::SimpleSigner; + +use super::config::Config; +use super::wallet_factory::{bank_fee_strategy, DEFAULT_ACCOUNT_INDEX_PUB, DEFAULT_KEY_CLASS_PUB}; +use super::{make_platform_signer, FrameworkError, FrameworkResult}; + +/// In-process funding mutex — serialises concurrent +/// `bank.fund_address` calls so nonces don't race. +static FUNDING_MUTEX: AsyncMutex<()> = AsyncMutex::const_new(()); + +/// Bank wallet handle wrapping a synced `PlatformWallet` and its +/// signer. All funding flows through `fund_address` so the +/// `FUNDING_MUTEX` invariant lives in one place. +pub struct BankWallet { + wallet: Arc, + signer: SimpleSigner, + /// Cached for under-funded panic messages and log breadcrumbs. + primary_receive_address: PlatformAddress, +} + +impl std::fmt::Debug for BankWallet { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + f.debug_struct("BankWallet") + .field("wallet_id", &hex::encode(self.wallet.wallet_id())) + .field("primary_receive_address", &self.primary_receive_address) + .finish_non_exhaustive() + } +} + +impl BankWallet { + /// Load the bank from its BIP-39 mnemonic, sync once, and check + /// the balance covers [`Config::min_bank_credits`]. + /// + /// Under-funded balances PANIC with a "top up at
" + /// pointer; surfacing one clear actionable failure beats burying + /// it under per-test "insufficient balance" errors. + pub async fn load( + manager: &Arc>, + config: &Config, + ) -> FrameworkResult { + if config.bank_mnemonic.trim().is_empty() { + return Err(FrameworkError::Bank( + "bank mnemonic is empty — set PLATFORM_WALLET_E2E_BANK_MNEMONIC".into(), + )); + } + // Validate up front and derive the 64-byte seed once so the + // seed-backed signer can pre-build its key cache below. + let validated: Bip39Mnemonic = + config.bank_mnemonic.parse().map_err(|err: bip39::Error| { + FrameworkError::Bank(format!("invalid BIP-39 mnemonic: {err}")) + })?; + let seed_bytes = validated.to_seed(""); + + let network = config.network; + let wallet = manager + .create_wallet_from_mnemonic( + &config.bank_mnemonic, + network, + key_wallet::wallet::initialization::WalletAccountCreationOptions::Default, + ) + .await + .map_err(wallet_err)?; + wallet.platform().initialize().await; + + // Seed balances; a sync failure here makes every test fail. + wallet + .platform() + .sync_balances(None) + .await + .map_err(wallet_err)?; + + // Pin the bank's sweep target to DIP-17 index 0 deterministically + // so the same address absorbs sweep-back funds across every test + // run. `next_unused_receive_address` would otherwise advance past + // index 0 once it gets marked used, accumulating empty addresses. + let primary_receive_address = derive_platform_address_at_index( + &wallet, + network, + DEFAULT_ACCOUNT_INDEX_PUB, + DEFAULT_KEY_CLASS_PUB, + 0, + ) + .await?; + + let total = wallet.platform().total_credits().await; + if total < config.min_bank_credits { + // Under-funded bank is a hard operator error; panic with + // the README's bank-pre-funding format so operators hit + // the same actionable pointer in CI as in the docs. 
+ let address_bech32m = primary_receive_address.to_bech32m_string(network); + panic!( + "Bank wallet under-funded.\n \ + balance : {balance} credits\n \ + required: {required} credits\n \ + top up at: {address_bech32m}\n\ + \n\ + Send testnet platform credits to the address above, then re-run the tests.", + balance = total, + required = config.min_bank_credits, + ); + } + + let signer = make_platform_signer(&seed_bytes, network)?; + Ok(Self { + wallet, + signer, + primary_receive_address, + }) + } + + /// Borrow the underlying `PlatformWallet`. + pub fn platform_wallet(&self) -> &Arc { + &self.wallet + } + + /// Primary receive address — the sweep destination for + /// `cleanup::teardown_one`. + pub fn primary_receive_address(&self) -> &PlatformAddress { + &self.primary_receive_address + } + + /// Network the bank is operating against. + pub fn network(&self) -> Network { + self.wallet.sdk().network + } + + /// Fund `target` with `credits` from the bank's primary + /// account. + /// + /// Recipients receive the **exact** `credits` amount; the fee + /// is deducted from the bank's input via + /// [`bank_fee_strategy`]. The bank therefore consumes + /// `credits + fee` from its own platform-addresses pool — + /// verify the bank balance is sufficiently above + /// `min_bank_credits` before calling. + /// + /// Submits the transfer immediately and returns the resulting + /// [`PlatformAddressChangeSet`]. Does NOT wait for the chain to + /// observe the credit — callers follow up with + /// [`super::wait::wait_for_balance`] on the recipient wallet. + /// Concurrent in-process calls serialise on [`FUNDING_MUTEX`] + /// to avoid nonce races. 
+ pub async fn fund_address( + &self, + target: &PlatformAddress, + credits: Credits, + ) -> FrameworkResult<PlatformAddressChangeSet> { + let _guard = FUNDING_MUTEX.lock().await; + let outputs: BTreeMap<PlatformAddress, Credits> = + std::iter::once((*target, credits)).collect(); + self.wallet + .platform() + .transfer( + DEFAULT_ACCOUNT_INDEX_PUB, + InputSelection::Auto, + outputs, + bank_fee_strategy(), + Some(PlatformVersion::latest()), + &self.signer, + ) + .await + .map_err(wallet_err) + } + + /// Resync the bank's balances. + pub async fn sync_balances(&self) -> FrameworkResult<()> { + self.wallet + .platform() + .sync_balances(None) + .await + .map(|_| ()) + .map_err(wallet_err) + } + + /// Total credits the bank currently has cached. Reflects the + /// last sync — call [`Self::sync_balances`] first for a fresh + /// view. + pub async fn total_credits(&self) -> Credits { + self.wallet.platform().total_credits().await + } +} + +fn wallet_err(err: PlatformWalletError) -> FrameworkError { + FrameworkError::Wallet(err.to_string()) +} + +/// Derive the DIP-17 platform-payment address at `index` from the +/// already-loaded `PlatformWallet`, using path +/// `m/9'/coin_type'/17'/account'/key_class'/index`. +/// +/// Bank-only helper: lets us pin the bank's sweep target to index 0 +/// without going through the address pool's "next unused" cursor. +/// Routes through [`key_wallet::Wallet::derive_public_key`] on the live +/// wallet rather than re-running BIP-32 from raw seed bytes — keeps a +/// single derivation surface. 
+async fn derive_platform_address_at_index( + wallet: &Arc<PlatformWallet>, + network: Network, + account: u32, + key_class: u32, + index: u32, +) -> FrameworkResult<PlatformAddress> { + let account_path = AccountType::PlatformPayment { account, key_class } + .derivation_path(network) + .map_err(|err| FrameworkError::Bank(format!("DIP-17 account path: {err}")))?; + let leaf = ChildNumber::from_normal_idx(index) + .map_err(|err| FrameworkError::Bank(format!("invalid child index {index}: {err}")))?; + let leaf_path = account_path.extend([leaf]); + + let pubkey = wallet + .state() + .await + .wallet() + .derive_public_key(&leaf_path) + .map_err(|err| { + FrameworkError::Bank(format!("derive_public_key at index {index}: {err}")) + })?; + let pkh = ripemd160_sha256(&pubkey.serialize()); + Ok(PlatformAddress::P2pkh(pkh)) +} diff --git a/packages/rs-platform-wallet/tests/e2e/framework/cleanup.rs b/packages/rs-platform-wallet/tests/e2e/framework/cleanup.rs new file mode 100644 index 00000000000..68fe7d04612 --- /dev/null +++ b/packages/rs-platform-wallet/tests/e2e/framework/cleanup.rs @@ -0,0 +1,434 @@ +//! Cleanup paths: startup [`sweep_orphans`] and per-test +//! [`teardown_one`]. Both reconstruct the wallet from the registry +//! seed, sync, and drain every fund source back to the bank by +//! walking the per-source-type sweep helpers. Best-effort: errors +//! are logged and the registry retains the entry for the next run. 
+ +use std::collections::BTreeMap; +use std::sync::Arc; +use std::time::Duration; + +use dpp::address_funds::{AddressFundsFeeStrategyStep, PlatformAddress}; +use dpp::fee::Credits; +use dpp::identity::signer::Signer; +use dpp::state_transition::address_funds_transfer_transition::AddressFundsTransferTransition; +use dpp::version::PlatformVersion; +use key_wallet::wallet::initialization::WalletAccountCreationOptions; +use key_wallet::Network; +use platform_wallet::wallet::persister::NoPlatformPersistence; +use platform_wallet::wallet::platform_addresses::InputSelection; +use platform_wallet::{PlatformWallet, PlatformWalletError, PlatformWalletManager}; + +use super::bank::BankWallet; +use super::registry::{EntryStatus, PersistentTestWalletRegistry, RegistryEntry, WalletSeedHash}; +use super::wallet_factory::TestWallet; +use super::{make_platform_signer, FrameworkError, FrameworkResult}; + +/// Sweep gate: a wallet is only swept if its total balance can plausibly +/// satisfy the protocol's `min_input_amount`. Below that, no input can +/// pass `address_funds` validation and the broadcast would fail anyway. +/// Pulled from `PlatformVersion` rather than a hardcoded constant so we +/// stay in lock-step with whatever the active version dictates. +fn min_input_amount(version: &PlatformVersion) -> Credits { + version.dpp.state_transitions.address_funds.min_input_amount +} + +/// Default per-step timeout for cleanup polls. +pub const CLEANUP_STEP_TIMEOUT: Duration = Duration::from_secs(60); + +/// Sweep wallets left over from prior (likely panicked) runs. +/// For each registry entry: reconstruct the wallet, sync, drain to +/// the bank if above [`min_input_amount`], then drop the entry. +/// Per-entry failures mark the entry [`EntryStatus::Failed`] for +/// next-run retry; the loop never aborts. 
+pub async fn sweep_orphans( + manager: &Arc<PlatformWalletManager<NoPlatformPersistence>>, + bank: &BankWallet, + registry: &PersistentTestWalletRegistry, + network: Network, +) -> FrameworkResult<usize> { + let orphans = registry.list_orphans(); + if orphans.is_empty() { + return Ok(0); + } + tracing::info!( + count = orphans.len(), + "sweeping orphan test wallets from prior runs" + ); + + let mut swept = 0usize; + for (hash, entry) in orphans { + match sweep_one(manager, bank, &hash, &entry, network).await { + Ok(()) => { + if let Err(err) = registry.remove(&hash) { + tracing::warn!( + wallet_id = %hex::encode(hash), + error = %err, + "swept funds but failed to drop registry entry" + ); + } + swept += 1; + } + Err(err) => { + tracing::warn!( + wallet_id = %hex::encode(hash), + error = %err, + "sweep failed; entry retained for next-run retry" + ); + let _ = registry.set_status(&hash, EntryStatus::Failed); + } + } + } + Ok(swept) +} + +async fn sweep_one( + manager: &Arc<PlatformWalletManager<NoPlatformPersistence>>, + bank: &BankWallet, + hash: &WalletSeedHash, + entry: &RegistryEntry, + network: Network, +) -> FrameworkResult<()> { + let seed_bytes: [u8; 64] = parse_seed_hex(&entry.seed_hex)?; + let wallet = manager + .create_wallet_from_seed_bytes(network, seed_bytes, WalletAccountCreationOptions::Default) + .await + .map_err(wallet_err)?; + if wallet.wallet_id() != *hash { + return Err(FrameworkError::Cleanup(format!( + "registry hash mismatch for sweep: expected {} got {}", + hex::encode(hash), + hex::encode(wallet.wallet_id()) + ))); + } + wallet.platform().initialize().await; + wallet + .platform() + .sync_balances(None) + .await + .map_err(wallet_err)?; + let signer = make_platform_signer(&seed_bytes, network)?; + + let platform_version = PlatformVersion::latest(); + let dust_gate = min_input_amount(platform_version); + let total = wallet.platform().total_credits().await; + if total >= dust_gate { + sweep_platform_addresses(&wallet, &signer, bank.primary_receive_address()).await?; + } else { + tracing::debug!( + wallet_id = %hex::encode(hash), 
total, + min_input = dust_gate, + "orphan platform total below protocol min_input_amount; skipping" + ); + } + sweep_identities(&wallet).await?; + sweep_core_addresses(&wallet).await?; + sweep_unused_core_asset_locks(&wallet).await?; + sweep_shielded(&wallet).await?; + + // Best-effort manager unregister so SPV stops tracking the + // wallet's addresses on subsequent passes. + if let Err(err) = manager.remove_wallet(hash).await { + tracing::warn!( + target: "platform_wallet::e2e::cleanup", + wallet_id = %hex::encode(hash), + error = %err, + "manager unregister failed after sweep; wallet remains tracked" + ); + } + Ok(()) +} + +/// Per-test teardown: drain back to bank, drop the registry entry, +/// and unregister from the manager. Best-effort — failures retain +/// the entry so the next startup's [`sweep_orphans`] retries. +pub async fn teardown_one( + manager: &Arc>, + bank: &BankWallet, + registry: &PersistentTestWalletRegistry, + test_wallet: &TestWallet, +) -> FrameworkResult<()> { + test_wallet.sync_balances().await?; + let platform_version = PlatformVersion::latest(); + let dust_gate = min_input_amount(platform_version); + let total = test_wallet.total_credits().await; + if total >= dust_gate { + sweep_platform_addresses( + test_wallet.platform_wallet(), + test_wallet.address_signer(), + bank.primary_receive_address(), + ) + .await?; + } else { + tracing::debug!( + wallet_id = %hex::encode(test_wallet.id()), + total, + min_input = dust_gate, + "test wallet total below protocol min_input_amount; skipping platform sweep" + ); + } + sweep_identities(test_wallet.platform_wallet()).await?; + sweep_core_addresses(test_wallet.platform_wallet()).await?; + sweep_unused_core_asset_locks(test_wallet.platform_wallet()).await?; + sweep_shielded(test_wallet.platform_wallet()).await?; + + // Drop the registry entry first so an unregister failure + // doesn't leak it; the wallet has no balance left to recover. 
+ registry.remove(&test_wallet.id())?; + if let Err(err) = manager.remove_wallet(&test_wallet.id()).await { + tracing::warn!( + target: "platform_wallet::e2e::cleanup", + wallet_id = %hex::encode(test_wallet.id()), + error = %err, + "manager unregister failed after teardown; wallet remains tracked" + ); + } + Ok(()) +} + +/// Parse the registry's hex-encoded 64-byte seed. Bad length / +/// non-hex surfaces as [`FrameworkError::Cleanup`] so the entry +/// is marked failed rather than panicking the sweep. +fn parse_seed_hex(hex_str: &str) -> FrameworkResult<[u8; 64]> { + let bytes = hex::decode(hex_str) + .map_err(|err| FrameworkError::Cleanup(format!("invalid seed hex: {err}")))?; + let arr: [u8; 64] = bytes.try_into().map_err(|v: Vec| { + FrameworkError::Cleanup(format!("seed hex length {} != 64", v.len())) + })?; + Ok(arr) +} + +fn wallet_err(err: PlatformWalletError) -> FrameworkError { + FrameworkError::Wallet(err.to_string()) +} + +/// Drain every recoverable platform address back to `bank_addr` in a +/// single transition. Inputs map = balances ≥ `min_input_amount`, +/// output = the sum, fee comes out of the bank's incoming amount via +/// `ReduceOutput(0)`. +/// +/// Tests that distribute funds across multiple addresses (PA-004b +/// dust-boundary, PA-009 min-input) leave change on every spent +/// address; the sweep must walk the full balance map. Addresses +/// below `min_input_amount` are intentionally skipped — the protocol +/// rejects any transition that includes a sub-floor input, and +/// sweeping a dust address is impossible by definition. +async fn sweep_platform_addresses( + wallet: &Arc, + signer: &S, + bank_addr: &PlatformAddress, +) -> FrameworkResult<()> +where + S: Signer + Send + Sync, +{ + let platform_version = PlatformVersion::latest(); + let candidates: Vec<(PlatformAddress, Credits)> = + wallet.platform().addresses_with_balances().await; + let SweepPlan { + inputs, + skipped_dust, + .. 
+ } = build_sweep_plan(&candidates, platform_version); + + if !skipped_dust.is_empty() { + let stranded: Credits = skipped_dust.iter().map(|(_, v)| *v).sum(); + tracing::warn!( + target: "platform_wallet::e2e::cleanup", + wallet_id = %hex::encode(wallet.wallet_id()), + stranded_count = skipped_dust.len(), + stranded_total = stranded, + min_input = min_input_amount(platform_version), + "sweep skipping addresses below min_input_amount" + ); + } + + if inputs.is_empty() { + tracing::debug!( + target: "platform_wallet::e2e::cleanup", + wallet_id = %hex::encode(wallet.wallet_id()), + "sweep_platform_addresses: no recoverable inputs; nothing to sweep" + ); + return Ok(()); + } + + let total: Credits = inputs.values().sum(); + let estimated_fee = + AddressFundsTransferTransition::estimate_min_fee(inputs.len(), 1, platform_version); + if total <= estimated_fee { + tracing::warn!( + target: "platform_wallet::e2e::cleanup", + wallet_id = %hex::encode(wallet.wallet_id()), + total, + estimated_fee, + "sweep_platform_addresses: Σ recoverable ≤ estimated fee; skipping" + ); + return Ok(()); + } + + let outputs: BTreeMap = + std::iter::once((*bank_addr, total)).collect(); + let fee_strategy = vec![AddressFundsFeeStrategyStep::ReduceOutput(0)]; + + tracing::debug!( + target: "platform_wallet::e2e::cleanup", + wallet_id = %hex::encode(wallet.wallet_id()), + total, + input_count = inputs.len(), + "sweep_platform_addresses: ReduceOutput(0) sweep" + ); + + wallet + .platform() + .transfer( + super::wallet_factory::DEFAULT_ACCOUNT_INDEX_PUB, + InputSelection::Explicit(inputs), + outputs, + fee_strategy, + Some(platform_version), + signer, + ) + .await + .map_err(wallet_err)?; + Ok(()) +} + +/// Result of partitioning the wallet's per-address balances into a +/// recoverable input set and the dust set that falls below the +/// per-input protocol floor. Output by [`build_sweep_plan`]. 
+#[derive(Debug, Default, PartialEq, Eq)] +struct SweepPlan { + inputs: BTreeMap, + skipped_dust: Vec<(PlatformAddress, Credits)>, +} + +/// Pure helper: split per-address balances into sweep inputs (balance +/// ≥ `min_input_amount`) and the dust set that would be rejected as +/// a sub-floor input. Empty / zero balances are dropped silently. +fn build_sweep_plan( + candidates: &[(PlatformAddress, Credits)], + platform_version: &PlatformVersion, +) -> SweepPlan { + let floor = min_input_amount(platform_version); + let mut inputs: BTreeMap = BTreeMap::new(); + let mut skipped_dust: Vec<(PlatformAddress, Credits)> = Vec::new(); + for (addr, balance) in candidates { + if *balance == 0 { + continue; + } + if *balance >= floor { + inputs.insert(*addr, *balance); + } else { + skipped_dust.push((*addr, *balance)); + } + } + SweepPlan { + inputs, + skipped_dust, + } +} + +/// Drain identity credit balances back to the bank identity. Noop until +/// the identity-transfer wiring lands. +// TODO(rs-platform-wallet/e2e #identity-sweep): implement once a +// Signer is wired through `TestWallet` and the +// CreditTransfer transition is reachable from this harness. +async fn sweep_identities(_wallet: &Arc) -> FrameworkResult<()> { + Ok(()) +} + +/// Drain core (Layer 1) UTXOs to the bank's core address. Noop until +/// the SPV wallet runtime is back online in this harness. +// TODO(rs-platform-wallet/e2e #core-sweep): implement once the SPV +// runtime (Task #15) lets us sign and broadcast core transactions. +async fn sweep_core_addresses(_wallet: &Arc) -> FrameworkResult<()> { + Ok(()) +} + +/// Consume unspent asset-lock outputs and refund their credits to the +/// bank. Noop until the asset-lock harness is wired up. +// TODO(rs-platform-wallet/e2e #asset-lock-sweep): walk the wallet's +// unused asset-lock proofs and either redeem-to-identity or burn back +// to bank-controlled core funds. 
+async fn sweep_unused_core_asset_locks(_wallet: &Arc) -> FrameworkResult<()> { + Ok(()) +} + +/// Drain the wallet's shielded note set to the bank's shielded address. +/// Noop until the shielded-prover harness is wired up. +// TODO(rs-platform-wallet/e2e #shielded-sweep): build a shield/unshield +// transition that empties the note set into a bank-controlled note. +async fn sweep_shielded(_wallet: &Arc) -> FrameworkResult<()> { + Ok(()) +} + +#[cfg(test)] +mod tests { + use super::*; + + fn addr(byte: u8) -> PlatformAddress { + PlatformAddress::P2pkh([byte; 20]) + } + + /// Mixed: one above the floor, one dust. The above-floor address + /// becomes the only input; the dust is reported as stranded. + #[test] + fn build_sweep_plan_drops_dust_keeps_recoverable() { + let pv = PlatformVersion::latest(); + let floor = min_input_amount(pv); + let big = addr(0x01); + let dust = addr(0x02); + let candidates = vec![(big, floor + 100), (dust, floor.saturating_sub(1))]; + let plan = build_sweep_plan(&candidates, pv); + assert_eq!(plan.inputs.len(), 1); + assert_eq!(plan.inputs.get(&big).copied(), Some(floor + 100)); + assert_eq!(plan.skipped_dust, vec![(dust, floor.saturating_sub(1))]); + } + + /// Both addresses above the floor: each becomes an input. This + /// pins the multi-input sweep path that the original addr_1-only + /// behaviour would have skipped. + #[test] + fn build_sweep_plan_keeps_two_above_floor() { + let pv = PlatformVersion::latest(); + let floor = min_input_amount(pv); + let a = addr(0x01); + let b = addr(0x02); + let candidates = vec![(a, floor + 1_000), (b, floor + 2_000)]; + let plan = build_sweep_plan(&candidates, pv); + assert_eq!(plan.inputs.len(), 2); + assert_eq!(plan.skipped_dust.len(), 0); + let total: Credits = plan.inputs.values().sum(); + assert_eq!(total, 2 * floor + 3_000); + } + + /// All addresses below the floor: no inputs, all marked dust. + /// `sweep_platform_addresses` will short-circuit with no broadcast. 
+ #[test] + fn build_sweep_plan_all_dust_yields_no_inputs() { + let pv = PlatformVersion::latest(); + let floor = min_input_amount(pv); + // Floor is small enough that this can fail on PlatformVersions + // where it's at zero — guard against that pathology. + if floor == 0 { + return; + } + let a = addr(0x01); + let b = addr(0x02); + let candidates = vec![(a, floor - 1), (b, floor / 2)]; + let plan = build_sweep_plan(&candidates, pv); + assert!(plan.inputs.is_empty()); + assert_eq!(plan.skipped_dust.len(), 2); + } + + /// Zero balances are silently dropped from both buckets; they + /// represent addresses already swept on a previous pass. + #[test] + fn build_sweep_plan_drops_zero_balances() { + let pv = PlatformVersion::latest(); + let candidates = vec![(addr(0x01), 0), (addr(0x02), 0)]; + let plan = build_sweep_plan(&candidates, pv); + assert!(plan.inputs.is_empty()); + assert!(plan.skipped_dust.is_empty()); + } +} diff --git a/packages/rs-platform-wallet/tests/e2e/framework/config.rs b/packages/rs-platform-wallet/tests/e2e/framework/config.rs new file mode 100644 index 00000000000..ee1f2cae45c --- /dev/null +++ b/packages/rs-platform-wallet/tests/e2e/framework/config.rs @@ -0,0 +1,250 @@ +//! Test framework configuration. Centralises every +//! `PLATFORM_WALLET_E2E_*` env var; loadable via [`Config::from_env`] +//! or constructed programmatically via [`Config::new`]. +//! +//! Both constructors return a fully-resolved [`Config`]: every +//! defaultable field already carries its final value (no +//! `read-then-derive` lookups left for callers). `network` is parsed +//! once into [`Network`]; `p2p_port` is resolved against the +//! network-specific default at construction time. + +use std::path::PathBuf; +use std::str::FromStr; + +use dashcore::Network; + +use super::{FrameworkError, FrameworkResult}; + +/// Environment variable names read by [`Config::from_env`]. +pub mod vars { + /// BIP-39 bank-wallet mnemonic. Required. 
+ pub const BANK_MNEMONIC: &str = "PLATFORM_WALLET_E2E_BANK_MNEMONIC"; + /// Network selector: `testnet` (default) / `mainnet` / `devnet` / `local`. + pub const NETWORK: &str = "PLATFORM_WALLET_E2E_NETWORK"; + /// Comma-separated list of DAPI addresses overriding the + /// network default. + pub const DAPI_ADDRESSES: &str = "PLATFORM_WALLET_E2E_DAPI_ADDRESSES"; + /// Minimum bank balance (credits) required at startup. + pub const MIN_BANK_CREDITS: &str = "PLATFORM_WALLET_E2E_MIN_BANK_CREDITS"; + /// Workdir base path; slot fallback adds `-N` suffixes. + pub const WORKDIR: &str = "PLATFORM_WALLET_E2E_WORKDIR"; + /// Optional override for the trusted HTTP context provider URL. + /// Defaults to the network-builtin endpoint when unset. + pub const TRUSTED_CONTEXT_URL: &str = "PLATFORM_WALLET_E2E_TRUSTED_CONTEXT_URL"; + /// Optional override for the SPV P2P port. Unset falls back to + /// the network default (mainnet 9999, testnet 19999); regtest and + /// devnet have no default and require this var. + pub const P2P_PORT: &str = "PLATFORM_WALLET_E2E_P2P_PORT"; +} + +/// Default minimum bank balance in credits. +/// +/// Set at 5x the largest single-run cost (FUNDING_CREDITS=100M + ~15M chain-time +/// fee ≈ 115M per run) following DET's safety-factor pattern (dash-evo-tool#513). +/// Keeps the bank covering several consecutive runs even with the fee underestimate +/// from platform #3040 in play. +pub const DEFAULT_MIN_BANK_CREDITS: u64 = 500_000_000; + +/// E2E framework configuration — fully resolved. +/// +/// Every field carries its final value as of construction; callers +/// don't have to re-derive defaults. `network` is parsed; `p2p_port` +/// is the resolved port (override-or-default) — `None` only when the +/// network has no default and no override was supplied (regtest / +/// devnet without explicit configuration). 
+/// +/// The `Debug` impl below is hand-written: a `derive(Debug)` would +/// print `bank_mnemonic` verbatim, which a stray +/// `tracing::info!("{config:?}")` or an `expect()` panic could leak +/// into CI logs. +#[derive(Clone)] +pub struct Config { + /// BIP-39 bank mnemonic. Required. + pub bank_mnemonic: String, + /// Active network — parsed at construction. + pub network: Network, + /// Optional DAPI address overrides; empty means use the + /// network default list. + pub dapi_addresses: Vec, + /// Minimum bank balance threshold (credits). + pub min_bank_credits: u64, + /// Workdir base path; slot fallback adds `-N` suffixes. + pub workdir_base: PathBuf, + /// Optional trusted-context-provider URL override. `None` uses + /// the per-network default; devnet requires this override. + pub trusted_context_url: Option, + /// SPV P2P port for the active network — resolved at construction + /// time from the env override or the network default. `None` only + /// when the network has no default and no override was provided + /// (regtest / devnet without explicit configuration); the SPV + /// peer-seeding path treats that as "skip and fall back to DNS + /// discovery." + pub p2p_port: Option, +} + +impl std::fmt::Debug for Config { + /// Redacts `bank_mnemonic`. Logs and panic backtraces would + /// otherwise leak the shared funding seed into CI artifacts. 
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + f.debug_struct("Config") + .field("bank_mnemonic", &"") + .field("network", &self.network) + .field("dapi_addresses", &self.dapi_addresses) + .field("min_bank_credits", &self.min_bank_credits) + .field("workdir_base", &self.workdir_base) + .field("trusted_context_url", &self.trusted_context_url) + .field("p2p_port", &self.p2p_port) + .finish() + } +} + +impl Default for Config { + fn default() -> Self { + let network = Network::Testnet; + Self { + bank_mnemonic: String::new(), + network, + dapi_addresses: Vec::new(), + min_bank_credits: DEFAULT_MIN_BANK_CREDITS, + workdir_base: default_workdir_base(), + trusted_context_url: None, + p2p_port: default_p2p_port(network), + } + } +} + +impl Config { + /// Load from environment variables, with `.env` at + /// `${CARGO_MANIFEST_DIR}/tests/.env` as a CWD-independent + /// fallback. `bank_mnemonic` is required; everything else + /// resolves to its final value via the per-field defaults. + pub fn from_env() -> FrameworkResult { + // Anchor the `.env` path at the crate's manifest dir so + // CWD doesn't change behaviour; a missing file is expected. 
+ let path: String = env!("CARGO_MANIFEST_DIR").to_owned() + "/tests/.env"; + if let Err(err) = dotenvy::from_path(&path) { + tracing::warn!( + target: "platform_wallet::e2e::config", + path = %path, + ?err, + "failed to load e2e .env (process env vars still apply)" + ); + } + + let bank_mnemonic = std::env::var(vars::BANK_MNEMONIC).map_err(|_| { + FrameworkError::Bank(format!( + "{} not set — point it at a BIP-39 testnet mnemonic with at least \ + {} pre-funded credits and re-run", + vars::BANK_MNEMONIC, + DEFAULT_MIN_BANK_CREDITS + )) + })?; + + let network = match std::env::var(vars::NETWORK) { + Ok(raw) => parse_network(&raw)?, + Err(_) => Network::Testnet, + }; + + let dapi_addresses = std::env::var(vars::DAPI_ADDRESSES) + .ok() + .map(|raw| { + raw.split(',') + .map(|s| s.trim().to_string()) + .filter(|s| !s.is_empty()) + .collect::>() + }) + .unwrap_or_default(); + + let min_bank_credits = match std::env::var(vars::MIN_BANK_CREDITS) { + Ok(raw) => raw.trim().parse::().map_err(|err| { + FrameworkError::Bank(format!( + "{} = {raw:?} is not a valid u64: {err}", + vars::MIN_BANK_CREDITS + )) + })?, + Err(_) => DEFAULT_MIN_BANK_CREDITS, + }; + + let workdir_base = std::env::var(vars::WORKDIR) + .map(PathBuf::from) + .unwrap_or_else(|_| default_workdir_base()); + + let trusted_context_url = std::env::var(vars::TRUSTED_CONTEXT_URL) + .ok() + .map(|raw| raw.trim().to_string()) + .filter(|s| !s.is_empty()); + + let p2p_port = match std::env::var(vars::P2P_PORT) { + Ok(raw) => { + let trimmed = raw.trim(); + if trimmed.is_empty() { + default_p2p_port(network) + } else { + Some(trimmed.parse::().map_err(|err| { + FrameworkError::Config(format!( + "{} = {raw:?} is not a valid u16 port: {err}", + vars::P2P_PORT + )) + })?) 
+ } + } + Err(_) => default_p2p_port(network), + }; + + Ok(Self { + bank_mnemonic, + network, + dapi_addresses, + min_bank_credits, + workdir_base, + trusted_context_url, + p2p_port, + }) + } + + /// Programmatic constructor — mirrors [`Config::from_env`] for + /// test harnesses that don't route through env vars. Returns a + /// fully-resolved config: `network` defaults to testnet and + /// `p2p_port` to the testnet default (19999). + pub fn new(bank_mnemonic: String) -> Self { + Self { + bank_mnemonic, + ..Self::default() + } + } +} + +/// `${TMPDIR}/dash-platform-wallet-e2e` — default workdir base +/// before slot-fallback. +fn default_workdir_base() -> PathBuf { + std::env::temp_dir().join("dash-platform-wallet-e2e") +} + +/// Network-default SPV P2P port. Mirrors the canonical mainnet (9999) +/// and testnet (19999) ports. Returns `None` for regtest / devnet — +/// those have site-specific ports and must be supplied via +/// [`vars::P2P_PORT`]. Used only at [`Config`] construction; callers +/// read the resolved [`Config::p2p_port`] directly. +fn default_p2p_port(network: Network) -> Option { + match network { + Network::Mainnet => Some(9999), + Network::Testnet => Some(19999), + _ => None, + } +} + +/// Parse a network string supporting the canonical dashcore names +/// plus the test-harness `local` alias for regtest and an empty +/// shorthand for testnet. Used only at [`Config`] construction; +/// callers read the resolved [`Config::network`] directly. 
+fn parse_network(s: &str) -> FrameworkResult { + let trimmed = s.trim(); + if trimmed.is_empty() { + return Ok(Network::Testnet); + } + if trimmed.eq_ignore_ascii_case("local") { + return Ok(Network::Regtest); + } + Network::from_str(trimmed) + .map_err(|e| FrameworkError::Config(format!("invalid network {trimmed:?}: {e}"))) +} diff --git a/packages/rs-platform-wallet/tests/e2e/framework/context_provider.rs b/packages/rs-platform-wallet/tests/e2e/framework/context_provider.rs new file mode 100644 index 00000000000..bd6280121e0 --- /dev/null +++ b/packages/rs-platform-wallet/tests/e2e/framework/context_provider.rs @@ -0,0 +1,105 @@ +//! SDK [`ContextProvider`] backed by the local SPV runtime. +//! +//! Currently unused: the harness wires +//! [`rs_sdk_trusted_context_provider::TrustedHttpContextProvider`] +//! instead. Kept compilable for re-enablement (Task #15). +//! +//! Bridges the synchronous `ContextProvider::get_quorum_public_key` +//! to the async SPV API via [`dash_async::block_on`], which handles +//! the no-runtime / current-thread / multi-thread flavors. +//! Data-contract and token-configuration lookups return `Ok(None)` +//! so the SDK falls back to a network fetch — quorum keys are the +//! only thing local SPV state can answer authoritatively. + +use std::sync::Arc; + +use dpp::data_contract::associated_token::token_configuration::TokenConfiguration; +use dpp::data_contract::DataContract; +use dpp::prelude::{CoreBlockHeight, Identifier}; +use dpp::version::PlatformVersion; +use platform_wallet::SpvRuntime; + +use dash_sdk::error::ContextProviderError; +use dash_sdk::platform::ContextProvider; + +/// Platform activation height returned by +/// [`SpvContextProvider::get_platform_activation_height`]. +/// +/// Hard-coded to `0` for the testnet-only e2e scope: mn_rr +/// activation on testnet sits well past any height this flow +/// compares against, so a conservative `0` is safe-by-position. 
+/// Mainnet / activation-height-sensitive flows must surface the +/// real value via [`SpvRuntime`] after `QRInfo`. +const PLATFORM_ACTIVATION_HEIGHT_TESTNET_SAFE: CoreBlockHeight = 0; + +/// SDK [`ContextProvider`] that resolves quorum public keys from the +/// local SPV runtime. +#[derive(Debug, Clone)] +pub struct SpvContextProvider { + spv_runtime: Arc, +} + +impl SpvContextProvider { + /// Wrap an [`Arc`] in a fresh provider. + pub fn new(spv_runtime: Arc) -> Self { + Self { spv_runtime } + } + + /// Borrow the underlying SPV runtime. + pub fn spv(&self) -> &Arc { + &self.spv_runtime + } +} + +impl ContextProvider for SpvContextProvider { + /// Bridge SDK proof verification to the SPV masternode-list state + /// via [`dash_async::block_on`]. + fn get_quorum_public_key( + &self, + quorum_type: u32, + quorum_hash: [u8; 32], + core_chain_locked_height: u32, + ) -> Result<[u8; 48], ContextProviderError> { + // `block_on` requires `Future: Send + 'static`; outer Result + // is the bridge error, inner is the SPV's own — both fold + // into `InvalidQuorum` for the SDK. + let spv = Arc::clone(&self.spv_runtime); + let inner = dash_async::block_on(async move { + spv.get_quorum_public_key(quorum_type, quorum_hash, core_chain_locked_height) + .await + }) + .map_err(|e| { + ContextProviderError::InvalidQuorum(format!( + "SPV quorum lookup bridge failed (type={quorum_type}, \ + height={core_chain_locked_height}): {e}" + )) + })?; + inner.map_err(|e| { + ContextProviderError::InvalidQuorum(format!( + "SPV quorum lookup failed (type={quorum_type}, \ + height={core_chain_locked_height}): {e}" + )) + }) + } + + /// Defer to the SDK's network fetch (`None` == "not cached"). + fn get_data_contract( + &self, + _id: &Identifier, + _platform_version: &PlatformVersion, + ) -> Result>, ContextProviderError> { + Ok(None) + } + + /// Defer to the SDK's network fetch (see `get_data_contract`). 
+ fn get_token_configuration( + &self, + _id: &Identifier, + ) -> Result, ContextProviderError> { + Ok(None) + } + + fn get_platform_activation_height(&self) -> Result { + Ok(PLATFORM_ACTIVATION_HEIGHT_TESTNET_SAFE) + } +} diff --git a/packages/rs-platform-wallet/tests/e2e/framework/harness.rs b/packages/rs-platform-wallet/tests/e2e/framework/harness.rs new file mode 100644 index 00000000000..91bb50ccd87 --- /dev/null +++ b/packages/rs-platform-wallet/tests/e2e/framework/harness.rs @@ -0,0 +1,184 @@ +//! Process-shared `E2eContext` initialised once per test run via +//! [`tokio::sync::OnceCell`]. Single entry point: [`E2eContext::init`] +//! wires config → workdir slot → SDK (with +//! [`TrustedHttpContextProvider`]) → manager → bank → registry → +//! startup sweep. +//! +//! SPV-based context provider currently disabled; re-enable by +//! uncommenting the SPV blocks in `Self::build` (Task #15). + +use std::fs::File; +use std::path::PathBuf; +use std::sync::Arc; + +// `SpvRuntime` is held in an `Option` for SPV re-enablement +// (Task #15); the corresponding helpers stay compilable. +use platform_wallet::wallet::persister::NoPlatformPersistence; +use platform_wallet::{PlatformEventHandler, PlatformWalletManager, SpvRuntime}; +use tokio::sync::OnceCell; +use tokio_util::sync::CancellationToken; + +use super::bank::BankWallet; +use super::cleanup; +use super::config::Config; +use super::registry::PersistentTestWalletRegistry; +use super::sdk; +use super::wait_hub::WaitEventHub; +use super::workdir; +use super::FrameworkResult; + +/// Process-shared singleton populated on first +/// [`E2eContext::init`]. +static CTX: OnceCell = OnceCell::const_new(); + +/// Process-shared context. Tests obtain a `&'static E2eContext` +/// via [`super::setup`]; lazy init enforces the +/// "one bank + one SPV runtime per process" invariant. 
+pub struct E2eContext { + pub config: Config, + pub workdir: PathBuf, + /// `flock`-held lock kept open for the context's lifetime so + /// concurrent processes pick a different slot. Dropping it + /// releases the lock. + workdir_lock: File, + pub sdk: Arc, + pub manager: Arc>, + /// `None` while the SPV-based context provider is deferred + /// (Task #15); shape kept stable for future re-enablement. + pub spv_runtime: Option>, + pub bank: BankWallet, + pub registry: PersistentTestWalletRegistry, + /// Framework-wide shutdown signal for background tasks. Not + /// tripped by individual test panics — a single failing test + /// must not cancel SPV / wait helpers for sibling tests. + pub cancel_token: CancellationToken, + /// Installed as the harness's `PlatformEventHandler`; test + /// wallets clone the `Arc` so `wait_for_balance` wakes on real + /// events instead of fixed polling. + pub wait_hub: Arc, +} + +impl E2eContext { + /// Lazily build (or reuse) the process-shared context. + /// Concurrent callers serialise inside `OnceCell` — exactly one + /// build runs. + pub async fn init() -> FrameworkResult<&'static Self> { + CTX.get_or_try_init(Self::build).await + } + + pub fn sdk(&self) -> &Arc { + &self.sdk + } + + pub fn manager(&self) -> &Arc> { + &self.manager + } + + /// Pre-funded bank wallet — the funding source for tests. + pub fn bank(&self) -> &BankWallet { + &self.bank + } + + /// Persistent test-wallet registry — every `setup` registers, + /// every `teardown` removes its entry. + pub fn registry(&self) -> &PersistentTestWalletRegistry { + &self.registry + } + + /// `None` while the SPV-based context provider is deferred + /// (Task #15). + pub fn spv(&self) -> Option<&Arc> { + self.spv_runtime.as_ref() + } + + /// Framework-shutdown signal; background helpers can `select!` + /// on it for graceful shutdown. 
+ pub fn cancel_token(&self) -> &CancellationToken { + &self.cancel_token + } + + pub fn wait_hub(&self) -> &Arc { + &self.wait_hub + } + + async fn build() -> FrameworkResult { + let config = Config::from_env()?; + + let (workdir, workdir_lock) = workdir::pick_available_workdir(&config.workdir_base)?; + + let cancel_token = CancellationToken::new(); + + let sdk = sdk::build_sdk(&config)?; + + // Persister discards changesets (testnet re-sync is fast). + // Event handler is the shared [`WaitEventHub`] so test + // helpers can await on real events instead of fixed polling. + let persister: Arc = Arc::new(NoPlatformPersistence); + let wait_hub = Arc::new(WaitEventHub::new()); + let event_handler: Arc = Arc::clone(&wait_hub) as _; + + let manager = Arc::new(PlatformWalletManager::new( + Arc::clone(&sdk), + persister, + event_handler, + )); + + // SPV deferred (Task #15) — `TrustedHttpContextProvider` + // is wired at SDK construction in `sdk::build_sdk`. To + // re-enable the SPV-backed provider, uncomment below and + // restore the `spv` / `context_provider` imports. + // + // ```rust,ignore + // const SPV_READY_TIMEOUT: Duration = Duration::from_secs(180); + // use super::context_provider::SpvContextProvider; + // use super::spv; + // // Start SPV before the bank's sync; SDK proof + // // verification needs SpvContextProvider for quorum keys. + // // Pass the SDK's live address list so SPV peers stay in + // // lock-step with the DAPI endpoints the SDK is actually + // // talking to (port-swapped to the effective P2P port). + // let spv_runtime = spv::start_spv(&manager, &config, &workdir, sdk.address_list()).await?; + // spv::wait_for_mn_list_synced(&spv_runtime, SPV_READY_TIMEOUT).await?; + // // `set_context_provider` is `ArcSwap`-backed, safe to + // // call after construction. 
+ // sdk.set_context_provider(SpvContextProvider::new( + // Arc::clone(&spv_runtime), + // )); + // ``` + let spv_runtime: Option> = None; + + // Panics on under-funded balance — see `BankWallet::load`. + let bank = BankWallet::load(&manager, &config).await?; + + let registry = PersistentTestWalletRegistry::open(workdir.join("test_wallets.json"))?; + + // Best-effort startup sweep; failures don't abort init. + let network = bank.network(); + match cleanup::sweep_orphans(&manager, &bank, ®istry, network).await { + Ok(0) => {} + Ok(n) => tracing::info!( + target: "platform_wallet::e2e::harness", + count = n, + "startup sweep recovered orphan wallets from prior runs" + ), + Err(err) => tracing::warn!( + target: "platform_wallet::e2e::harness", + error = %err, + "startup sweep encountered errors; continuing" + ), + } + + Ok(E2eContext { + config, + workdir, + workdir_lock, + sdk, + manager, + spv_runtime, + bank, + registry, + cancel_token, + wait_hub, + }) + } +} diff --git a/packages/rs-platform-wallet/tests/e2e/framework/mod.rs b/packages/rs-platform-wallet/tests/e2e/framework/mod.rs new file mode 100644 index 00000000000..177f0db472d --- /dev/null +++ b/packages/rs-platform-wallet/tests/e2e/framework/mod.rs @@ -0,0 +1,263 @@ +//! E2E test harness for `rs-platform-wallet`. +//! +//! Test authors call [`setup`] to obtain a [`SetupGuard`] holding a +//! fresh-seeded [`wallet_factory::TestWallet`] and the +//! process-shared [`E2eContext`] (bank, SDK, registry). After the +//! test body, call [`SetupGuard::teardown`] to drain the wallet +//! back to the bank. +//! +//! ```ignore +//! let s = setup().await?; +//! let addr = s.test_wallet.next_unused_address().await?; +//! s.ctx.bank().fund_address(&addr, 50_000_000).await?; +//! wait_for_balance(&s.test_wallet, &addr, 50_000_000, ...).await?; +//! s.teardown().await?; +//! ``` +//! +//! Convenience imports: [`prelude`]. 
+ +#![allow(dead_code)] + +pub mod bank; +pub mod cleanup; +pub mod config; +pub mod context_provider; +pub mod harness; +pub mod registry; +pub mod sdk; +pub mod signer; +pub mod spv; +pub mod wait; +pub mod wait_hub; +pub mod wallet_factory; +pub mod workdir; + +use key_wallet::gap_limit::DIP17_GAP_LIMIT; +use key_wallet::Network; +use simple_signer::signer::SimpleSigner; + +/// DIP-17 default account / key-class for clear-funds platform +/// payments. Matches `WalletAccountCreationOptions::Default`. +const DEFAULT_ACCOUNT_INDEX: u32 = 0; +const DEFAULT_KEY_CLASS: u32 = 0; + +/// Build a [`SimpleSigner`] populated with the DIP-17 platform-payment +/// gap window for `seed_bytes` on `network`. Pins to +/// `account=0`/`key_class=0` to match +/// `WalletAccountCreationOptions::Default`. `SimpleSigner` already +/// implements `Signer` directly, so callers can pass +/// the returned value straight to `PlatformAddressWallet::transfer`. +pub(super) fn make_platform_signer( + seed_bytes: &[u8; 64], + network: Network, +) -> FrameworkResult { + SimpleSigner::from_seed_for_platform_address_account( + seed_bytes, + network, + DEFAULT_ACCOUNT_INDEX, + DEFAULT_KEY_CLASS, + DIP17_GAP_LIMIT, + ) + .map_err(|err| FrameworkError::Wallet(format!("simple-signer: {err}"))) +} + +/// Common imports for test authors. +pub mod prelude { + pub use super::config::Config; + pub use super::harness::E2eContext; + pub use super::wait::{wait_for, wait_for_balance}; + pub use super::wait_hub::WaitEventHub; + pub use super::{setup, FrameworkError, FrameworkResult, SetupGuard}; +} + +pub use wallet_factory::SetupGuard; + +use harness::E2eContext; + +/// Errors surfaced by the e2e framework. +#[derive(Debug, thiserror::Error)] +pub enum FrameworkError { + /// Placeholder returned by paths that surface an underlying + /// error through tracing; the static string names the call site. 
+ #[error("e2e framework not yet implemented: {0}")] + NotImplemented(&'static str), + + /// Filesystem error — registry IO, workdir creation, lockfile. + /// Message is preformatted with the offending path. + #[error("e2e framework I/O: {0}")] + Io(String), + + /// Wallet error from `platform_wallet`. Stored as String to + /// avoid pulling upstream-error feature flags into the test crate. + #[error("e2e framework wallet error: {0}")] + Wallet(String), + + /// Bank-wallet failure (under-funded, missing mnemonic). + /// Distinct from `Wallet` so CI can treat operator-actionable + /// bank issues separately from transient sync failures. + #[error("e2e bank wallet: {0}")] + Bank(String), + + /// Cleanup / teardown error. Non-fatal — the registry retains + /// the wallet so the next startup's sweep recovers it. + #[error("e2e cleanup: {0}")] + Cleanup(String), + + /// Configuration / env-parsing failure surfaced by helpers in + /// [`config`]. + #[error("e2e config: {0}")] + Config(String), + + /// SDK construction / wiring failure (e.g. `SdkBuilder::build`, + /// `TrustedHttpContextProvider::new`, DAPI address parsing). + /// Carries the upstream error stringified so CI logs and any + /// `Result`-matching caller see the underlying cause. + #[error("e2e sdk: {0}")] + Sdk(String), + + /// SPV (`dash-spv`) construction / sync failure. Distinct from + /// [`Self::Sdk`] so SPV-only deferred-runtime issues are easy to + /// filter when the SPV path comes back online (Task #15). + #[error("e2e spv: {0}")] + Spv(String), +} + +/// Convenience alias used across the harness. +pub type FrameworkResult = Result; + +/// One-shot setup entry point. +/// +/// Lazily initialises the process-shared [`E2eContext`] (bank, SDK, +/// registry) on first call and returns a [`SetupGuard`] wrapping a +/// fresh-seeded [`wallet_factory::TestWallet`]. 
+/// +/// The wallet is **registered in the persistent registry BEFORE +/// being returned**, so a panic between `setup` and the test's +/// `SetupGuard::teardown` leaves a recoverable trail for the next +/// process startup's sweep. +/// +/// Errors: any failure during context init, wallet creation, or +/// registry insert is surfaced as [`FrameworkError`]. +pub async fn setup() -> FrameworkResult { + let ctx = E2eContext::init().await?; + + let (seed_bytes, seed_hex) = wallet_factory::fresh_seed(); + + // Build the wallet first so we can derive the id for the + // registry entry; on failure there is nothing to persist. + let network = ctx.bank().network(); + let test_wallet = wallet_factory::TestWallet::create( + ctx.manager(), + seed_bytes, + network, + std::sync::Arc::clone(ctx.wait_hub()), + ) + .await?; + + // Persist BEFORE handing the wallet to the test body so a panic + // mid-test surfaces to the next process startup's sweep. + let entry = registry::RegistryEntry { + seed_hex, + created_at: std::time::SystemTime::now(), + status: registry::EntryStatus::Active, + note: None, + }; + ctx.registry().insert(test_wallet.id(), entry)?; + + Ok(SetupGuard { + ctx, + test_wallet, + teardown_called: false, + }) +} + +/// Multi-identity counterpart of [`setup`]. Builds a fresh test +/// wallet, funds `n` distinct platform addresses from the bank, and +/// registers an identity at DIP-9 indices `0..n` on each. +/// +/// Returns a [`MultiIdentitySetupGuard`] wrapping the original +/// [`SetupGuard`] plus the `Vec` so test +/// authors can drive multi-identity flows (DP-002 contact requests, +/// ID-003 transfers) without re-deriving the registration boilerplate. +/// +/// Funding policy: every identity is registered with `funding_per` +/// credits charged to a freshly-derived address, so each call costs +/// `n * (funding_per + register_fee)` credits from the bank. 
Tests +/// with tight balance windows should pass conservative values — +/// `30_000_000` per identity is the reference; the bank's +/// `min_bank_credits` floor must cover `n * funding_per` plus +/// per-tx fees. +pub async fn setup_with_n_identities( + n: u32, + funding_per: dpp::fee::Credits, +) -> FrameworkResult { + use std::time::Duration; + + use super::framework::wait::wait_for_balance; + + let base = setup().await?; + let mut identities = Vec::with_capacity(n as usize); + + // Each identity gets a distinct funding address so the bank's + // FUNDING_MUTEX serialises funding without contending on the + // same destination. We fund + observe before registration so + // `register_from_addresses` finds the credits already + // committed to platform. + for identity_index in 0..n { + let funding_addr = base.test_wallet.next_unused_address().await?; + base.ctx + .bank() + .fund_address(&funding_addr, funding_per) + .await?; + wait_for_balance( + &base.test_wallet, + &funding_addr, + funding_per, + Duration::from_secs(60), + ) + .await?; + + let registered = base + .test_wallet + .register_identity_from_addresses(funding_addr, funding_per, identity_index) + .await?; + identities.push(registered); + } + + // `register_from_addresses` consumes the funding addresses without + // refreshing the cached `(balance, nonce)` pair on each — by design + // (see `register_from_addresses.rs` cache TODO). Without a sync the + // returned wallet would still report each address at its + // pre-registration balance, and a follow-up auto-select would pick + // already-spent inputs. One sync at the end refreshes balances and + // nonces together for every consumed address in a single round-trip. + base.test_wallet.sync_balances().await?; + + Ok(MultiIdentitySetupGuard { base, identities }) +} + +/// Guard returned by [`setup_with_n_identities`]. Wraps the base +/// [`SetupGuard`] plus the freshly-registered identities. 
+/// +/// Calling [`MultiIdentitySetupGuard::teardown`] consumes the guard +/// and forwards to the inner [`SetupGuard::teardown`], which sweeps +/// platform-address balances. Identity-credit cleanup is deferred to +/// a follow-up PR — see the `#identity-sweep` TODO in +/// [`cleanup::sweep_identities`]. Until then, every identity +/// registered here keeps its post-registration credit balance. +pub struct MultiIdentitySetupGuard { + /// Inner single-wallet guard. Holds the [`E2eContext`] and the + /// shared [`wallet_factory::TestWallet`] every identity is + /// derived from. + pub base: SetupGuard, + /// Identities registered during setup, ordered by DIP-9 index + /// `0..n`. + pub identities: Vec, +} + +impl MultiIdentitySetupGuard { + /// Forward to the inner [`SetupGuard::teardown`]. + pub async fn teardown(self) -> FrameworkResult<()> { + self.base.teardown().await + } +} diff --git a/packages/rs-platform-wallet/tests/e2e/framework/registry.rs b/packages/rs-platform-wallet/tests/e2e/framework/registry.rs new file mode 100644 index 00000000000..3e06a7b3fc1 --- /dev/null +++ b/packages/rs-platform-wallet/tests/e2e/framework/registry.rs @@ -0,0 +1,286 @@ +//! Persistent JSON-backed test-wallet registry at +//! `/test_wallets.json`. Every `setup` inserts the seed +//! BEFORE returning the wallet so a panic between `setup` and +//! `teardown` leaves a recoverable trail for the next-run +//! [`super::cleanup::sweep_orphans`]. +//! +//! Persistence: write-temp + rename via [`tempfile::NamedTempFile`] +//! (atomic on POSIX, `MOVEFILE_REPLACE_EXISTING` on Windows). NOT +//! fsync'd — the next-run sweep tolerates lost updates. A corrupt +//! JSON file is logged and treated as "no orphans". 
+ +use std::collections::HashMap; +use std::fs; +use std::io; +use std::path::{Path, PathBuf}; +use std::time::SystemTime; + +use parking_lot::Mutex; +use serde::{Deserialize, Serialize}; + +use super::{FrameworkError, FrameworkResult}; + +/// Stable wallet identifier (mirrors `platform_wallet::WalletId`). +/// Stored hex-encoded in JSON. +pub type WalletSeedHash = [u8; 32]; + +/// Lifecycle status of a registry entry. `Active` is steady state; +/// `Failed` flags a sweep error for next-startup retry. +/// +/// A transient `Sweeping` state was considered for cross-process +/// progress signalling but isn't wired up — the per-slot workdir +/// lock already serialises the only writer that touches a given +/// registry path, so a second process never sees an in-flight sweep +/// from a peer. If we ever share a slot we'll need to add it back. +#[derive(Debug, Clone, Copy, PartialEq, Eq, Default, Serialize, Deserialize)] +pub enum EntryStatus { + #[default] + Active, + Failed, +} + +/// One row in the registry. Holds enough to reconstruct the wallet +/// via `manager.create_wallet_from_seed_bytes`. +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct RegistryEntry { + /// Hex-encoded 64-byte seed. + pub seed_hex: String, + /// Insertion time — debug breadcrumb only. + pub created_at: SystemTime, + pub status: EntryStatus, + /// Free-form note (typically the test name). + pub note: Option, +} + +/// JSON-backed registry guarded by a process-local mutex. File is +/// rewritten via write-temp + rename on every mutation; see module +/// docs for the durability / `fsync` contract. +pub struct PersistentTestWalletRegistry { + path: PathBuf, + state: Mutex>, +} + +impl PersistentTestWalletRegistry { + /// Open or create the registry. Missing file → empty map; + /// corrupt JSON is logged and replaced with an empty map + /// (manual cleanup may be needed). On-disk keys are + /// hex-encoded; in-memory keys are raw `[u8; 32]`. 
+ pub fn open(path: PathBuf) -> FrameworkResult { + let state = match fs::read(&path) { + Ok(bytes) if bytes.is_empty() => HashMap::new(), + Ok(bytes) => serde_json::from_slice::>(&bytes) + .map(decode_keys) + .unwrap_or_else(|err| { + tracing::warn!( + "test-wallet registry at {} is corrupt ({err}); starting fresh — \ + orphans from prior runs may need manual cleanup", + path.display() + ); + HashMap::new() + }), + Err(err) if err.kind() == io::ErrorKind::NotFound => HashMap::new(), + Err(err) => { + return Err(FrameworkError::Io(format!( + "reading registry {}: {err}", + path.display() + ))); + } + }; + Ok(Self { + path, + state: Mutex::new(state), + }) + } + + /// Path of the backing JSON file. + pub fn path(&self) -> &Path { + &self.path + } + + /// Insert (or overwrite) an entry, persisting before mutating + /// the in-memory map: the snapshot is built off the current state, + /// written to disk, and only swapped in once the write succeeds. + /// A failed write therefore leaves both memory and disk on the + /// previous state — preserving the module's "persist before + /// returning" contract under partial failure. + /// Last-write-wins on duplicate. + pub fn insert(&self, hash: WalletSeedHash, entry: RegistryEntry) -> FrameworkResult<()> { + let snapshot = { + let guard = self.state.lock(); + let mut snapshot = guard.clone(); + snapshot.insert(hash, entry); + snapshot + }; + atomic_write_json(&self.path, &snapshot)?; + *self.state.lock() = snapshot; + Ok(()) + } + + /// Remove an entry. Missing-key is OK — teardown is best-effort. + /// Persists before mutating in-memory state (see [`Self::insert`]). + pub fn remove(&self, hash: &WalletSeedHash) -> FrameworkResult<()> { + let snapshot = { + let guard = self.state.lock(); + let mut snapshot = guard.clone(); + snapshot.remove(hash); + snapshot + }; + atomic_write_json(&self.path, &snapshot)?; + *self.state.lock() = snapshot; + Ok(()) + } + + /// Update [`EntryStatus`]; no-op if the entry is absent. 
Persists + /// before mutating in-memory state (see [`Self::insert`]). + pub fn set_status(&self, hash: &WalletSeedHash, status: EntryStatus) -> FrameworkResult<()> { + let snapshot = { + let guard = self.state.lock(); + let mut snapshot = guard.clone(); + if let Some(entry) = snapshot.get_mut(hash) { + entry.status = status; + } + snapshot + }; + atomic_write_json(&self.path, &snapshot)?; + *self.state.lock() = snapshot; + Ok(()) + } + + /// Snapshot of all entries (Active / Failed). The startup sweep + /// reconstructs each wallet, attempts to drain its credits, and + /// drops the entry on success; a transient sweep failure flips + /// the entry to `Failed` so the next run retries. + pub fn list_orphans(&self) -> Vec<(WalletSeedHash, RegistryEntry)> { + self.state + .lock() + .iter() + .map(|(hash, entry)| (*hash, entry.clone())) + .collect() + } + + /// Status of the entry for `wallet_id`, or `None` if no entry + /// exists. Cheaper than [`Self::list_orphans`] for tests that + /// only need to assert on a single entry's lifecycle. + pub fn get_status(&self, wallet_id: WalletSeedHash) -> Option { + self.state.lock().get(&wallet_id).map(|entry| entry.status) + } +} + +/// Write-temp + rename JSON persist. On Windows +/// [`tempfile::NamedTempFile::persist`] uses `MoveFileEx` with +/// `MOVEFILE_REPLACE_EXISTING` so an existing destination is +/// overwritten (plain `std::fs::rename` fails there on overwrite). +/// No `fsync` — see module docs. 
+fn atomic_write_json( + path: &Path, + state: &HashMap, +) -> FrameworkResult<()> { + use std::io::Write; + + let on_disk = encode_keys(state); + let bytes = serde_json::to_vec_pretty(&on_disk).map_err(|err| { + FrameworkError::Io(format!("serialising registry to {}: {err}", path.display())) + })?; + let parent = path.parent().ok_or_else(|| { + FrameworkError::Io(format!( + "registry path {} has no parent directory", + path.display() + )) + })?; + fs::create_dir_all(parent) + .map_err(|err| FrameworkError::Io(format!("creating {}: {err}", parent.display())))?; + + // Same-filesystem temp file is required for atomic rename; + // `persist` (not `persist_noclobber`) overwrites cross-platform. + let mut tmp = tempfile::NamedTempFile::new_in(parent).map_err(|err| { + FrameworkError::Io(format!("creating temp file in {}: {err}", parent.display())) + })?; + tmp.write_all(&bytes).map_err(|err| { + FrameworkError::Io(format!("writing temp file {}: {err}", tmp.path().display())) + })?; + tmp.as_file_mut().flush().map_err(|err| { + FrameworkError::Io(format!( + "flushing temp file {}: {err}", + tmp.path().display() + )) + })?; + tmp.persist(path).map_err(|err| { + FrameworkError::Io(format!("persisting temp file -> {}: {err}", path.display())) + })?; + Ok(()) +} + +/// In-memory `[u8; 32]` keys → hex strings for JSON. +fn encode_keys(state: &HashMap) -> HashMap { + state + .iter() + .map(|(hash, entry)| (hex::encode(hash), entry.clone())) + .collect() +} + +/// Inverse of [`encode_keys`] — drop malformed hex keys silently +/// so one bad entry doesn't take the whole registry down. 
+fn decode_keys(state: HashMap) -> HashMap { + state + .into_iter() + .filter_map(|(hex_key, entry)| { + let bytes = hex::decode(&hex_key).ok()?; + let hash: WalletSeedHash = bytes.try_into().ok()?; + Some((hash, entry)) + }) + .collect() +} + +#[cfg(test)] +mod tests { + use super::*; + + fn tmp_dir() -> tempfile::TempDir { + tempfile::tempdir().expect("tempdir") + } + + fn entry() -> RegistryEntry { + RegistryEntry { + seed_hex: "00".repeat(64), + created_at: SystemTime::UNIX_EPOCH, + status: EntryStatus::Active, + note: Some("test".into()), + } + } + + #[test] + fn missing_file_opens_empty() { + let dir = tmp_dir(); + let reg = PersistentTestWalletRegistry::open(dir.path().join("test_wallets.json")).unwrap(); + assert!(reg.list_orphans().is_empty()); + } + + #[test] + fn insert_remove_round_trip_persists() { + let dir = tmp_dir(); + let path = dir.path().join("test_wallets.json"); + let hash: WalletSeedHash = [7u8; 32]; + + { + let reg = PersistentTestWalletRegistry::open(path.clone()).unwrap(); + reg.insert(hash, entry()).unwrap(); + } + // Reopen; entry must survive. + { + let reg = PersistentTestWalletRegistry::open(path.clone()).unwrap(); + assert_eq!(reg.list_orphans().len(), 1); + reg.remove(&hash).unwrap(); + } + let reg = PersistentTestWalletRegistry::open(path).unwrap(); + assert!(reg.list_orphans().is_empty()); + } + + #[test] + fn corrupt_file_falls_back_to_empty() { + let dir = tmp_dir(); + let path = dir.path().join("test_wallets.json"); + std::fs::write(&path, b"not valid json").unwrap(); + let reg = PersistentTestWalletRegistry::open(path).unwrap(); + assert!(reg.list_orphans().is_empty()); + } +} diff --git a/packages/rs-platform-wallet/tests/e2e/framework/sdk.rs b/packages/rs-platform-wallet/tests/e2e/framework/sdk.rs new file mode 100644 index 00000000000..d452d925cd9 --- /dev/null +++ b/packages/rs-platform-wallet/tests/e2e/framework/sdk.rs @@ -0,0 +1,123 @@ +//! `dash_sdk::Sdk` construction. [`build_sdk`] wires +//! 
[`TrustedHttpContextProvider`] (the SPV-backed alternative is +//! deferred — Task #15) and resolves DAPI addresses from +//! [`Config::dapi_addresses`] or — for mainnet/testnet — delegates to +//! `SdkBuilder::new_testnet()` / `new_mainnet()` (PR #3570 wires those +//! up against `dash_network_seeds::evo_seeds(network)` upstream). +//! Provider URL override: `PLATFORM_WALLET_E2E_TRUSTED_CONTEXT_URL`. + +use std::num::NonZeroUsize; +use std::sync::Arc; + +use dash_sdk::dapi_client::AddressList; +use dash_sdk::{Sdk, SdkBuilder}; +use dashcore::Network; +use rs_sdk_trusted_context_provider::TrustedHttpContextProvider; + +use super::config::Config; +use super::{FrameworkError, FrameworkResult}; + +/// LRU quorum-cache size for [`TrustedHttpContextProvider`]. +const TRUSTED_CONTEXT_CACHE_SIZE: usize = 256; + +/// Build a fresh `Sdk` with [`TrustedHttpContextProvider`] wired +/// (network-builtin URL, or [`Config::trusted_context_url`] override). +pub fn build_sdk(config: &Config) -> FrameworkResult> { + let network = config.network; + let builder = build_sdk_builder(config, network)?; + + let cache_size = NonZeroUsize::new(TRUSTED_CONTEXT_CACHE_SIZE).expect("cache size > 0"); + let context_provider = build_trusted_context_provider(network, config, cache_size)?; + + let sdk = builder + .with_context_provider(context_provider) + .build() + .map_err(|e| { + tracing::error!(target: "platform_wallet::e2e::sdk", "SdkBuilder::build failed: {e}"); + FrameworkError::Sdk(format!("SdkBuilder::build failed: {e}")) + })?; + + Ok(Arc::new(sdk)) +} + +/// Build the trusted HTTP context provider, honoring the optional +/// `trusted_context_url` override. 
+fn build_trusted_context_provider( + network: Network, + config: &Config, + cache_size: NonZeroUsize, +) -> FrameworkResult { + let result = match &config.trusted_context_url { + Some(url) => { + tracing::info!( + target: "platform_wallet::e2e::sdk", + %url, + "using TrustedHttpContextProvider with operator-supplied URL" + ); + TrustedHttpContextProvider::new_with_url(network, url.clone(), cache_size) + } + None => { + tracing::info!( + target: "platform_wallet::e2e::sdk", + ?network, + "using TrustedHttpContextProvider with network-builtin URL" + ); + TrustedHttpContextProvider::new(network, None, cache_size) + } + }; + result.map_err(|e| { + tracing::error!( + target: "platform_wallet::e2e::sdk", + "TrustedHttpContextProvider construction failed: {e}" + ); + FrameworkError::Sdk(format!( + "TrustedHttpContextProvider construction failed: {e}" + )) + }) +} + +/// Pick the right [`SdkBuilder`] constructor based on [`Config::dapi_addresses`] +/// and `network`. Honours an explicit operator-supplied address list first; +/// otherwise mainnet/testnet delegate to `SdkBuilder::new_testnet()` / +/// `new_mainnet()` (PR #3570) which derive their bootstrap list from +/// `dash_network_seeds::evo_seeds(network)`. Devnet/local without an explicit +/// address list surfaces an error rather than guessing. 
+fn build_sdk_builder(config: &Config, network: Network) -> FrameworkResult { + if !config.dapi_addresses.is_empty() { + let addresses = parse_addresses(config.dapi_addresses.iter().map(String::as_str))?; + return Ok(SdkBuilder::new(addresses).with_network(network)); + } + + match network { + Network::Testnet => Ok(SdkBuilder::new_testnet()), + Network::Mainnet => Ok(SdkBuilder::new_mainnet()), + other => { + tracing::error!( + target: "platform_wallet::e2e::sdk", + "no DAPI addresses configured for {other:?} — set {} to a comma-separated list of DAPI URLs", + super::config::vars::DAPI_ADDRESSES, + ); + Err(FrameworkError::Config(format!( + "no DAPI addresses configured for {other:?} — set {} to a comma-separated list of DAPI URLs", + super::config::vars::DAPI_ADDRESSES, + ))) + } + } +} + +fn parse_addresses<'a, I>(iter: I) -> FrameworkResult +where + I: IntoIterator, +{ + iter.into_iter() + .map(|s| { + s.parse().map_err(|e| { + tracing::error!( + target: "platform_wallet::e2e::sdk", + "invalid DAPI address {s:?}: {e}" + ); + FrameworkError::Config(format!("invalid DAPI address {s:?}: {e}")) + }) + }) + .collect() +} diff --git a/packages/rs-platform-wallet/tests/e2e/framework/signer.rs b/packages/rs-platform-wallet/tests/e2e/framework/signer.rs new file mode 100644 index 00000000000..34d058912e0 --- /dev/null +++ b/packages/rs-platform-wallet/tests/e2e/framework/signer.rs @@ -0,0 +1,212 @@ +//! Seed-backed `Signer` for the e2e harness, plus a +//! [`derive_identity_key`] helper for building placeholder identity keys. +//! +//! Identities use DIP-9 +//! (`m/9'/coin_type'/5'/0'/ECDSA'/identity_index'/key_index'`). +//! +//! Note: `Signer` is provided directly by `SimpleSigner` +//! (built via `super::make_platform_signer`) and no longer needs a wrapper. 
+ +use async_trait::async_trait; +use dpp::address_funds::AddressWitness; +use dpp::dashcore::signer as core_signer; +use dpp::identity::identity_public_key::accessors::v0::IdentityPublicKeyGettersV0; +use dpp::identity::signer::Signer; +use dpp::identity::{IdentityPublicKey, KeyID, KeyType, Purpose, SecurityLevel}; +use dpp::platform_value::BinaryData; +use dpp::util::hash::ripemd160_sha256; +use dpp::ProtocolError; +use key_wallet::Network; +use simple_signer::signer::SimpleSigner; + +use super::{FrameworkError, FrameworkResult}; + +/// Default gap window pre-derived at construction +/// (matches `key-wallet`'s `DIP17_GAP_LIMIT`). +pub const DEFAULT_GAP_LIMIT: u32 = 20; + +/// Seed-backed [`Signer`] for one DIP-9 identity slot. +/// +/// Composes [`SimpleSigner::from_seed_for_identity`], which populates +/// `inner.address_private_keys` with `(ripemd160_sha256(pubkey), secret)` +/// pairs for `key_index ∈ 0..gap_limit`. The trait impl looks up by +/// hashing the [`IdentityPublicKey::data`] field — matching the same +/// hash used at construction. +#[derive(Clone, Debug)] +pub struct SeedBackedIdentitySigner { + inner: SimpleSigner, + identity_index: u32, +} + +impl SeedBackedIdentitySigner { + /// Build a signer for the DIP-9 identity at `identity_index`, + /// pre-deriving `key_index ∈ 0..DEFAULT_GAP_LIMIT` ECDSA auth keys. + pub fn new( + seed_bytes: &[u8; 64], + network: Network, + identity_index: u32, + ) -> FrameworkResult { + Self::new_with_gap(seed_bytes, network, identity_index, DEFAULT_GAP_LIMIT) + } + + /// Same as [`Self::new`] with an explicit gap window. The window + /// counts identity-key indices, not address indices. 
+ pub fn new_with_gap( + seed_bytes: &[u8; 64], + network: Network, + identity_index: u32, + gap_limit: u32, + ) -> FrameworkResult { + let inner = + SimpleSigner::from_seed_for_identity(seed_bytes, network, identity_index, gap_limit) + .map_err(|err| { + FrameworkError::Wallet(format!("SeedBackedIdentitySigner: {err}")) + })?; + Ok(Self { + inner, + identity_index, + }) + } + + /// DIP-9 identity index this signer is bound to. + pub fn identity_index(&self) -> u32 { + self.identity_index + } + + /// Number of pre-derived identity keys currently in the cache. + pub fn cached_key_count(&self) -> usize { + self.inner.address_private_keys.len() + } +} + +#[async_trait] +impl Signer for SeedBackedIdentitySigner { + async fn sign( + &self, + key: &IdentityPublicKey, + data: &[u8], + ) -> Result { + match key.key_type() { + KeyType::ECDSA_SECP256K1 | KeyType::ECDSA_HASH160 => {} + other => { + return Err(ProtocolError::Generic(format!( + "SeedBackedIdentitySigner: unsupported key type {other:?}" + ))); + } + } + let secret = lookup_identity_secret(&self.inner, key)?; + let signature = core_signer::sign(data, &secret)?; + Ok(signature.to_vec().into()) + } + + async fn sign_create_witness( + &self, + _key: &IdentityPublicKey, + _data: &[u8], + ) -> Result { + // Identity-key signers never produce platform-address witnesses — + // the DPP signer trait forces both methods on a single impl. + Err(ProtocolError::Generic( + "SeedBackedIdentitySigner: AddressWitness is not produced by an identity signer".into(), + )) + } + + fn can_sign_with(&self, key: &IdentityPublicKey) -> bool { + match identity_key_lookup(key) { + Some(pkh) => self.inner.address_private_keys.contains_key(&pkh), + None => false, + } + } +} + +/// Compute the `address_private_keys` lookup key for an +/// [`IdentityPublicKey`]. 
+/// +/// `SimpleSigner::from_seed_for_identity` keys its cache by +/// `ripemd160_sha256(compressed_pubkey)` — so for `ECDSA_SECP256K1` we +/// hash `key.data()` (the raw pubkey), but for `ECDSA_HASH160` +/// `key.data()` is **already** the 20-byte hash and re-hashing would +/// produce `hash160(hash160(pubkey))`, which would never match. +/// Returns `None` for unsupported key types. +fn identity_key_lookup(key: &IdentityPublicKey) -> Option<[u8; 20]> { + match key.key_type() { + KeyType::ECDSA_SECP256K1 => Some(ripemd160_sha256(key.data().as_slice())), + KeyType::ECDSA_HASH160 => key.data().as_slice().try_into().ok(), + _ => None, + } +} + +/// Resolve an [`IdentityPublicKey`] to its pre-derived 32-byte secret, +/// or surface a [`ProtocolError`] naming the missing fingerprint. +#[allow(clippy::result_large_err)] +fn lookup_identity_secret( + inner: &SimpleSigner, + key: &IdentityPublicKey, +) -> Result<[u8; 32], ProtocolError> { + let pkh = identity_key_lookup(key).ok_or_else(|| { + ProtocolError::Generic(format!( + "SeedBackedIdentitySigner: unsupported key type {:?}", + key.key_type() + )) + })?; + inner + .address_private_keys + .get(&pkh) + .copied() + .ok_or_else(|| { + ProtocolError::Generic(format!( + "SeedBackedIdentitySigner: identity key {} not in pre-derived gap window", + hex::encode(pkh) + )) + }) +} + +/// Build a fully-formed [`IdentityPublicKey`] for a placeholder +/// identity at the DIP-9 slot +/// `m/9'/coin_type'/5'/0'/ECDSA'/identity_index'/key_index'`. +/// +/// Top-level helper — not bound to a [`SeedBackedIdentitySigner`] +/// instance — so call sites can build a placeholder identity from a +/// seed without instantiating the signer first. 
The returned key has +/// `id = key_index as KeyID` (the canonical convention at +/// registration — DPP assigns key ids sequentially starting at 0), +/// `read_only = false`, `disabled_at = None`, `contract_bounds = None`, +/// `key_type = ECDSA_SECP256K1` (the only DIP-9 derivation type this +/// helper supports). +pub fn derive_identity_key( + seed: &[u8; 64], + network: Network, + identity_index: u32, + key_index: u32, + purpose: Purpose, + security_level: SecurityLevel, +) -> FrameworkResult { + use dpp::identity::identity_public_key::v0::IdentityPublicKeyV0; + use key_wallet::wallet::root_extended_keys::RootExtendedPrivKey; + use platform_wallet::wallet::identity::network::derive_ecdsa_identity_auth_keypair_from_master; + + let root_priv = RootExtendedPrivKey::new_master(seed).map_err(|err| { + FrameworkError::Wallet(format!( + "derive_identity_key: invalid seed for root xpriv: {err}" + )) + })?; + let master = root_priv.to_extended_priv_key(network); + let derived = + derive_ecdsa_identity_auth_keypair_from_master(&master, network, identity_index, key_index) + .map_err(|err| { + FrameworkError::Wallet(format!( + "derive_identity_key: derive ({identity_index}, {key_index}): {err}" + )) + })?; + let v0 = IdentityPublicKeyV0 { + id: key_index as KeyID, + purpose, + security_level, + contract_bounds: None, + key_type: KeyType::ECDSA_SECP256K1, + read_only: false, + data: BinaryData::new(derived.public_key.to_vec()), + disabled_at: None, + }; + Ok(IdentityPublicKey::V0(v0)) +} diff --git a/packages/rs-platform-wallet/tests/e2e/framework/spv.rs b/packages/rs-platform-wallet/tests/e2e/framework/spv.rs new file mode 100644 index 00000000000..066037713db --- /dev/null +++ b/packages/rs-platform-wallet/tests/e2e/framework/spv.rs @@ -0,0 +1,287 @@ +//! SPV runtime startup and readiness wait. +//! +//! Currently unused: the harness wires +//! [`rs_sdk_trusted_context_provider::TrustedHttpContextProvider`] +//! instead. Kept compilable for re-enablement (Task #15). +//! 
+//! [`start_spv`] spawns the SPV client; [`wait_for_mn_list_synced`] +//! polls until the masternode-list manager reaches +//! `SyncState::Synced`. The harness passes a 180s deadline (warm +//! cache); cold-cache runs need [`COLD_CACHE_TIMEOUT_FLOOR`] (600s) +//! and emit info-level progress logs every +//! [`PROGRESS_LOG_INTERVAL`] for debuggability. + +use std::net::{IpAddr, SocketAddr}; +use std::path::Path; +use std::sync::Arc; +use std::time::{Duration, Instant}; + +use dash_sdk::dapi_client::AddressList; +use dash_spv::client::config::MempoolStrategy; +use dash_spv::sync::{ProgressPercentage, SyncState}; +use dash_spv::types::ValidationMode; +use dash_spv::ClientConfig; +use dashcore::Network; +use platform_wallet::{changeset::PlatformWalletPersistence, PlatformWalletManager, SpvRuntime}; + +use super::config::Config; +use super::{FrameworkError, FrameworkResult}; + +/// Polling interval for [`wait_for_mn_list_synced`]. +const READINESS_POLL_INTERVAL: Duration = Duration::from_millis(500); + +/// Cold-cache floor for [`wait_for_mn_list_synced`] — caller's 180s +/// timeout is sufficient warm but too short for cold testnet +/// (headers + filters + QRInfo). Matches `tests/spv_sync.rs`. +const COLD_CACHE_TIMEOUT_FLOOR: Duration = Duration::from_secs(600); + +/// Period for "still waiting" progress logs. +const PROGRESS_LOG_INTERVAL: Duration = Duration::from_secs(30); + +/// Spawn the SPV client backing the harness's +/// [`PlatformWalletManager`]. Storage is anchored under +/// `/spv-data` where `workdir` is the slot the harness +/// already locked via [`super::workdir::pick_available_workdir`] — +/// concurrent processes get distinct slots and therefore distinct +/// SPV stores, so RocksDB never sees cross-process contention. +/// Returns the same handle as [`PlatformWalletManager::spv_arc`]; +/// shut it down via [`SpvRuntime::stop`]. +/// +/// `address_list` is the SDK's live DAPI address list (typically +/// `sdk.address_list()`). 
P2P peers are seeded from those same +/// IPs with the effective P2P port — keeping a single source of +/// truth instead of forking from `dash_network_seeds` and risking +/// drift between SDK-tracked and SPV-tracked endpoints. +pub async fn start_spv

( + manager: &Arc>, + config: &Config, + workdir: &Path, + address_list: &AddressList, +) -> FrameworkResult> +where + P: PlatformWalletPersistence + 'static, +{ + let spv = manager.spv_arc(); + let client_config = build_client_config(config, workdir, address_list)?; + + spv.spawn_in_background(client_config); + tracing::info!( + target: "platform_wallet::e2e::spv", + network = ?config.network, + "SPV runtime spawned in background" + ); + + Ok(spv) +} + +/// Block until the SPV mn-list manager reports `Synced`, or the +/// effective timeout (`timeout.max(COLD_CACHE_TIMEOUT_FLOOR)`) +/// elapses. Polls every [`READINESS_POLL_INTERVAL`] and emits an +/// info-level pipeline snapshot every [`PROGRESS_LOG_INTERVAL`] so +/// cold-cache hangs are debuggable from default-level logs. +pub async fn wait_for_mn_list_synced(spv: &SpvRuntime, timeout: Duration) -> FrameworkResult<()> { + let effective_timeout = timeout.max(COLD_CACHE_TIMEOUT_FLOOR); + if effective_timeout != timeout { + tracing::info!( + target: "platform_wallet::e2e::spv", + requested = ?timeout, + effective = ?effective_timeout, + "raising mn-list-sync timeout to cold-cache floor" + ); + } + + let start = Instant::now(); + let deadline = start + effective_timeout; + let mut last_height: Option = None; + let mut last_state: Option = None; + let mut next_progress_log = start + PROGRESS_LOG_INTERVAL; + + loop { + let progress = spv.sync_progress().await; + let mn_snapshot = progress + .as_ref() + .and_then(|p| p.masternodes().ok().cloned()); + + if let Some(mn) = mn_snapshot.as_ref() { + let height = mn.current_height(); + let state = mn.state(); + if Some(height) != last_height || Some(state) != last_state { + tracing::debug!( + target: "platform_wallet::e2e::spv", + state = ?state, + current_height = height, + target_height = mn.target_height(), + elapsed = ?start.elapsed(), + "mn-list sync progress" + ); + last_height = Some(height); + last_state = Some(state); + } + if matches!(state, SyncState::Synced) { + 
tracing::info!( + target: "platform_wallet::e2e::spv", + current_height = height, + elapsed = ?start.elapsed(), + "mn-list synced" + ); + return Ok(()); + } + if matches!(state, SyncState::Error) { + tracing::error!( + target: "platform_wallet::e2e::spv", + "mn-list sync entered Error state" + ); + return Err(FrameworkError::Spv( + "wait_for_mn_list_synced: mn-list entered Error state".to_string(), + )); + } + } + + // Periodic "still waiting" snapshot at info level so + // cold-cache runs show where the time is going. + let now = Instant::now(); + if now >= next_progress_log { + log_pipeline_snapshot(progress.as_ref(), start.elapsed(), effective_timeout); + next_progress_log = now + PROGRESS_LOG_INTERVAL; + } + + if now >= deadline { + log_pipeline_snapshot(progress.as_ref(), start.elapsed(), effective_timeout); + tracing::error!( + target: "platform_wallet::e2e::spv", + "timed out after {effective_timeout:?} waiting for mn-list sync" + ); + return Err(FrameworkError::Spv(format!( + "wait_for_mn_list_synced: timed out after {effective_timeout:?}" + ))); + } + + tokio::time::sleep(READINESS_POLL_INTERVAL).await; + } +} + +/// One-line info-level pipeline-snapshot log used by +/// [`wait_for_mn_list_synced`]. 
+fn log_pipeline_snapshot( + progress: Option<&dash_spv::sync::SyncProgress>, + elapsed: Duration, + timeout: Duration, +) { + let Some(p) = progress else { + tracing::info!( + target: "platform_wallet::e2e::spv", + ?elapsed, + ?timeout, + "still waiting for mn-list sync (no SPV progress yet)" + ); + return; + }; + + let headers = p + .headers() + .ok() + .map(|h| (h.state(), h.current_height(), h.target_height())); + let filter_headers = p + .filter_headers() + .ok() + .map(|f| (f.state(), f.current_height(), f.target_height())); + let filters = p + .filters() + .ok() + .map(|f| (f.state(), f.current_height(), f.target_height())); + let mn = p + .masternodes() + .ok() + .map(|m| (m.state(), m.current_height(), m.target_height())); + + tracing::info!( + target: "platform_wallet::e2e::spv", + ?elapsed, + ?timeout, + ?headers, + ?filter_headers, + ?filters, + ?mn, + "still waiting for mn-list sync" + ); +} + +/// Build the SPV [`ClientConfig`] for `config.network`. Storage +/// under `/spv-data` (the slot-locked dir, NOT +/// `workdir_base`), full validation, bloom-filter mempool tracking, +/// and DAPI peers (extracted from `address_list`) seeded with the +/// effective P2P port — sticks to the SDK's live endpoints to skip +/// DNS-discovered peers that lack compact-block-filter support. 
+fn build_client_config( + config: &Config, + workdir: &Path, + address_list: &AddressList, +) -> FrameworkResult { + let network = config.network; + + let storage_path = workdir.join("spv-data"); + std::fs::create_dir_all(&storage_path).map_err(|e| { + tracing::error!( + target: "platform_wallet::e2e::spv", + "failed to create SPV storage dir {}: {e}", + storage_path.display() + ); + FrameworkError::Spv(format!( + "failed to create SPV storage dir {}: {e}", + storage_path.display() + )) + })?; + + let mut client_config = ClientConfig::new(network) + .with_storage_path(storage_path) + .with_validation_mode(ValidationMode::Full) + .with_start_height(0) + .with_mempool_tracking(MempoolStrategy::BloomFilter); + + seed_p2p_peers(&mut client_config, config, address_list); + + client_config.validate().map_err(|e| { + tracing::error!( + target: "platform_wallet::e2e::spv", + "invalid SPV ClientConfig: {e}" + ); + FrameworkError::Spv(format!("invalid SPV ClientConfig: {e}")) + })?; + + Ok(client_config) +} + +/// Seed the SPV `ClientConfig` with P2P peers derived from the SDK's +/// live `AddressList`. Each address contributes its host IP paired +/// with [`Config::p2p_port`] (already resolved to override-or-default +/// at config construction time). Non-IP hostnames (which +/// `address.uri().host()` can return for DNS targets) fall through to +/// the SPV's own DNS discovery rather than being added as numeric +/// peers. +/// +/// If `Config::p2p_port` is `None` (regtest / devnet without an +/// explicit override) no peers are seeded — the operator must supply +/// [`vars::P2P_PORT`](super::config::vars::P2P_PORT) for those. 
+fn seed_p2p_peers(client_config: &mut ClientConfig, config: &Config, address_list: &AddressList) { + let Some(port) = config.p2p_port else { + tracing::debug!( + target: "platform_wallet::e2e::spv", + network = ?config.network, + "no SPV P2P port configured (neither {} nor a known network default); \ + skipping peer seeding — SPV will fall back to DNS discovery", + super::config::vars::P2P_PORT, + ); + return; + }; + + for address in address_list.get_live_addresses() { + let Some(host) = address.uri().host() else { + continue; + }; + // SPV's `add_peer` takes a numeric `SocketAddr`; non-IP hosts + // (DNS names) are left for the SPV client's discovery loop. + if let Ok(ip) = host.parse::() { + client_config.add_peer(SocketAddr::new(ip, port)); + } + } +} diff --git a/packages/rs-platform-wallet/tests/e2e/framework/wait.rs b/packages/rs-platform-wallet/tests/e2e/framework/wait.rs new file mode 100644 index 00000000000..d7e0dd86890 --- /dev/null +++ b/packages/rs-platform-wallet/tests/e2e/framework/wait.rs @@ -0,0 +1,242 @@ +//! Async waiters for e2e test conditions. +//! +//! [`wait_for_balance`] is event-driven on the harness's shared +//! [`super::wait_hub::WaitEventHub`] with a +//! [`BACKSTOP_WAKE_INTERVAL`] safety timeout for idle-chain / +//! no-peer scenarios. [`wait_for`] is the generic polling fallback +//! for conditions that can't hook into the event hub. + +use std::future::Future; +use std::time::{Duration, Instant}; + +use dash_sdk::platform::Fetch; +use dash_sdk::Sdk; +use dpp::address_funds::PlatformAddress; +use dpp::fee::Credits; +use dpp::identity::accessors::IdentityGettersV0; +use dpp::identity::Identity; +use dpp::prelude::Identifier; + +use super::wallet_factory::TestWallet; +use super::{FrameworkError, FrameworkResult}; + +/// Backstop wake interval for [`wait_for_balance`] — bounds the +/// wall clock when no events arrive (idle chain, no peers). 
+pub const BACKSTOP_WAKE_INTERVAL: Duration = Duration::from_secs(2); + +/// Default poll interval for [`wait_for`]. +pub const DEFAULT_POLL_INTERVAL: Duration = Duration::from_millis(500); + +/// Generic polling helper for conditions that aren't tied to the +/// event hub. +/// +/// Calls `poll` every [`DEFAULT_POLL_INTERVAL`] until it returns +/// `Some(T)` or `timeout` elapses. The current in-flight future is +/// allowed to resolve before the timeout error is returned — no +/// cancellation mid-attempt. Returns +/// [`FrameworkError::Cleanup`] on timeout. +pub async fn wait_for(mut poll: F, timeout: Duration) -> FrameworkResult +where + F: FnMut() -> Fut, + Fut: Future>, +{ + let deadline = Instant::now() + timeout; + loop { + if let Some(value) = poll().await { + return Ok(value); + } + if Instant::now() >= deadline { + return Err(FrameworkError::Cleanup(format!( + "wait_for timed out after {timeout:?}" + ))); + } + tokio::time::sleep(DEFAULT_POLL_INTERVAL).await; + } +} + +/// Wait for `addr`'s balance on `test_wallet` to reach at least +/// `expected`, syncing on every wake. +/// +/// Event-driven on [`TestWallet::wait_hub`]; a +/// [`BACKSTOP_WAKE_INTERVAL`] cap keeps idle-chain / no-peer +/// scenarios making progress. Sync errors are logged at `debug` and +/// treated as transient — the next event (or backstop wake) retries. +/// The `Notified` future is captured BEFORE the sync to avoid +/// dropping a notification that fires mid-sync. Returns +/// [`FrameworkError::Cleanup`] on `timeout`. +pub async fn wait_for_balance( + test_wallet: &TestWallet, + addr: &PlatformAddress, + expected: Credits, + timeout: Duration, +) -> FrameworkResult<()> { + let start = Instant::now(); + let deadline = Instant::now() + timeout; + + loop { + // Capture `Notified` BEFORE the sync so a notification + // arriving mid-sync isn't lost; pin + `as_mut()` lets us + // re-await the same future across timeouts. 
+ let notified = test_wallet.wait_hub().notified(); + tokio::pin!(notified); + + match test_wallet.sync_balances().await { + Ok(()) => { + let balances = test_wallet.balances().await; + let current = balances.get(addr).copied().unwrap_or(0); + if current >= expected { + tracing::info!( + target: "platform_wallet::e2e::wait", + addr = ?addr, + observed = current, + elapsed = ?start.elapsed(), + "balance reached target" + ); + return Ok(()); + } + tracing::debug!( + target: "platform_wallet::e2e::wait", + addr = ?addr, + current, + expected, + "balance below target; waiting on event hub" + ); + } + Err(err) => tracing::debug!( + target: "platform_wallet::e2e::wait", + error = %err, + "sync_balances during wait_for_balance failed; retrying" + ), + } + + let remaining = deadline.saturating_duration_since(Instant::now()); + if remaining.is_zero() { + return Err(FrameworkError::Cleanup(format!( + "wait_for_balance timed out after {timeout:?} \ + (addr={addr:?} expected={expected})" + ))); + } + // Backstop wake on idle chains; real activity wakes us + // earlier via the `Notified` future. + let cap = std::cmp::min(remaining, BACKSTOP_WAKE_INTERVAL); + let _ = tokio::time::timeout(cap, notified.as_mut()).await; + } +} + +/// Wait for an on-chain identity balance to reach at least `expected`. +/// +/// Polls `Identity::fetch(sdk, identity_id)` every +/// [`BACKSTOP_WAKE_INTERVAL`] and returns the observed balance when +/// it meets the threshold. Network errors during polling are treated +/// as transient (logged at `debug`); a missing identity (the SDK +/// returns `None`) is treated as "not yet visible" and re-polled. 
+pub async fn wait_for_identity_balance( + sdk: &Sdk, + identity_id: Identifier, + expected: Credits, + timeout: Duration, +) -> FrameworkResult { + let start = Instant::now(); + let deadline = Instant::now() + timeout; + + loop { + match Identity::fetch(sdk, identity_id).await { + Ok(Some(identity)) => { + let balance = identity.balance(); + if balance >= expected { + tracing::info!( + target: "platform_wallet::e2e::wait", + ?identity_id, + observed = balance, + expected, + elapsed = ?start.elapsed(), + "identity balance reached target" + ); + return Ok(balance); + } + tracing::debug!( + target: "platform_wallet::e2e::wait", + ?identity_id, + current = balance, + expected, + "identity balance below target" + ); + } + Ok(None) => tracing::debug!( + target: "platform_wallet::e2e::wait", + ?identity_id, + "identity not yet visible on chain" + ), + Err(err) => tracing::debug!( + target: "platform_wallet::e2e::wait", + error = %err, + "fetch:: failed during wait_for_identity_balance" + ), + } + + let remaining = deadline.saturating_duration_since(Instant::now()); + if remaining.is_zero() { + return Err(FrameworkError::Cleanup(format!( + "wait_for_identity_balance timed out after {timeout:?} \ + (identity_id={identity_id:?} expected={expected})" + ))); + } + // Cap the sleep against the remaining budget so a sub-2s + // `timeout` doesn't overshoot by up to `BACKSTOP_WAKE_INTERVAL`. + tokio::time::sleep(std::cmp::min(remaining, BACKSTOP_WAKE_INTERVAL)).await; + } +} + +/// Wait for a DPNS `.dash` registration to become visible to +/// resolvers. +/// +/// Polls [`Sdk::resolve_dpns_name`] every [`BACKSTOP_WAKE_INTERVAL`] +/// until it returns `Some(..)` or the timeout elapses. Returns the +/// resolved owning identity id on success. Test authors typically +/// pair this with the wallet's `register_name_with_external_signer` +/// call so the assertion side of the test waits on observable +/// propagation, not just on the state-transition's broadcast +/// acknowledgement. 
+pub async fn wait_for_dpns_name_visible( + sdk: &Sdk, + name: &str, + timeout: Duration, +) -> FrameworkResult { + let start = Instant::now(); + let deadline = Instant::now() + timeout; + + loop { + match sdk.resolve_dpns_name(name).await { + Ok(Some(id)) => { + tracing::info!( + target: "platform_wallet::e2e::wait", + name, + elapsed = ?start.elapsed(), + "DPNS name visible" + ); + return Ok(id); + } + Ok(None) => tracing::debug!( + target: "platform_wallet::e2e::wait", + name, + "DPNS name not yet visible" + ), + Err(err) => tracing::debug!( + target: "platform_wallet::e2e::wait", + name, + error = %err, + "DPNS resolve failed during wait_for_dpns_name_visible" + ), + } + + let remaining = deadline.saturating_duration_since(Instant::now()); + if remaining.is_zero() { + return Err(FrameworkError::Cleanup(format!( + "wait_for_dpns_name_visible timed out after {timeout:?} (name={name:?})" + ))); + } + // Cap the sleep against the remaining budget so a sub-2s + // `timeout` doesn't overshoot by up to `BACKSTOP_WAKE_INTERVAL`. + tokio::time::sleep(std::cmp::min(remaining, BACKSTOP_WAKE_INTERVAL)).await; + } +} diff --git a/packages/rs-platform-wallet/tests/e2e/framework/wait_hub.rs b/packages/rs-platform-wallet/tests/e2e/framework/wait_hub.rs new file mode 100644 index 00000000000..faa1019c285 --- /dev/null +++ b/packages/rs-platform-wallet/tests/e2e/framework/wait_hub.rs @@ -0,0 +1,74 @@ +//! Bridges `PlatformEventHandler` callbacks to async waiters. +//! +//! [`WaitEventHub`] is installed as the harness's +//! `PlatformEventHandler`. Every SPV / wallet / platform-address +//! sync event calls [`Notify::notify_waiters`]; helpers like +//! [`super::wait::wait_for_balance`] capture `Notified` BEFORE +//! polling so notifications arriving mid-sync aren't lost. +//! +//! Ignored: `on_progress` (per-header-batch noise) and `on_error` +//! (surfaced through tracing; no testable state change). 
+ +use platform_wallet::events::{EventHandler, PlatformEventHandler, WalletEvent}; +use platform_wallet::PlatformAddressSyncSummary; +use tokio::sync::futures::Notified; +use tokio::sync::Notify; + +/// `Notify`-based hub that fans test-relevant events out to async +/// waiters. +/// +/// One instance per [`super::harness::E2eContext`]; clone the `Arc` +/// into every [`super::wallet_factory::TestWallet`] via +/// [`super::harness::E2eContext::wait_hub`]. +pub struct WaitEventHub { + notify: Notify, +} + +impl WaitEventHub { + /// Build an empty hub. + pub fn new() -> Self { + Self { + notify: Notify::new(), + } + } + + /// Future that resolves the next time *any* relevant event + /// fires. Pin (e.g. `tokio::pin!`) before awaiting so + /// notifications arriving between registration and await aren't + /// dropped. + pub fn notified(&self) -> Notified<'_> { + self.notify.notified() + } + + /// Wake every registered waiter. Test-only nudge for non-event + /// state changes (e.g. manual cache pokes). + pub fn notify_all(&self) { + self.notify.notify_waiters(); + } +} + +impl Default for WaitEventHub { + fn default() -> Self { + Self::new() + } +} + +impl EventHandler for WaitEventHub { + fn on_sync_event(&self, _event: &dash_spv::sync::SyncEvent) { + self.notify.notify_waiters(); + } + + fn on_network_event(&self, _event: &dash_spv::network::NetworkEvent) { + self.notify.notify_waiters(); + } + + fn on_wallet_event(&self, _event: &WalletEvent) { + self.notify.notify_waiters(); + } +} + +impl PlatformEventHandler for WaitEventHub { + fn on_platform_address_sync_completed(&self, _summary: &PlatformAddressSyncSummary) { + self.notify.notify_waiters(); + } +} diff --git a/packages/rs-platform-wallet/tests/e2e/framework/wallet_factory.rs b/packages/rs-platform-wallet/tests/e2e/framework/wallet_factory.rs new file mode 100644 index 00000000000..450d0813920 --- /dev/null +++ b/packages/rs-platform-wallet/tests/e2e/framework/wallet_factory.rs @@ -0,0 +1,731 @@ +//! 
Test-wallet factory plus the [`SetupGuard`] returned by +//! [`super::setup`]. Every wallet is registered in the persistent +//! registry BEFORE returning to the test body, so a panic between +//! `setup` and `teardown` leaves a recoverable trail for the next +//! startup's sweep. + +use std::collections::BTreeMap; +use std::sync::Arc; +use std::time::{Duration, SystemTime}; + +use dpp::address_funds::{AddressFundsFeeStrategy, AddressFundsFeeStrategyStep, PlatformAddress}; +use dpp::fee::Credits; +use dpp::identity::accessors::IdentityGettersV0; +use dpp::identity::v0::IdentityV0; +use dpp::identity::{Identity, IdentityPublicKey, KeyID, Purpose, SecurityLevel}; +use dpp::prelude::Identifier; +use dpp::version::PlatformVersion; +use key_wallet::account::account_collection::PlatformPaymentAccountKey; +use key_wallet::wallet::initialization::{ + PlatformPaymentAccountSpec, WalletAccountCreationOptions, +}; +use key_wallet::Network; +use platform_wallet::wallet::persister::NoPlatformPersistence; +use platform_wallet::wallet::platform_addresses::InputSelection; +use platform_wallet::{ + PlatformAddressChangeSet, PlatformWallet, PlatformWalletError, PlatformWalletManager, +}; +use rand::rngs::OsRng; +use rand::RngCore; + +use simple_signer::signer::SimpleSigner; + +use super::harness::E2eContext; +use super::registry::{EntryStatus, PersistentTestWalletRegistry, RegistryEntry, WalletSeedHash}; +use super::signer::{derive_identity_key, SeedBackedIdentitySigner}; +use super::wait::wait_for_identity_balance; +use super::wait_hub::WaitEventHub; +use super::{make_platform_signer, FrameworkError, FrameworkResult}; + +/// DIP-17 default PlatformPayment account spec — pinned to +/// `PlatformPaymentAccountSpec` field defaults so a struct-shape change +/// upstream fails to compile here. 
+const DEFAULT_PLATFORM_PAYMENT_ACCOUNT_SPEC: PlatformPaymentAccountSpec = + PlatformPaymentAccountSpec { + account: 0, + key_class: 0, + }; + +pub(super) const DEFAULT_ACCOUNT_INDEX_PUB: u32 = DEFAULT_PLATFORM_PAYMENT_ACCOUNT_SPEC.account; +pub(super) const DEFAULT_KEY_CLASS_PUB: u32 = DEFAULT_PLATFORM_PAYMENT_ACCOUNT_SPEC.key_class; + +/// `PlatformPaymentAccountKey` for the default DIP-17 account, derived +/// from the canonical [`PlatformPaymentAccountSpec`] in `key_wallet`. +fn default_platform_payment_account_key() -> PlatformPaymentAccountKey { + let PlatformPaymentAccountSpec { account, key_class } = PlatformPaymentAccountSpec::default(); + PlatformPaymentAccountKey { account, key_class } +} + +/// Per-test wallet handle. Exposes the high-level operations test +/// cases reach for (`next_unused_address`, `transfer`, `balances`, +/// `sync_balances`) without leaking the underlying `PlatformWallet` +/// surface. +pub struct TestWallet { + seed_bytes: [u8; 64], + pub(crate) wallet: Arc, + signer: SimpleSigner, + /// Cloned from the [`E2eContext`]; backs + /// [`super::wait::wait_for_balance`]. + wait_hub: Arc, +} + +impl std::fmt::Debug for TestWallet { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + f.debug_struct("TestWallet") + .field("wallet_id", &hex::encode(self.wallet.wallet_id())) + .finish_non_exhaustive() + } +} + +impl TestWallet { + /// Create a fresh-seeded test wallet, register with the + /// manager, and eagerly initialise its platform-address + /// provider so `next_unused_address` / `transfer` work + /// immediately on return. + /// + /// The caller passes `seed_bytes` (typically via `OsRng`) so the + /// registry can persist them BEFORE the wallet is returned — + /// a crashed test still has a recoverable record. 
+ pub async fn create( + manager: &Arc>, + seed_bytes: [u8; 64], + network: Network, + wait_hub: Arc, + ) -> FrameworkResult { + let wallet = manager + .create_wallet_from_seed_bytes( + network, + seed_bytes, + WalletAccountCreationOptions::Default, + ) + .await + .map_err(wallet_err)?; + // Force the lazy platform-address init now so test code + // doesn't see a surprise first-use latency hit. + wallet.platform().initialize().await; + let signer = make_platform_signer(&seed_bytes, network)?; + Ok(Self { + seed_bytes, + wallet, + signer, + wait_hub, + }) + } + + /// Stable wallet id used as the registry key. + pub fn id(&self) -> WalletSeedHash { + self.wallet.wallet_id() + } + + /// 64-byte seed used to derive this wallet (persisted in the + /// registry so a sweep can reconstruct the wallet). + pub fn seed_bytes(&self) -> [u8; 64] { + self.seed_bytes + } + + /// Underlying `PlatformWallet` — for tests that reach into + /// identity / token / core APIs. + pub fn platform_wallet(&self) -> &Arc { + &self.wallet + } + + /// Seed-backed address signer used by `transfer`; tests that + /// broadcast transitions via the SDK directly can pass it in. + /// Implements `Signer` directly. + pub fn address_signer(&self) -> &SimpleSigner { + &self.signer + } + + /// Process-shared event hub — backs + /// [`super::wait::wait_for_balance`]. + pub fn wait_hub(&self) -> &Arc { + &self.wait_hub + } + + /// Next unused receive address on the wallet's default + /// platform-payment account. Pool advances only after a sync + /// observes an inbound credit on the prior address; a freshly + /// returned address has balance `0` until the next sync sees it + /// funded. Returns a new address if the gap window is exhausted. 
+ pub async fn next_unused_address(&self) -> FrameworkResult { + self.wallet + .platform() + .next_unused_receive_address(default_platform_payment_account_key()) + .await + .map_err(wallet_err) + } + + /// Run a BLAST sync pass and refresh balances for every + /// tracked address. + pub async fn sync_balances(&self) -> FrameworkResult<()> { + self.wallet + .platform() + .sync_balances(None) + .await + .map(|_| ()) + .map_err(wallet_err) + } + + /// Snapshot of cached balances per tracked address. Reflects + /// the last `sync_balances` — call it first if you need a fresh + /// view. + pub async fn balances(&self) -> BTreeMap { + self.wallet + .platform() + .addresses_with_balances() + .await + .into_iter() + .collect() + } + + /// Total credits across every tracked address. + pub async fn total_credits(&self) -> Credits { + self.wallet.platform().total_credits().await + } + + /// Transfer credits to one or more outputs. Auto-selects inputs + /// from the default account and uses [`default_fee_strategy`] + /// (reduce output #0). `outputs` maps each recipient address + /// to its credit amount. + pub async fn transfer( + &self, + outputs: BTreeMap, + ) -> FrameworkResult { + self.wallet + .platform() + .transfer( + DEFAULT_ACCOUNT_INDEX_PUB, + InputSelection::Auto, + outputs, + default_fee_strategy(), + Some(PlatformVersion::latest()), + &self.signer, + ) + .await + .map_err(wallet_err) + } + + /// Like [`Self::transfer`] but with an explicit input list + /// (`InputSelection::Explicit`). Used by tests that need to + /// drive the SDK's address-funds path without the wallet's + /// `auto_select_inputs` step — typically the negative variants + /// of PA-002 that probe insufficient-funds behaviour on a + /// caller-chosen input set. 
+ pub async fn transfer_with_inputs( + &self, + outputs: BTreeMap, + inputs: BTreeMap, + ) -> FrameworkResult { + self.wallet + .platform() + .transfer( + DEFAULT_ACCOUNT_INDEX_PUB, + InputSelection::Explicit(inputs), + outputs, + default_fee_strategy(), + Some(PlatformVersion::latest()), + &self.signer, + ) + .await + .map_err(wallet_err) + } + + /// Like [`Self::transfer_with_inputs`] but additionally returns + /// the canonical bytes of an `AddressFundsTransferTransition` + /// built with the same inputs / outputs / fee strategy. + /// + /// Used by replay-safety tests (PA-006): re-submit the captured + /// bytes via `sdk.broadcast_state_transition` and assert the + /// platform rejects the duplicate. The captured bytes are taken + /// from a sibling build (separate nonce fetch, separate signing + /// pass) — they are NOT byte-equal to the broadcast transition + /// because ECDSA signing is non-deterministic (no RFC 6979 enforced + /// here). Both transitions share identical address nonces: the + /// sibling capture never broadcasts, so on-chain state between the + /// two builds is unchanged. For PA-006 this means re-broadcast is + /// rejected on nonce-duplicate detection (not content-hash duplicate + /// detection); assertions should target the nonce-duplicate + /// rejection reason, or capture bytes from the production submission + /// so the replayed transition shares both nonce and signature. + /// + /// The caller's `inputs` map supplies the **set of input addresses**; + /// per-address amounts are recomputed by [`balance_explicit_inputs`] + /// so that `Σ inputs == Σ outputs` (the protocol's strict balance + /// check on `AddressFundsTransferTransition`). With + /// `[ReduceOutput(0)]`, the chain-time fee is taken from output 0 + /// at execution; the encoded transition itself must still balance + /// pre-fee. 
Callers may pass `address.balance` as a placeholder — + /// it is only used as a relative weight when distributing across + /// multiple input addresses. + pub async fn transfer_capturing_st_bytes( + &self, + outputs: BTreeMap, + inputs: BTreeMap, + ) -> FrameworkResult<(PlatformAddressChangeSet, Vec)> { + use dash_sdk::platform::transition::address_inputs::{fetch_inputs_with_nonce, nonce_inc}; + use dpp::serialization::PlatformSerializable; + use dpp::state_transition::address_funds_transfer_transition::methods::AddressFundsTransferTransitionMethodsV0; + use dpp::state_transition::address_funds_transfer_transition::AddressFundsTransferTransition; + + let platform_version = PlatformVersion::latest(); + let balanced_inputs = balance_explicit_inputs(&inputs, &outputs, platform_version)?; + + // Sibling build for byte capture. Fetches on-chain nonces and + // bumps them via the public SDK helpers, then signs + serializes. + // The transition is NEVER broadcast — `transfer_with_inputs` + // below does its own nonce fetch + sign + broadcast. + let inputs_with_nonce = fetch_inputs_with_nonce(self.wallet.sdk(), &balanced_inputs) + .await + .map_err(|err| FrameworkError::Wallet(format!("nonce fetch: {err}")))?; + let inputs_with_nonce = nonce_inc(inputs_with_nonce); + + let st = AddressFundsTransferTransition::try_from_inputs_with_signer( + inputs_with_nonce, + outputs.clone(), + default_fee_strategy(), + &self.signer, + Default::default(), + platform_version, + ) + .await + .map_err(|err| FrameworkError::Wallet(format!("st build: {err}")))?; + let bytes = PlatformSerializable::serialize_to_bytes(&st) + .map_err(|err| FrameworkError::Wallet(format!("st serialize: {err}")))?; + + // Production transfer with the same explicit inputs. Wallet + // caches + chain state advance per the canonical path. + let cs = self.transfer_with_inputs(outputs, balanced_inputs).await?; + Ok((cs, bytes)) + } + + /// Network the wallet operates against. Mirrors `wallet.sdk().network`. 
+ fn network(&self) -> Network { + self.wallet.sdk().network + } + + /// Register a new identity, funded entirely from this wallet's + /// platform-address balances. + /// + /// The helper: + /// 1. Accepts a caller-provided `funding_address` (the caller is + /// responsible for funding it — typically via + /// `bank.fund_address` + [`super::wait::wait_for_balance`] + /// before this call). No pre-check is performed; passing an + /// under-funded address surfaces as a registration failure + /// downstream rather than a clear error here. + /// 2. Derives MASTER + HIGH ECDSA auth keys at DIP-9 slot + /// `(identity_index, 0)` and `(identity_index, 1)`. + /// 3. Builds a placeholder [`Identity`] populated with those + /// two keys. + /// 4. Calls + /// [`IdentityWallet::register_from_addresses`](platform_wallet::wallet::identity::IdentityWallet::register_from_addresses) + /// with the funding map `{addr_1 → funding}`. + /// 5. Waits up to [`DEFAULT_IDENTITY_VISIBILITY_TIMEOUT`] for + /// the on-chain balance to reach the post-registration + /// threshold. + pub async fn register_identity_from_addresses( + &self, + funding_address: PlatformAddress, + funding: Credits, + identity_index: u32, + ) -> FrameworkResult { + let network = self.network(); + let identity_signer = Arc::new(SeedBackedIdentitySigner::new( + &self.seed_bytes, + network, + identity_index, + )?); + + // Slot 0 → MASTER, slot 1 → HIGH. Match the DET / DPNS + // register_name pattern: MASTER is required for identity + // mutation, HIGH covers signing for most state transitions. + let master_key = derive_identity_key( + &self.seed_bytes, + network, + identity_index, + 0, + Purpose::AUTHENTICATION, + SecurityLevel::MASTER, + )?; + let high_key = derive_identity_key( + &self.seed_bytes, + network, + identity_index, + 1, + Purpose::AUTHENTICATION, + SecurityLevel::HIGH, + )?; + + // Build the placeholder identity. 
`id` is recomputed from + // the input-address map by the SDK at submit time; we set + // it to `Identifier::default()` per the wallet API contract. + use dpp::identity::identity_public_key::accessors::v0::IdentityPublicKeyGettersV0; + let mut public_keys: BTreeMap = BTreeMap::new(); + public_keys.insert(master_key.id(), master_key.clone()); + public_keys.insert(high_key.id(), high_key.clone()); + let placeholder = Identity::V0(IdentityV0 { + id: Identifier::default(), + public_keys, + balance: 0, + revision: 0, + }); + + let inputs: BTreeMap = + std::iter::once((funding_address, funding)).collect(); + + let registered = self + .wallet + .identity() + .register_from_addresses( + &placeholder, + inputs, + None, + identity_index, + identity_signer.as_ref(), + &self.signer, + None, + ) + .await + .map_err(wallet_err)?; + + // The balance check uses a post-fee threshold of `funding / + // 2` — registration fees on testnet are well below half the + // funding amount, so this gives us a deterministic "the + // identity exists and has been credited" assertion without + // hard-coding a specific fee number that a protocol bump + // could invalidate. + wait_for_identity_balance( + self.wallet.sdk(), + registered.id(), + funding / 2, + DEFAULT_IDENTITY_VISIBILITY_TIMEOUT, + ) + .await?; + + Ok(RegisteredIdentity { + id: registered.id(), + master_key, + high_key, + signer: identity_signer, + identity_index, + funding, + }) + } +} + +/// Default fee strategy: reduce output #0 by the fee amount. +pub(crate) fn default_fee_strategy() -> AddressFundsFeeStrategy { + vec![AddressFundsFeeStrategyStep::ReduceOutput(0)] +} + +/// Bank-funding fee strategy: deduct fee from input #0 so the +/// recipient receives the **exact** requested amount. +/// +/// Used by [`super::bank::BankWallet::fund_address`] so +/// downstream calls — e.g. `register_identity_from_addresses( +/// {addr: N}, ...)` — don't have to compensate for fee +/// deduction at the recipient. 
+/// +/// Tests that need the alternative `ReduceOutput(0)` semantics +/// (e.g. PA-002b verifying `Σ outputs + fee == input balance`) +/// should call [`default_fee_strategy`] explicitly. +pub(crate) fn bank_fee_strategy() -> AddressFundsFeeStrategy { + vec![AddressFundsFeeStrategyStep::DeductFromInput(0)] +} + +/// Rebalance an explicit-input map so its sum equals `Σ outputs`. +/// +/// `AddressFundsTransferTransition` validation rejects with +/// `InputOutputBalanceMismatchError` unless the encoded transition +/// satisfies `Σ inputs == Σ outputs`. With `[ReduceOutput(0)]` (the +/// harness default) the chain-time fee is taken from output 0 at +/// execution; the transition payload must still balance pre-fee. +/// +/// Caller-supplied per-address values act as relative weights — a +/// single-input map is assigned the full output sum; multi-input +/// maps split the output sum proportionally with any rounding +/// remainder absorbed by the lex-smallest entry. Each share is held +/// at or above `min_input_amount` (the protocol's per-input floor) by +/// pulling the deficit from the donor with the largest share that +/// still has headroom. +fn balance_explicit_inputs( + inputs: &BTreeMap, + outputs: &BTreeMap, + platform_version: &PlatformVersion, +) -> FrameworkResult> { + if inputs.is_empty() { + return Err(FrameworkError::Wallet( + "transfer_capturing_st_bytes requires at least one input address".into(), + )); + } + let total_output: Credits = outputs.values().copied().sum(); + let min_input = platform_version + .dpp + .state_transitions + .address_funds + .min_input_amount; + if total_output < min_input { + return Err(FrameworkError::Wallet(format!( + "Σ outputs {total_output} < min_input_amount {min_input}: cannot \ + build a balanced explicit-input map" + ))); + } + + // Single input: assign the full output sum directly. This is the + // PA-006 / PA-006b shape and the path that matters in practice. 
+ if inputs.len() == 1 { + let addr = *inputs.keys().next().expect("len == 1"); + let mut out = BTreeMap::new(); + out.insert(addr, total_output); + return Ok(out); + } + + // Multi-input: weight by caller values. Zero-sum weights collapse + // to equal share to avoid div-by-zero. + let weight_total: u128 = inputs.values().map(|w| *w as u128).sum(); + let n = inputs.len() as u128; + let mut shares: BTreeMap = BTreeMap::new(); + let mut assigned: u128 = 0; + for (addr, weight) in inputs { + let share = if weight_total == 0 { + (total_output as u128) / n + } else { + ((total_output as u128) * (*weight as u128)) / weight_total + }; + shares.insert(*addr, share as Credits); + assigned += share; + } + // Lex-smallest entry absorbs the rounding remainder so Σ matches. + let remainder = (total_output as u128).saturating_sub(assigned) as Credits; + if remainder > 0 { + if let Some((_, slot)) = shares.iter_mut().next() { + *slot = slot.saturating_add(remainder); + } + } + + // Lift any sub-floor share by pulling the deficit from the largest + // peer that retains ≥ min_input after the donation. 
+ let needs_lift: Vec<(PlatformAddress, Credits)> = shares + .iter() + .filter(|(_, v)| **v < min_input) + .map(|(a, v)| (*a, *v)) + .collect(); + for (addr, share) in needs_lift { + let deficit = min_input - share; + let donor = shares + .iter() + .filter(|(a, v)| **a != addr && **v >= min_input.saturating_add(deficit)) + .max_by_key(|(_, v)| **v) + .map(|(a, _)| *a); + let Some(donor) = donor else { + return Err(FrameworkError::Wallet(format!( + "cannot satisfy min_input_amount {min_input} on {n} inputs with \ + Σ outputs {total_output}; no donor with sufficient headroom" + ))); + }; + if let Some(slot) = shares.get_mut(&donor) { + *slot -= deficit; + } + if let Some(slot) = shares.get_mut(&addr) { + *slot += deficit; + } + } + + debug_assert_eq!( + shares.values().copied().sum::(), + total_output, + "balanced inputs must sum to Σ outputs" + ); + Ok(shares) +} + +/// Default timeout for [`TestWallet::register_identity_from_addresses`] +/// to observe the new identity on chain. +const DEFAULT_IDENTITY_VISIBILITY_TIMEOUT: Duration = Duration::from_secs(30); + +/// A registered identity returned by +/// [`TestWallet::register_identity_from_addresses`]. +/// +/// Bundles the on-chain identifier with the two placeholder keys +/// (MASTER + HIGH) and the seed-backed identity signer so callers +/// can drive identity-side state transitions (top-up, transfer, +/// DPNS register, ...) without re-deriving anything. +pub struct RegisteredIdentity { + /// On-chain identity identifier. + pub id: Identifier, + /// MASTER auth key (DPP `KeyID = 0`). + pub master_key: IdentityPublicKey, + /// HIGH auth key (DPP `KeyID = 1`). + pub high_key: IdentityPublicKey, + /// `Arc`-shared signer pre-derived for this identity's DIP-9 slot. + /// `Arc` lets callers hand the same signer to multiple state-transition + /// builders without re-creating the key cache. + pub signer: Arc, + /// DIP-9 identity index used during registration. 
+ pub identity_index: u32, + /// Pre-fee credits that funded the identity at `register_from_addresses`. + pub funding: Credits, +} + +impl std::fmt::Debug for RegisteredIdentity { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + f.debug_struct("RegisteredIdentity") + .field("id", &self.id) + .field("identity_index", &self.identity_index) + .field("funding", &self.funding) + .finish_non_exhaustive() + } +} + +/// Generate a fresh 64-byte seed plus its hex encoding for the +/// registry. Single source so signer + registry stay in sync. +pub fn fresh_seed() -> ([u8; 64], String) { + let mut seed = [0u8; 64]; + OsRng.fill_bytes(&mut seed); + let hex = hex::encode(seed); + (seed, hex) +} + +/// Build a registry entry for a fresh seed. Insert it BEFORE +/// handing the wallet to the test body so a panic between insert +/// and teardown leaves a recoverable trail. +pub fn registry_entry_from_seed(seed: &[u8; 64], note: Option<String>) -> RegistryEntry { + RegistryEntry { + seed_hex: hex::encode(seed), + created_at: SystemTime::now(), + status: EntryStatus::Active, + note, + } +} + +/// Guard returned by [`super::setup`]. +/// +/// Tests SHOULD call [`SetupGuard::teardown`] explicitly once +/// they're done; the [`Drop`] impl is a panic-safety fallback that +/// logs a warning and relies on the next-startup +/// `cleanup::sweep_orphans` to recover funds. +pub struct SetupGuard { + /// Process-shared context (`&'static` — `E2eContext::init` + /// returns a singleton). + pub ctx: &'static E2eContext, + /// Fresh-seed test wallet, already registered for cleanup. + pub test_wallet: TestWallet, + /// Set to `true` by a successful [`SetupGuard::teardown`] so + /// [`Drop`] skips its warning. + pub(crate) teardown_called: bool, +} + +impl SetupGuard { + /// Sweep the test wallet's funds back to the bank and remove + /// its registry entry. 
+ /// + /// Best-effort: a transient sync / transfer failure retains the + /// registry entry, so the next process startup retries via + /// [`super::cleanup::sweep_orphans`]. + pub async fn teardown(mut self) -> FrameworkResult<()> { + let result = super::cleanup::teardown_one( + self.ctx.manager(), + self.ctx.bank(), + self.ctx.registry(), + &self.test_wallet, + ) + .await; + if result.is_ok() { + self.teardown_called = true; + } + result + } +} + +impl Drop for SetupGuard { + fn drop(&mut self) { + if !self.teardown_called { + tracing::warn!( + wallet_id = %hex::encode(self.test_wallet.id()), + "SetupGuard dropped without explicit teardown — wallet will be \ + swept on next test process startup" + ); + } + } +} + +/// `PlatformWalletError` → framework error envelope. +fn wallet_err(err: PlatformWalletError) -> FrameworkError { + FrameworkError::Wallet(err.to_string()) +} + +#[cfg(test)] +mod tests { + use super::*; + + /// Drift guard: our pinned defaults must match `PlatformPaymentAccountSpec::default()`. + /// If `key_wallet` ever changes its canonical defaults, this test fires. + #[test] + fn default_spec_matches_pinned_constants() { + let canonical = PlatformPaymentAccountSpec::default(); + assert_eq!(canonical.account, DEFAULT_ACCOUNT_INDEX_PUB); + assert_eq!(canonical.key_class, DEFAULT_KEY_CLASS_PUB); + assert_eq!(canonical, DEFAULT_PLATFORM_PAYMENT_ACCOUNT_SPEC); + } + + fn addr(byte: u8) -> PlatformAddress { + PlatformAddress::P2pkh([byte; 20]) + } + + /// PA-006 / PA-006b shape: one input address, one output address. + /// Caller passes the address's full balance as the input amount; + /// the helper must rewrite it to `Σ outputs` so the protocol's + /// `Σ in == Σ out` check passes. 
+ #[test] + fn balance_explicit_inputs_single_address_matches_output_sum() { + let pv = PlatformVersion::latest(); + let in_addr = addr(0x01); + let out_addr = addr(0x02); + let inputs: BTreeMap<_, _> = std::iter::once((in_addr, 90_755_960u64)).collect(); + let outputs: BTreeMap<_, _> = std::iter::once((out_addr, 50_000_000u64)).collect(); + + let balanced = balance_explicit_inputs(&inputs, &outputs, pv).expect("balance"); + assert_eq!(balanced.len(), 1); + assert_eq!(balanced.get(&in_addr).copied(), Some(50_000_000)); + let in_sum: Credits = balanced.values().copied().sum(); + let out_sum: Credits = outputs.values().copied().sum(); + assert_eq!(in_sum, out_sum, "Σ inputs must equal Σ outputs"); + } + + /// Multi-input shape: split `Σ outputs` proportionally to the + /// caller-supplied weights; sum must match exactly. + #[test] + fn balance_explicit_inputs_multi_address_sum_matches() { + let pv = PlatformVersion::latest(); + let a = addr(0x01); + let b = addr(0x02); + let out = addr(0x09); + let inputs: BTreeMap<_, _> = [(a, 30_000_000u64), (b, 70_000_000u64)] + .into_iter() + .collect(); + let outputs: BTreeMap<_, _> = std::iter::once((out, 50_000_001u64)).collect(); + + let balanced = balance_explicit_inputs(&inputs, &outputs, pv).expect("balance"); + assert_eq!(balanced.len(), 2); + let in_sum: Credits = balanced.values().copied().sum(); + assert_eq!(in_sum, 50_000_001, "Σ inputs must equal Σ outputs exactly"); + + let min_input = pv.dpp.state_transitions.address_funds.min_input_amount; + for (a, v) in &balanced { + assert!( + *v >= min_input, + "share for {a:?} = {v} below min_input {min_input}" + ); + } + } + + /// Empty inputs are rejected up-front; the protocol requires ≥ 1 + /// input on every transfer transition. 
+ #[test] + fn balance_explicit_inputs_rejects_empty() { + let pv = PlatformVersion::latest(); + let outputs: BTreeMap<_, _> = std::iter::once((addr(0x09), 50_000_000u64)).collect(); + let err = balance_explicit_inputs(&BTreeMap::new(), &outputs, pv).unwrap_err(); + assert!(matches!(err, FrameworkError::Wallet(_))); + } +} diff --git a/packages/rs-platform-wallet/tests/e2e/framework/workdir.rs b/packages/rs-platform-wallet/tests/e2e/framework/workdir.rs new file mode 100644 index 00000000000..9d059456623 --- /dev/null +++ b/packages/rs-platform-wallet/tests/e2e/framework/workdir.rs @@ -0,0 +1,125 @@ +//! Cross-process workdir slot selection via `flock`. Walks +//! `0..MAX_SLOTS` and returns the first slot whose `.lock` file is +//! exclusively claimable. The returned `File` MUST stay open for +//! the slot's lifetime — dropping it releases the lock. + +use std::fs::{self, File, OpenOptions}; +use std::io::ErrorKind; +use std::path::{Path, PathBuf}; + +use fs2::FileExt; + +use super::{FrameworkError, FrameworkResult}; + +/// Maximum concurrent test processes per machine; beyond this +/// [`pick_available_workdir`] errors rather than queueing. +pub const MAX_SLOTS: u32 = 10; + +/// Acquire an exclusive workdir slot under `base`. +/// +/// Returns `(slot_dir, lock_file)` — slot 0 is `base` itself, +/// higher slots are `-N`. The caller MUST keep `lock_file` +/// alive for the slot's lifetime; dropping it releases the lock. 
+pub fn pick_available_workdir(base: &Path) -> FrameworkResult<(PathBuf, File)> { + for slot in 0..MAX_SLOTS { + let dir = slot_dir(base, slot); + fs::create_dir_all(&dir).map_err(|err| { + FrameworkError::Io(format!("creating workdir {}: {err}", dir.display())) + })?; + + let lock_path = dir.join(".lock"); + let lock_file = OpenOptions::new() + .read(true) + .write(true) + .create(true) + .truncate(false) + .open(&lock_path) + .map_err(|err| { + FrameworkError::Io(format!("opening lock file {}: {err}", lock_path.display())) + })?; + + match FileExt::try_lock_exclusive(&lock_file) { + Ok(()) => { + tracing::info!( + target: "platform_wallet::e2e::workdir", + slot, + dir = %dir.display(), + "acquired workdir slot" + ); + return Ok((dir, lock_file)); + } + // `WouldBlock` is the only "slot is held by another + // process" outcome. Anything else (permission denied, + // unsupported filesystem, EIO, etc.) is propagated so + // operators see the real cause instead of a misleading + // "no available workdir slots" message after the loop. + Err(err) if err.kind() == ErrorKind::WouldBlock => { + tracing::debug!( + target: "platform_wallet::e2e::workdir", + slot, + dir = %dir.display(), + error = %err, + "workdir slot busy, trying next" + ); + // Dropping `lock_file` here releases the would-be + // lock without affecting the existing holder. + continue; + } + Err(err) => { + return Err(FrameworkError::Io(format!( + "locking {} failed (kind={:?}): {err}", + lock_path.display(), + err.kind() + ))); + } + } + } + + Err(FrameworkError::Io(format!( + "no available workdir slots (tried {} under {})", + MAX_SLOTS, + base.display() + ))) +} + +/// Slot 0 is `base`; higher slots append `-N`. Matches the DET +/// convention so on-disk artifacts from concurrent runs are +/// recognisable at a glance. 
+fn slot_dir(base: &Path, slot: u32) -> PathBuf { + if slot == 0 { + return base.to_path_buf(); + } + let parent = base.parent().unwrap_or_else(|| Path::new(".")); + let name = base + .file_name() + .map(|n| n.to_string_lossy().into_owned()) + .unwrap_or_else(|| "dash-platform-wallet-e2e".to_string()); + parent.join(format!("{name}-{slot}")) +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn first_call_takes_slot_zero_second_falls_through() { + let dir = tempfile::tempdir().unwrap(); + let base = dir.path().join("e2e"); + + let (slot0_dir, _lock0) = pick_available_workdir(&base).unwrap(); + assert_eq!(slot0_dir, base); + + // With `_lock0` held, the next caller falls through to slot 1. + let (slot1_dir, _lock1) = pick_available_workdir(&base).unwrap(); + assert!( + slot1_dir.ends_with("e2e-1"), + "expected slot 1 to be `-1`, got {}", + slot1_dir.display() + ); + + drop(_lock0); + // After release slot 0 is reclaimable. + let (slot0_again, _lock0_again) = pick_available_workdir(&base).unwrap(); + assert_eq!(slot0_again, base); + } +} diff --git a/packages/rs-sdk/src/platform/transition.rs b/packages/rs-sdk/src/platform/transition.rs index b5aa9aa0516..b8fec6bd705 100644 --- a/packages/rs-sdk/src/platform/transition.rs +++ b/packages/rs-sdk/src/platform/transition.rs @@ -1,6 +1,6 @@ //! State transitions used to put changed objects to the Dash Platform. 
pub mod address_credit_withdrawal; -pub(crate) mod address_inputs; +pub mod address_inputs; pub mod broadcast; pub(crate) mod broadcast_identity; pub mod broadcast_request; diff --git a/packages/rs-sdk/src/platform/transition/address_inputs.rs b/packages/rs-sdk/src/platform/transition/address_inputs.rs index 38a5c4aecb3..d5d92a95023 100644 --- a/packages/rs-sdk/src/platform/transition/address_inputs.rs +++ b/packages/rs-sdk/src/platform/transition/address_inputs.rs @@ -9,7 +9,7 @@ use dpp::prelude::AddressNonce; use drive_proof_verifier::types::{AddressInfo, AddressInfos}; use std::collections::{BTreeMap, BTreeSet}; -pub(crate) async fn fetch_inputs_with_nonce( +pub async fn fetch_inputs_with_nonce( sdk: &Sdk, amounts: &BTreeMap, ) -> Result, Error> { @@ -31,7 +31,7 @@ pub(crate) async fn fetch_inputs_with_nonce( } /// Increments the nonce for each address in the provided map. -pub(crate) fn nonce_inc( +pub fn nonce_inc( data: BTreeMap, ) -> BTreeMap { data.into_iter() diff --git a/packages/simple-signer/Cargo.toml b/packages/simple-signer/Cargo.toml index 4bb9d4aa765..648a496b996 100644 --- a/packages/simple-signer/Cargo.toml +++ b/packages/simple-signer/Cargo.toml @@ -14,6 +14,8 @@ state-transitions = [ "dpp/bls-signatures", "dpp/state-transition-signing", ] +# Eager seed-based key derivation constructors (DIP-17 / DIP-9). 
+derive = ["dep:key-wallet", "dep:thiserror", "state-transitions"] [dependencies] dpp = { path = "../rs-dpp", default-features = false, features = [ @@ -24,6 +26,8 @@ bincode = { version = "=2.0.1", features = ["serde"] } base64 = { version = "0.22.1" } hex = { version = "0.4.3" } tracing = "0.1.41" +key-wallet = { workspace = true, optional = true } +thiserror = { version = "2.0.17", optional = true } [package.metadata.cargo-machete] ignored = ["bincode"] diff --git a/packages/simple-signer/src/signer.rs b/packages/simple-signer/src/signer.rs index c7fc229e551..e1f72f0fe4d 100644 --- a/packages/simple-signer/src/signer.rs +++ b/packages/simple-signer/src/signer.rs @@ -55,6 +55,34 @@ impl Debug for SimpleSigner { } } +/// Errors returned by the seed-based eager-derivation constructors. +#[cfg(feature = "derive")] +#[derive(Debug, thiserror::Error)] +pub enum SimpleSignerError { + /// The seed produced an invalid root extended private key. + #[error("invalid seed for root xpriv: {0}")] + InvalidSeed(String), + /// The DIP-17 / DIP-9 derivation path failed to construct. + #[error("derivation path: {0}")] + DerivationPath(String), + /// `derive_priv` failed at the given leaf index. + #[error("derive_priv at index {index}: {message}")] + DerivePriv { + /// Leaf index that failed. + index: u32, + /// Underlying key-wallet error message. + message: String, + }, + /// A leaf [`ChildNumber`] could not be constructed from the requested index. + #[error("invalid leaf index {index}: {message}")] + InvalidIndex { + /// Offending leaf index. + index: u32, + /// Underlying key-wallet error message. + message: String, + }, +} + impl SimpleSigner { /// Add a key to the signer pub fn add_identity_public_key( @@ -114,6 +142,103 @@ impl SimpleSigner { PlatformAddress::P2pkh(address_hash) } + + /// Build a [`SimpleSigner`] populated with the DIP-17 platform-payment + /// gap window for `(account, key_class)`. 
Each leaf + /// `m/9'/coin_type'/17'/account'/key_class'/index` derives a + /// secp256k1 keypair; the 20-byte RIPEMD160(SHA256(pubkey)) hash is + /// inserted into [`Self::address_private_keys`]. + #[cfg(feature = "derive")] + pub fn from_seed_for_platform_address_account( + seed: &[u8; 64], + network: key_wallet::Network, + account: u32, + key_class: u32, + gap_limit: u32, + ) -> Result<Self, SimpleSignerError> { + use key_wallet::wallet::root_extended_keys::RootExtendedPrivKey; + use key_wallet::{AccountType, ChildNumber}; + + let root_priv = RootExtendedPrivKey::new_master(seed) + .map_err(|err| SimpleSignerError::InvalidSeed(err.to_string()))?; + let root_xpriv = root_priv.to_extended_priv_key(network); + + let account_path = AccountType::PlatformPayment { account, key_class } + .derivation_path(network) + .map_err(|err| SimpleSignerError::DerivationPath(err.to_string()))?; + + let secp = Secp256k1::new(); + let mut signer = Self::default(); + for index in 0..gap_limit { + let leaf = ChildNumber::from_normal_idx(index).map_err(|err| { + SimpleSignerError::InvalidIndex { + index, + message: err.to_string(), + } + })?; + // `extend` returns a fresh path; account_path is reused. + let leaf_path = account_path.extend([leaf]); + let xpriv = root_xpriv.derive_priv(&secp, &leaf_path).map_err(|err| { + SimpleSignerError::DerivePriv { + index, + message: err.to_string(), + } + })?; + let secret: SecretKey = xpriv.private_key; + let pubkey: PublicKey = PublicKey::from_secret_key(&secp, &secret); + let pkh = ripemd160_sha256(&pubkey.serialize()); + signer + .address_private_keys + .insert(pkh, secret.secret_bytes()); + } + Ok(signer) + } + + /// Build a [`SimpleSigner`] populated with the DIP-9 identity-authentication + /// (ECDSA) gap window for `identity_index`. 
The returned signer holds raw + /// secp256k1 secrets keyed on `(pubkey-hash, secret)` via + /// [`Self::address_private_keys`] — callers that need a `Signer` + /// view must additionally register `IdentityPublicKey` records via + /// [`Self::add_identity_public_key`] using the matching pubkey bytes. + #[cfg(feature = "derive")] + pub fn from_seed_for_identity( + seed: &[u8; 64], + network: key_wallet::Network, + identity_index: u32, + gap_limit: u32, + ) -> Result<Self, SimpleSignerError> { + use key_wallet::bip32::KeyDerivationType; + use key_wallet::wallet::root_extended_keys::RootExtendedPrivKey; + use key_wallet::DerivationPath; + + let root_priv = RootExtendedPrivKey::new_master(seed) + .map_err(|err| SimpleSignerError::InvalidSeed(err.to_string()))?; + let root_xpriv = root_priv.to_extended_priv_key(network); + + let secp = Secp256k1::new(); + let mut signer = Self::default(); + for key_index in 0..gap_limit { + let leaf_path = DerivationPath::identity_authentication_path( + network, + KeyDerivationType::ECDSA, + identity_index, + key_index, + ); + let xpriv = root_xpriv.derive_priv(&secp, &leaf_path).map_err(|err| { + SimpleSignerError::DerivePriv { + index: key_index, + message: err.to_string(), + } + })?; + let secret: SecretKey = xpriv.private_key; + let pubkey: PublicKey = PublicKey::from_secret_key(&secp, &secret); + let pkh = ripemd160_sha256(&pubkey.serialize()); + signer + .address_private_keys + .insert(pkh, secret.secret_bytes()); + } + Ok(signer) + } } #[async_trait]