From b7fa2e3069baea96a9ff0a6304d413d4af4e17eb Mon Sep 17 00:00:00 2001 From: Esau Date: Fri, 2 Jan 2026 20:07:35 +0800 Subject: [PATCH] init --- .../docs/cli/bb_cli_reference.md | 288 ++++++++++++++++++ .../docs/cli/bb_cli_reference.md | 288 ++++++++++++++++++ docs/docs-words.txt | 5 + .../cli_reference_generation/cli_config.sh | 83 +++++ .../generate_cli_docs.sh | 47 ++- .../cli_reference_generation/scan_cli.py | 234 ++++++++++++-- .../transform_to_markdown.py | 82 ++++- .../update_all_cli_docs.sh | 26 +- .../update_cli_docs.sh | 92 +++--- 9 files changed, 1022 insertions(+), 123 deletions(-) create mode 100644 docs/developer_versioned_docs/version-v3.0.0-nightly.20260101/docs/cli/bb_cli_reference.md create mode 100644 docs/docs-developers/docs/cli/bb_cli_reference.md create mode 100755 docs/scripts/cli_reference_generation/cli_config.sh diff --git a/docs/developer_versioned_docs/version-v3.0.0-nightly.20260101/docs/cli/bb_cli_reference.md b/docs/developer_versioned_docs/version-v3.0.0-nightly.20260101/docs/cli/bb_cli_reference.md new file mode 100644 index 000000000000..71a1c1f82725 --- /dev/null +++ b/docs/developer_versioned_docs/version-v3.0.0-nightly.20260101/docs/cli/bb_cli_reference.md @@ -0,0 +1,288 @@ +--- +title: Barretenberg CLI Reference +description: Comprehensive auto-generated reference for the Barretenberg CLI Reference command-line interface with all commands and options. +tags: [cli, reference, autogenerated, barretenberg, proving] +sidebar_position: 3 +--- + +# Barretenberg CLI Reference + +*This documentation is auto-generated from the `bb` CLI help output.* + + +*Generated: Thu 01 Jan 2026 09:56:44 UTC* + +*Command: `bb`* + +## Table of Contents + +- [bb](#bb) + - [bb check](#bb-check) + - [bb gates](#bb-gates) + - [bb prove](#bb-prove) + - [bb write_vk](#bb-write_vk) + - [bb verify](#bb-verify) + - [bb write_solidity_verifier](#bb-write_solidity_verifier) + - [bb aztec_process](#bb-aztec_process) + - [bb msgpack](#bb-msgpack) + - [bb msgpack schema](#bb-msgpack-schema) + - [bb msgpack curve_constants](#bb-msgpack-curve_constants) + - [bb msgpack run](#bb-msgpack-run) +## bb + +Barretenberg Your favo(u)rite zkSNARK library written in C++, a perfectly good computer programming language. Aztec Virtual Machine (AVM): disabled AVM Transpiler: enabled Starknet Garaga Extensions: disabled + +**Usage:** +```bash +bb [OPTIONS] [SUBCOMMAND] +``` + +**Available Commands:** + +- `check` - A debugging tool to quickly check whether a witness satisfies a circuit The function constructs the execution trace and iterates through it row by row, applying the polynomial relations defining the gate types. For Chonk, we check the VKs in the folding stack. +- `gates` - Construct a circuit from the given bytecode (in particular, expand black box functions) and return the gate count information. +- `prove` - Generate a proof. +- `write_vk` - Write the verification key of a circuit. The circuit is constructed using quickly generated but invalid witnesses (which must be supplied in Barretenberg in order to expand ACIR black box opcodes), and no proof is constructed. +- `verify` - Verify a proof. +- `write_solidity_verifier` - Write a Solidity smart contract suitable for verifying proofs of circuit satisfiability for the circuit with verification key at vk_path. Not all hash types are implemented due to efficiency concerns. +- `aztec_process` - Process Aztec contract artifacts: transpile and generate verification keys for all private functions. 
If input is a directory (and no output specified), recursively processes all artifacts found in the directory. +- `msgpack` - Msgpack API interface. + +**Options:** + +- `-h,--help` - Print this help message and exit +- `-v,--verbose,--verbose_logging` - Output all logs to stderr. +- `-d,--debug_logging` - Output debug logs to stderr. +- `-c,--crs_path` - Path CRS directory. Missing CRS files will be retrieved from the internet. +- `--version` - Print the version string. + + +### Subcommands + +### bb check + +A debugging tool to quickly check whether a witness satisfies a circuit The function constructs the execution trace and iterates through it row by row, applying the polynomial relations defining the gate types. For Chonk, we check the VKs in the folding stack. + +**Usage:** +```bash +bb check [OPTIONS] +``` + +**Options:** + +- `-h,--help` - Print this help message and exit +- `-s,--scheme` - The type of proof to be constructed. This can specify a proving system, an accumulation scheme, or a particular type of circuit to be constructed and proven for some implicit scheme. Options: \{chonk, avm, ultra_honk\} + *Environment: `BB_SCHEME`* +- `-b,--bytecode_path` - Path to ACIR bytecode generated by Noir. +- `-w,--witness_path` - Path to partial witness generated by Noir. +- `--ivc_inputs_path` - For IVC, path to input stack with bytecode and witnesses. +- `--update_inputs` - Update inputs if vk check fails. + +### bb gates + +Construct a circuit from the given bytecode (in particular, expand black box functions) and return the gate count information. + +**Usage:** +```bash +bb gates [OPTIONS] +``` + +**Options:** + +- `-h,--help` - Print this help message and exit +- `-s,--scheme` - The type of proof to be constructed. This can specify a proving system, an accumulation scheme, or a particular type of circuit to be constructed and proven for some implicit scheme. Options: \{chonk, avm, ultra_honk\} + *Environment: `BB_SCHEME`* +- `-v,--verbose,--verbose_logging` - Output all logs to stderr. +- `-b,--bytecode_path` - Path to ACIR bytecode generated by Noir. +- `--include_gates_per_opcode` - Include gates_per_opcode in the output of the gates command. +- `--oracle_hash` - The hash function used by the prover as random oracle standing in for a verifier's challenge generation. Poseidon2 is to be used for proofs that are intended to be verified inside of a circuit. Keccak is optimized for verification in an Ethereum smart contract, where Keccak has a privileged position due to the existence of an EVM precompile. Starknet is optimized for verification in a Starknet smart contract, which can be generated using the Garaga library. Options: \{poseidon2, keccak, starknet\} +- `--ipa_accumulation` - Accumulate/Aggregate IPA (Inner Product Argument) claims + +### bb prove + +Generate a proof. + +**Usage:** +```bash +bb prove [OPTIONS] +``` + +**Options:** + +- `-h,--help` - Print this help message and exit +- `-s,--scheme` - The type of proof to be constructed. This can specify a proving system, an accumulation scheme, or a particular type of circuit to be constructed and proven for some implicit scheme. Options: \{chonk, avm, ultra_honk\} + *Environment: `BB_SCHEME`* +- `-b,--bytecode_path` - Path to ACIR bytecode generated by Noir. +- `-w,--witness_path` - Path to partial witness generated by Noir. +- `-o,--output_path` - Directory to write files or path of file to write, depending on subcommand. +- `--ivc_inputs_path` - For IVC, path to input stack with bytecode and witnesses. 
+- `-k,--vk_path` - Path to a verification key. +- `--vk_policy` - Policy for handling verification keys during IVC accumulation. 'default' uses the provided VK as-is, 'check' verifies the provided VK matches the computed VK (throws error on mismatch), 'recompute' always ignores the provided VK and treats it as nullptr. Options: \{default, check, recompute\} +- `-v,--verbose,--verbose_logging` - Output all logs to stderr. +- `-d,--debug_logging` - Output debug logs to stderr. +- `-c,--crs_path` - Path CRS directory. Missing CRS files will be retrieved from the internet. +- `--oracle_hash` - The hash function used by the prover as random oracle standing in for a verifier's challenge generation. Poseidon2 is to be used for proofs that are intended to be verified inside of a circuit. Keccak is optimized for verification in an Ethereum smart contract, where Keccak has a privileged position due to the existence of an EVM precompile. Starknet is optimized for verification in a Starknet smart contract, which can be generated using the Garaga library. Options: \{poseidon2, keccak, starknet\} +- `--write_vk` - Write the provided circuit's verification key +- `--ipa_accumulation` - Accumulate/Aggregate IPA (Inner Product Argument) claims +- `--disable_zk` - Use a non-zk version of --scheme. This flag is set to false by default. +- `--slow_low_memory` - Enable low memory mode (can be 2x slower or more). +- `--print_bench` - Pretty print op counts to standard error in a human-readable format. +- `--bench_out` - Path to write the op counts in a json. +- `--bench_out_hierarchical` - Path to write the hierarchical benchmark data (op counts and timings with parent-child relationships) as json. +- `--storage_budget` - Storage budget for FileBackedMemory (e.g. '500m', '2g'). When exceeded, falls back to RAM (requires +- `--slow_low_memory).` +- `--verify` - Verify the proof natively, resulting in a boolean output. Useful for testing. + +### bb write_vk + +Write the verification key of a circuit. The circuit is constructed using quickly generated but invalid witnesses (which must be supplied in Barretenberg in order to expand ACIR black box opcodes), and no proof is constructed. + +**Usage:** +```bash +bb write_vk [OPTIONS] +``` + +**Options:** + +- `-h,--help` - Print this help message and exit +- `-s,--scheme` - The type of proof to be constructed. This can specify a proving system, an accumulation scheme, or a particular type of circuit to be constructed and proven for some implicit scheme. Options: \{chonk, avm, ultra_honk\} + *Environment: `BB_SCHEME`* +- `-b,--bytecode_path` - Path to ACIR bytecode generated by Noir. +- `-o,--output_path` - Directory to write files or path of file to write, depending on subcommand. +- `--ivc_inputs_path` - For IVC, path to input stack with bytecode and witnesses. +- `-v,--verbose,--verbose_logging` - Output all logs to stderr. +- `-d,--debug_logging` - Output debug logs to stderr. +- `-c,--crs_path` - Path CRS directory. Missing CRS files will be retrieved from the internet. +- `--oracle_hash` - The hash function used by the prover as random oracle standing in for a verifier's challenge generation. Poseidon2 is to be used for proofs that are intended to be verified inside of a circuit. Keccak is optimized for verification in an Ethereum smart contract, where Keccak has a privileged position due to the existence of an EVM precompile. Starknet is optimized for verification in a Starknet smart contract, which can be generated using the Garaga library. 
Options: \{poseidon2, keccak, starknet\} +- `--ipa_accumulation` - Accumulate/Aggregate IPA (Inner Product Argument) claims +- `--verifier_type` - Is a verification key for use a standalone single circuit verifier (e.g. a SNARK or folding recursive verifier) or is it for an ivc verifier? `standalone` produces a verification key is sufficient for verifying proofs about a single circuit (including the non-encsapsulated use case where an IVC scheme is manually constructed via recursive UltraHonk proof verification). `standalone_hiding` is similar to `standalone` but is used for the last step where the structured trace is not utilized. `ivc` produces a verification key for verifying the stack of run though a dedicated ivc verifier class (currently the only option is the Chonk class) Options: \{standalone, standalone_hiding, ivc\} +- `--disable_zk` - Use a non-zk version of --scheme. This flag is set to false by default. + +### bb verify + +Verify a proof. + +**Usage:** +```bash +bb verify [OPTIONS] +``` + +**Options:** + +- `-h,--help` - Print this help message and exit +- `-i,--public_inputs_path` - Path to public inputs. +- `-p,--proof_path` - Path to a proof. +- `-k,--vk_path` - Path to a verification key. +- `-v,--verbose,--verbose_logging` - Output all logs to stderr. +- `-d,--debug_logging` - Output debug logs to stderr. +- `-s,--scheme` - The type of proof to be constructed. This can specify a proving system, an accumulation scheme, or a particular type of circuit to be constructed and proven for some implicit scheme. Options: \{chonk, avm, ultra_honk\} + *Environment: `BB_SCHEME`* +- `-c,--crs_path` - Path CRS directory. Missing CRS files will be retrieved from the internet. +- `--oracle_hash` - The hash function used by the prover as random oracle standing in for a verifier's challenge generation. Poseidon2 is to be used for proofs that are intended to be verified inside of a circuit. Keccak is optimized for verification in an Ethereum smart contract, where Keccak has a privileged position due to the existence of an EVM precompile. Starknet is optimized for verification in a Starknet smart contract, which can be generated using the Garaga library. Options: \{poseidon2, keccak, starknet\} +- `--disable_zk` - Use a non-zk version of --scheme. This flag is set to false by default. +- `--ipa_accumulation` - Accumulate/Aggregate IPA (Inner Product Argument) claims + +### bb write_solidity_verifier + +Write a Solidity smart contract suitable for verifying proofs of circuit satisfiability for the circuit with verification key at vk_path. Not all hash types are implemented due to efficiency concerns. + +**Usage:** +```bash +bb write_solidity_verifier [OPTIONS] +``` + +**Options:** + +- `-h,--help` - Print this help message and exit +- `-s,--scheme` - The type of proof to be constructed. This can specify a proving system, an accumulation scheme, or a particular type of circuit to be constructed and proven for some implicit scheme. Options: \{chonk, avm, ultra_honk\} + *Environment: `BB_SCHEME`* +- `-k,--vk_path` - Path to a verification key. +- `-o,--output_path` - Directory to write files or path of file to write, depending on subcommand. +- `-v,--verbose,--verbose_logging` - Output all logs to stderr. +- `--disable_zk` - Use a non-zk version of --scheme. This flag is set to false by default. +- `-c,--crs_path` - Path CRS directory. Missing CRS files will be retrieved from the internet. +- `--optimized` - Use the optimized Solidity verifier. 
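+
+*Illustrative example (not part of the generated help output): a typical flow to prove a circuit and emit a Solidity verifier. The `ultra_honk` scheme, the `keccak` oracle hash, and all file paths are placeholders; only flags documented above are used.*
+
+```bash
+# Prove the circuit and write the proof artifacts (paths are placeholders).
+bb prove --scheme ultra_honk --oracle_hash keccak -b ./target/circuit.json -w ./target/witness.gz -o ./target
+# Write the matching verification key to an explicit file path.
+bb write_vk --scheme ultra_honk --oracle_hash keccak -b ./target/circuit.json -o ./target/vk
+# Emit a Solidity verifier contract for that verification key.
+bb write_solidity_verifier --scheme ultra_honk -k ./target/vk -o ./target/Verifier.sol
+```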
+ +### bb aztec_process + +Process Aztec contract artifacts: transpile and generate verification keys for all private functions. If input is a directory (and no output specified), recursively processes all artifacts found in the directory. + +**Usage:** +```bash +bb aztec_process [OPTIONS] +``` + +**Options:** + +- `-h,--help` - Print this help message and exit +- `-i,--input` - Input artifact JSON path or directory to search (optional, defaults to current directory) +- `-o,--output` - Output artifact JSON path (optional, same as input if not specified) +- `-f,--force` - Force regeneration of verification keys +- `-v,--verbose,--verbose_logging` - Output all logs to stderr. +- `-d,--debug_logging` - Output debug logs to stderr. + +### bb msgpack + +Msgpack API interface. + +**Usage:** +```bash +bb msgpack [OPTIONS] [SUBCOMMAND] +``` + +**Available Commands:** + +- `schema` - Output a msgpack schema encoded as JSON to stdout. +- `curve_constants` - Output curve constants as msgpack to stdout. +- `run` - Execute msgpack API commands from stdin or file. + +**Options:** + +- `-h,--help` - Print this help message and exit + + +#### Subcommands + +#### bb msgpack schema + +Output a msgpack schema encoded as JSON to stdout. + +**Usage:** +```bash +bb msgpack schema [OPTIONS] +``` + +**Options:** + +- `-h,--help` - Print this help message and exit +- `-v,--verbose,--verbose_logging` - Output all logs to stderr. + +#### bb msgpack curve_constants + +Output curve constants as msgpack to stdout. + +**Usage:** +```bash +bb msgpack curve_constants [OPTIONS] +``` + +**Options:** + +- `-h,--help` - Print this help message and exit +- `-v,--verbose,--verbose_logging` - Output all logs to stderr. + +#### bb msgpack run + +Execute msgpack API commands from stdin or file. + +**Usage:** +```bash +bb msgpack run [OPTIONS] +``` + +**Options:** + +- `-h,--help` - Print this help message and exit +- `-v,--verbose,--verbose_logging` - Output all logs to stderr. +- `-i,--input` - Input file containing msgpack buffers (defaults to stdin) +- `--max-clients` - Maximum concurrent clients for shared memory IPC server (default: 1) diff --git a/docs/docs-developers/docs/cli/bb_cli_reference.md b/docs/docs-developers/docs/cli/bb_cli_reference.md new file mode 100644 index 000000000000..d9d35c31ded1 --- /dev/null +++ b/docs/docs-developers/docs/cli/bb_cli_reference.md @@ -0,0 +1,288 @@ +--- +title: Barretenberg CLI Reference +description: Comprehensive auto-generated reference for the Barretenberg CLI Reference command-line interface with all commands and options. +tags: [cli, reference, autogenerated, barretenberg, proving] +sidebar_position: 3 +--- + +# Barretenberg CLI Reference + +*This documentation is auto-generated from the `bb` CLI help output.* + + +*Generated: Thu 01 Jan 2026 10:01:35 UTC* + +*Command: `bb`* + +## Table of Contents + +- [bb](#bb) + - [bb check](#bb-check) + - [bb gates](#bb-gates) + - [bb prove](#bb-prove) + - [bb write_vk](#bb-write_vk) + - [bb verify](#bb-verify) + - [bb write_solidity_verifier](#bb-write_solidity_verifier) + - [bb aztec_process](#bb-aztec_process) + - [bb msgpack](#bb-msgpack) + - [bb msgpack schema](#bb-msgpack-schema) + - [bb msgpack curve_constants](#bb-msgpack-curve_constants) + - [bb msgpack run](#bb-msgpack-run) +## bb + +Barretenberg Your favo(u)rite zkSNARK library written in C++, a perfectly good computer programming language. 
Aztec Virtual Machine (AVM): disabled AVM Transpiler: enabled Starknet Garaga Extensions: disabled + +**Usage:** +```bash +bb [OPTIONS] [SUBCOMMAND] +``` + +**Available Commands:** + +- `check` - A debugging tool to quickly check whether a witness satisfies a circuit The function constructs the execution trace and iterates through it row by row, applying the polynomial relations defining the gate types. For Chonk, we check the VKs in the folding stack. +- `gates` - Construct a circuit from the given bytecode (in particular, expand black box functions) and return the gate count information. +- `prove` - Generate a proof. +- `write_vk` - Write the verification key of a circuit. The circuit is constructed using quickly generated but invalid witnesses (which must be supplied in Barretenberg in order to expand ACIR black box opcodes), and no proof is constructed. +- `verify` - Verify a proof. +- `write_solidity_verifier` - Write a Solidity smart contract suitable for verifying proofs of circuit satisfiability for the circuit with verification key at vk_path. Not all hash types are implemented due to efficiency concerns. +- `aztec_process` - Process Aztec contract artifacts: transpile and generate verification keys for all private functions. If input is a directory (and no output specified), recursively processes all artifacts found in the directory. +- `msgpack` - Msgpack API interface. + +**Options:** + +- `-h,--help` - Print this help message and exit +- `-v,--verbose,--verbose_logging` - Output all logs to stderr. +- `-d,--debug_logging` - Output debug logs to stderr. +- `-c,--crs_path` - Path CRS directory. Missing CRS files will be retrieved from the internet. +- `--version` - Print the version string. + + +### Subcommands + +### bb check + +A debugging tool to quickly check whether a witness satisfies a circuit The function constructs the execution trace and iterates through it row by row, applying the polynomial relations defining the gate types. For Chonk, we check the VKs in the folding stack. + +**Usage:** +```bash +bb check [OPTIONS] +``` + +**Options:** + +- `-h,--help` - Print this help message and exit +- `-s,--scheme` - The type of proof to be constructed. This can specify a proving system, an accumulation scheme, or a particular type of circuit to be constructed and proven for some implicit scheme. Options: \{chonk, avm, ultra_honk\} + *Environment: `BB_SCHEME`* +- `-b,--bytecode_path` - Path to ACIR bytecode generated by Noir. +- `-w,--witness_path` - Path to partial witness generated by Noir. +- `--ivc_inputs_path` - For IVC, path to input stack with bytecode and witnesses. +- `--update_inputs` - Update inputs if vk check fails. + +### bb gates + +Construct a circuit from the given bytecode (in particular, expand black box functions) and return the gate count information. + +**Usage:** +```bash +bb gates [OPTIONS] +``` + +**Options:** + +- `-h,--help` - Print this help message and exit +- `-s,--scheme` - The type of proof to be constructed. This can specify a proving system, an accumulation scheme, or a particular type of circuit to be constructed and proven for some implicit scheme. Options: \{chonk, avm, ultra_honk\} + *Environment: `BB_SCHEME`* +- `-v,--verbose,--verbose_logging` - Output all logs to stderr. +- `-b,--bytecode_path` - Path to ACIR bytecode generated by Noir. +- `--include_gates_per_opcode` - Include gates_per_opcode in the output of the gates command. 
+- `--oracle_hash` - The hash function used by the prover as random oracle standing in for a verifier's challenge generation. Poseidon2 is to be used for proofs that are intended to be verified inside of a circuit. Keccak is optimized for verification in an Ethereum smart contract, where Keccak has a privileged position due to the existence of an EVM precompile. Starknet is optimized for verification in a Starknet smart contract, which can be generated using the Garaga library. Options: \{poseidon2, keccak, starknet\} +- `--ipa_accumulation` - Accumulate/Aggregate IPA (Inner Product Argument) claims + +### bb prove + +Generate a proof. + +**Usage:** +```bash +bb prove [OPTIONS] +``` + +**Options:** + +- `-h,--help` - Print this help message and exit +- `-s,--scheme` - The type of proof to be constructed. This can specify a proving system, an accumulation scheme, or a particular type of circuit to be constructed and proven for some implicit scheme. Options: \{chonk, avm, ultra_honk\} + *Environment: `BB_SCHEME`* +- `-b,--bytecode_path` - Path to ACIR bytecode generated by Noir. +- `-w,--witness_path` - Path to partial witness generated by Noir. +- `-o,--output_path` - Directory to write files or path of file to write, depending on subcommand. +- `--ivc_inputs_path` - For IVC, path to input stack with bytecode and witnesses. +- `-k,--vk_path` - Path to a verification key. +- `--vk_policy` - Policy for handling verification keys during IVC accumulation. 'default' uses the provided VK as-is, 'check' verifies the provided VK matches the computed VK (throws error on mismatch), 'recompute' always ignores the provided VK and treats it as nullptr. Options: \{default, check, recompute\} +- `-v,--verbose,--verbose_logging` - Output all logs to stderr. +- `-d,--debug_logging` - Output debug logs to stderr. +- `-c,--crs_path` - Path CRS directory. Missing CRS files will be retrieved from the internet. +- `--oracle_hash` - The hash function used by the prover as random oracle standing in for a verifier's challenge generation. Poseidon2 is to be used for proofs that are intended to be verified inside of a circuit. Keccak is optimized for verification in an Ethereum smart contract, where Keccak has a privileged position due to the existence of an EVM precompile. Starknet is optimized for verification in a Starknet smart contract, which can be generated using the Garaga library. Options: \{poseidon2, keccak, starknet\} +- `--write_vk` - Write the provided circuit's verification key +- `--ipa_accumulation` - Accumulate/Aggregate IPA (Inner Product Argument) claims +- `--disable_zk` - Use a non-zk version of --scheme. This flag is set to false by default. +- `--slow_low_memory` - Enable low memory mode (can be 2x slower or more). +- `--print_bench` - Pretty print op counts to standard error in a human-readable format. +- `--bench_out` - Path to write the op counts in a json. +- `--bench_out_hierarchical` - Path to write the hierarchical benchmark data (op counts and timings with parent-child relationships) as json. +- `--storage_budget` - Storage budget for FileBackedMemory (e.g. '500m', '2g'). When exceeded, falls back to RAM (requires +- `--slow_low_memory).` +- `--verify` - Verify the proof natively, resulting in a boolean output. Useful for testing. + +### bb write_vk + +Write the verification key of a circuit. The circuit is constructed using quickly generated but invalid witnesses (which must be supplied in Barretenberg in order to expand ACIR black box opcodes), and no proof is constructed. 
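+
+*Illustrative example (not part of the generated help output; the scheme and paths below are placeholders):*
+
+```bash
+# Write an UltraHonk verification key for a Noir circuit to an explicit file path.
+bb write_vk --scheme ultra_honk -b ./target/circuit.json -o ./target/vk
+```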
+ +**Usage:** +```bash +bb write_vk [OPTIONS] +``` + +**Options:** + +- `-h,--help` - Print this help message and exit +- `-s,--scheme` - The type of proof to be constructed. This can specify a proving system, an accumulation scheme, or a particular type of circuit to be constructed and proven for some implicit scheme. Options: \{chonk, avm, ultra_honk\} + *Environment: `BB_SCHEME`* +- `-b,--bytecode_path` - Path to ACIR bytecode generated by Noir. +- `-o,--output_path` - Directory to write files or path of file to write, depending on subcommand. +- `--ivc_inputs_path` - For IVC, path to input stack with bytecode and witnesses. +- `-v,--verbose,--verbose_logging` - Output all logs to stderr. +- `-d,--debug_logging` - Output debug logs to stderr. +- `-c,--crs_path` - Path CRS directory. Missing CRS files will be retrieved from the internet. +- `--oracle_hash` - The hash function used by the prover as random oracle standing in for a verifier's challenge generation. Poseidon2 is to be used for proofs that are intended to be verified inside of a circuit. Keccak is optimized for verification in an Ethereum smart contract, where Keccak has a privileged position due to the existence of an EVM precompile. Starknet is optimized for verification in a Starknet smart contract, which can be generated using the Garaga library. Options: \{poseidon2, keccak, starknet\} +- `--ipa_accumulation` - Accumulate/Aggregate IPA (Inner Product Argument) claims +- `--verifier_type` - Is a verification key for use a standalone single circuit verifier (e.g. a SNARK or folding recursive verifier) or is it for an ivc verifier? `standalone` produces a verification key is sufficient for verifying proofs about a single circuit (including the non-encsapsulated use case where an IVC scheme is manually constructed via recursive UltraHonk proof verification). `standalone_hiding` is similar to `standalone` but is used for the last step where the structured trace is not utilized. `ivc` produces a verification key for verifying the stack of run though a dedicated ivc verifier class (currently the only option is the Chonk class) Options: \{standalone, standalone_hiding, ivc\} +- `--disable_zk` - Use a non-zk version of --scheme. This flag is set to false by default. + +### bb verify + +Verify a proof. + +**Usage:** +```bash +bb verify [OPTIONS] +``` + +**Options:** + +- `-h,--help` - Print this help message and exit +- `-i,--public_inputs_path` - Path to public inputs. +- `-p,--proof_path` - Path to a proof. +- `-k,--vk_path` - Path to a verification key. +- `-v,--verbose,--verbose_logging` - Output all logs to stderr. +- `-d,--debug_logging` - Output debug logs to stderr. +- `-s,--scheme` - The type of proof to be constructed. This can specify a proving system, an accumulation scheme, or a particular type of circuit to be constructed and proven for some implicit scheme. Options: \{chonk, avm, ultra_honk\} + *Environment: `BB_SCHEME`* +- `-c,--crs_path` - Path CRS directory. Missing CRS files will be retrieved from the internet. +- `--oracle_hash` - The hash function used by the prover as random oracle standing in for a verifier's challenge generation. Poseidon2 is to be used for proofs that are intended to be verified inside of a circuit. Keccak is optimized for verification in an Ethereum smart contract, where Keccak has a privileged position due to the existence of an EVM precompile. Starknet is optimized for verification in a Starknet smart contract, which can be generated using the Garaga library. 
Options: \{poseidon2, keccak, starknet\} +- `--disable_zk` - Use a non-zk version of --scheme. This flag is set to false by default. +- `--ipa_accumulation` - Accumulate/Aggregate IPA (Inner Product Argument) claims + +### bb write_solidity_verifier + +Write a Solidity smart contract suitable for verifying proofs of circuit satisfiability for the circuit with verification key at vk_path. Not all hash types are implemented due to efficiency concerns. + +**Usage:** +```bash +bb write_solidity_verifier [OPTIONS] +``` + +**Options:** + +- `-h,--help` - Print this help message and exit +- `-s,--scheme` - The type of proof to be constructed. This can specify a proving system, an accumulation scheme, or a particular type of circuit to be constructed and proven for some implicit scheme. Options: \{chonk, avm, ultra_honk\} + *Environment: `BB_SCHEME`* +- `-k,--vk_path` - Path to a verification key. +- `-o,--output_path` - Directory to write files or path of file to write, depending on subcommand. +- `-v,--verbose,--verbose_logging` - Output all logs to stderr. +- `--disable_zk` - Use a non-zk version of --scheme. This flag is set to false by default. +- `-c,--crs_path` - Path CRS directory. Missing CRS files will be retrieved from the internet. +- `--optimized` - Use the optimized Solidity verifier. + +### bb aztec_process + +Process Aztec contract artifacts: transpile and generate verification keys for all private functions. If input is a directory (and no output specified), recursively processes all artifacts found in the directory. + +**Usage:** +```bash +bb aztec_process [OPTIONS] +``` + +**Options:** + +- `-h,--help` - Print this help message and exit +- `-i,--input` - Input artifact JSON path or directory to search (optional, defaults to current directory) +- `-o,--output` - Output artifact JSON path (optional, same as input if not specified) +- `-f,--force` - Force regeneration of verification keys +- `-v,--verbose,--verbose_logging` - Output all logs to stderr. +- `-d,--debug_logging` - Output debug logs to stderr. + +### bb msgpack + +Msgpack API interface. + +**Usage:** +```bash +bb msgpack [OPTIONS] [SUBCOMMAND] +``` + +**Available Commands:** + +- `schema` - Output a msgpack schema encoded as JSON to stdout. +- `curve_constants` - Output curve constants as msgpack to stdout. +- `run` - Execute msgpack API commands from stdin or file. + +**Options:** + +- `-h,--help` - Print this help message and exit + + +#### Subcommands + +#### bb msgpack schema + +Output a msgpack schema encoded as JSON to stdout. + +**Usage:** +```bash +bb msgpack schema [OPTIONS] +``` + +**Options:** + +- `-h,--help` - Print this help message and exit +- `-v,--verbose,--verbose_logging` - Output all logs to stderr. + +#### bb msgpack curve_constants + +Output curve constants as msgpack to stdout. + +**Usage:** +```bash +bb msgpack curve_constants [OPTIONS] +``` + +**Options:** + +- `-h,--help` - Print this help message and exit +- `-v,--verbose,--verbose_logging` - Output all logs to stderr. + +#### bb msgpack run + +Execute msgpack API commands from stdin or file. + +**Usage:** +```bash +bb msgpack run [OPTIONS] +``` + +**Options:** + +- `-h,--help` - Print this help message and exit +- `-v,--verbose,--verbose_logging` - Output all logs to stderr. 
+- `-i,--input` - Input file containing msgpack buffers (defaults to stdin) +- `--max-clients` - Maximum concurrent clients for shared memory IPC server (default: 1) diff --git a/docs/docs-words.txt b/docs/docs-words.txt index 17ac4904ce4d..ebf82608046b 100644 --- a/docs/docs-words.txt +++ b/docs/docs-words.txt @@ -367,3 +367,8 @@ whoooaa flamegraphs mycontract notegetteroptions +Garaga +satisfiability +nullptr +favo +encsapsulated diff --git a/docs/scripts/cli_reference_generation/cli_config.sh b/docs/scripts/cli_reference_generation/cli_config.sh new file mode 100755 index 000000000000..8f4a0a712ad9 --- /dev/null +++ b/docs/scripts/cli_reference_generation/cli_config.sh @@ -0,0 +1,83 @@ +#!/bin/bash +# Shared CLI configuration for documentation generation scripts +# Source this file to get CLI configuration variables +# +# Usage: +# source "$(dirname "${BASH_SOURCE[0]}")/cli_config.sh" +# get_cli_config "aztec" +# echo "$CLI_DISPLAY_NAME" # "Aztec CLI" + +# Valid CLI names +readonly VALID_CLIS=("aztec" "aztec-wallet" "bb") + +# Get configuration for a CLI +# Sets: CLI_DISPLAY_NAME, CLI_TITLE, CLI_COMMAND, CLI_FORMAT, +# CLI_OUTPUT_FILE, CLI_SIDEBAR_POSITION, CLI_TAGS +# +# CLI_SIDEBAR_POSITION controls the order in Docusaurus sidebar: +# 1 = aztec (primary CLI, listed first) +# 2 = aztec-wallet (secondary CLI) +# 3 = bb (Barretenberg proving backend) +get_cli_config() { + local cli_name="$1" + + case "$cli_name" in + aztec) + CLI_DISPLAY_NAME="Aztec CLI" + CLI_TITLE="Aztec CLI Reference" + CLI_COMMAND="aztec" + CLI_FORMAT="commander" + CLI_OUTPUT_FILE="aztec_cli_reference.md" + CLI_SIDEBAR_POSITION="1" + CLI_TAGS="[cli, reference, autogenerated]" + ;; + aztec-wallet) + CLI_DISPLAY_NAME="Aztec Wallet CLI" + CLI_TITLE="Aztec Wallet CLI Reference" + CLI_COMMAND="aztec-wallet" + CLI_FORMAT="commander" + CLI_OUTPUT_FILE="aztec_wallet_cli_reference.md" + CLI_SIDEBAR_POSITION="2" + CLI_TAGS="[cli, reference, autogenerated, wallet]" + ;; + bb) + CLI_DISPLAY_NAME="Barretenberg CLI" + CLI_TITLE="Barretenberg CLI Reference" + CLI_COMMAND="bb" + CLI_FORMAT="cli11" + CLI_OUTPUT_FILE="bb_cli_reference.md" + CLI_SIDEBAR_POSITION="3" + CLI_TAGS="[cli, reference, autogenerated, barretenberg, proving]" + ;; + *) + echo "Error: Unknown CLI '$cli_name'. Valid options: ${VALID_CLIS[*]}" >&2 + return 1 + ;; + esac +} + +# Validate CLI name +validate_cli_name() { + local cli_name="$1" + for valid in "${VALID_CLIS[@]}"; do + if [[ "$cli_name" == "$valid" ]]; then + return 0 + fi + done + echo "Error: Invalid CLI name '$cli_name'. 
Must be one of: ${VALID_CLIS[*]}" >&2 + return 1 +} + +# Get install instructions for a CLI +get_cli_install_instructions() { + local cli_name="$1" + case "$cli_name" in + bb) + echo "Install bb from: https://github.com/AztecProtocol/aztec-packages" + ;; + *) + echo "Run: aztec-up " + ;; + esac +} + diff --git a/docs/scripts/cli_reference_generation/generate_cli_docs.sh b/docs/scripts/cli_reference_generation/generate_cli_docs.sh index 29d417b0f1bb..1e4359c704d6 100755 --- a/docs/scripts/cli_reference_generation/generate_cli_docs.sh +++ b/docs/scripts/cli_reference_generation/generate_cli_docs.sh @@ -5,14 +5,19 @@ # Examples: # ./scripts/cli_reference_generation/generate_cli_docs.sh aztec # ./scripts/cli_reference_generation/generate_cli_docs.sh aztec-wallet /tmp +# ./scripts/cli_reference_generation/generate_cli_docs.sh bb set -euo pipefail +# Get script directory and source shared config +readonly SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" +source "$SCRIPT_DIR/cli_config.sh" + # Validate arguments if [[ $# -lt 1 ]]; then echo "Error: CLI name is required" echo "Usage: $0 [output_dir]" - echo " cli_name: 'aztec' or 'aztec-wallet'" + echo " cli_name: ${VALID_CLIS[*]}" echo " output_dir: Output directory (default: current directory)" exit 1 fi @@ -20,46 +25,28 @@ fi CLI_NAME="$1" OUTPUT_DIR="${2:-.}" -# Validate CLI name -if [[ "$CLI_NAME" != "aztec" && "$CLI_NAME" != "aztec-wallet" ]]; then - echo "Error: Invalid CLI name '$CLI_NAME'. Must be 'aztec' or 'aztec-wallet'" - exit 1 -fi +# Validate and load CLI config +validate_cli_name "$CLI_NAME" || exit 1 +get_cli_config "$CLI_NAME" -# Get script directory -SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" -readonly SCRIPT_DIR OUTPUT_DIR CLI_NAME +readonly OUTPUT_DIR CLI_NAME -# Configuration (compatible with bash 3.2+) -case "$CLI_NAME" in - aztec) - DISPLAY_NAME="Aztec CLI" - TITLE="Aztec CLI Reference" - COMMAND="aztec" - JSON_FILE="$OUTPUT_DIR/aztec_cli_docs.json" - MD_FILE="$OUTPUT_DIR/aztec_cli_reference.md" - ;; - aztec-wallet) - DISPLAY_NAME="Aztec Wallet CLI" - TITLE="Aztec Wallet CLI Reference" - COMMAND="aztec-wallet" - JSON_FILE="$OUTPUT_DIR/aztec_wallet_cli_docs.json" - MD_FILE="$OUTPUT_DIR/aztec_wallet_cli_reference.md" - ;; -esac +# Derived paths +JSON_FILE="$OUTPUT_DIR/${CLI_OUTPUT_FILE%.md}.json" +MD_FILE="$OUTPUT_DIR/$CLI_OUTPUT_FILE" -echo "=== ${DISPLAY_NAME} Documentation Generator ===" +echo "=== ${CLI_DISPLAY_NAME} Documentation Generator ===" echo "" -echo "Step 1: Scanning ${COMMAND} CLI commands..." -python3 "$SCRIPT_DIR/scan_cli.py" --command "$COMMAND" --output "$JSON_FILE" +echo "Step 1: Scanning ${CLI_COMMAND} CLI commands (format: ${CLI_FORMAT})..." +python3 "$SCRIPT_DIR/scan_cli.py" --command "$CLI_COMMAND" --cli-format "$CLI_FORMAT" --output "$JSON_FILE" echo "" echo "Step 2: Generating markdown documentation..." 
python3 "$SCRIPT_DIR/transform_to_markdown.py" \ --input "$JSON_FILE" \ --output "$MD_FILE" \ - --title "$TITLE" + --title "$CLI_TITLE" echo "" echo "=== Documentation Generated ===" diff --git a/docs/scripts/cli_reference_generation/scan_cli.py b/docs/scripts/cli_reference_generation/scan_cli.py index bee2aa770ea3..c40e34389024 100755 --- a/docs/scripts/cli_reference_generation/scan_cli.py +++ b/docs/scripts/cli_reference_generation/scan_cli.py @@ -25,10 +25,19 @@ class CLIScanner: """Recursively scans a CLI command tree and extracts help information.""" - def __init__(self, base_command: str = "aztec"): + # Supported CLI formats + FORMAT_COMMANDER = "commander" # Commander.js (Node.js) - uses "Commands:" section + FORMAT_CLI11 = "cli11" # CLI11 (C++) - uses "Subcommands:" section + FORMAT_CUSTOM = "custom" # Custom format with uppercase section headers + FORMAT_AUTO = "auto" # Auto-detect format + + # Maximum recursion depth for scanning subcommands (prevents runaway recursion) + MAX_DEPTH = 5 + + def __init__(self, base_command: str = "aztec", cli_format: str = "auto"): self.base_command = base_command + self.cli_format = cli_format self.visited = set() # Track visited commands to avoid loops - self.help_cache = {} # Cache help output to detect duplicates def run_command(self, cmd: List[str]) -> Optional[str]: """Execute a command and return its output.""" @@ -180,6 +189,184 @@ def parse_custom_help(self, help_text: str) -> Dict[str, Any]: return result + def parse_cli11_help(self, help_text: str) -> Dict[str, Any]: + """Parse CLI11 (C++ library) style help output (like 'bb'). + CLI11 format: + - Description on first lines (before Usage:) + - Usage: command [OPTIONS] [SUBCOMMAND] + - Options: with format -short,--long [ENV_VAR] followed by indented description + - Subcommands: with name followed by description (may wrap to next lines) + + Returns: + Parsed help structure, or error dict if parsing fails. + """ + result = { + "usage": "", + "description": "", + "options": [], + "commands": [] + } + + # Validate input + if not help_text or not isinstance(help_text, str): + return { + "error": "invalid_help_text", + "error_message": "Help text is empty or not a string", + "usage": "", + "description": "", + "options": [], + "commands": [] + } + + try: + lines = help_text.split('\n') + current_section = None + description_lines = [] + current_option = None + current_command = None + + for line in lines: + # Extract usage + if line.strip().startswith('Usage:'): + result["usage"] = line.replace('Usage:', '').strip() + current_section = 'post_usage' + continue + + # Collect description (lines before Usage:) + if current_section is None and line.strip(): + description_lines.append(line.strip()) + continue + + # Section headers + if line.strip() == 'Options:': + current_section = 'options' + current_option = None + continue + elif line.strip() == 'Subcommands:': + current_section = 'subcommands' + current_command = None + continue + + # Parse options section + if current_section == 'options': + # Option line: starts with dash(es), format: -h,--help or --long [ENV_VAR] + # Regex breakdown: + # ^\s+ - Leading whitespace (indentation) + # (-[^\s,]+ - Short flag: dash followed by non-space/comma chars (e.g., -h, -v) + # (?:,\s*--[^\s\[]+)? - Optional: comma + long flag without brackets (e.g., ,--help) + # (?:\s*,\s*--[^\s\[]+)? - Optional: another long flag variant (for multi-flag options) + # )\s* - End of flags group + optional whitespace + # (\[[^\]]+\])? 
- Optional: environment variable in brackets (e.g., [BB_ENV]) + # \s*$ - Trailing whitespace + end of line + option_match = re.match(r'^\s+(-[^\s,]+(?:,\s*--[^\s\[]+)?(?:\s*,\s*--[^\s\[]+)?)\s*(\[[^\]]+\])?\s*$', line) + if option_match: + # This is an option with its description on next line(s) + flags = option_match.group(1).strip() + env_var = option_match.group(2).strip('[]') if option_match.group(2) else None + current_option = { + "flags": flags, + "env": env_var, + "description": "" + } + result["options"].append(current_option) + continue + + # Check for option with inline description: -h,--help Description here + # Regex breakdown: + # ^\s+ - Leading whitespace (indentation) + # (-[^\s]+ - Short flag: dash followed by non-space chars + # (?:,\s*--[^\s]+)? - Optional: comma + long flag (e.g., ,--help) + # (?:\s*,\s*--[^\s]+)? - Optional: another long flag variant + # ) - End of flags group + # \s{2,} - Two or more spaces (separator between flags and description) + # (.+)$ - Description text to end of line + inline_option_match = re.match(r'^\s+(-[^\s]+(?:,\s*--[^\s]+)?(?:\s*,\s*--[^\s]+)?)\s{2,}(.+)$', line) + if inline_option_match: + flags = inline_option_match.group(1).strip() + desc = inline_option_match.group(2).strip() + current_option = { + "flags": flags, + "env": None, + "description": desc + } + result["options"].append(current_option) + continue + + # Continuation of option description (indented text) + if current_option and line.strip() and re.match(r'^\s{10,}', line): + desc_text = line.strip() + if current_option["description"]: + current_option["description"] += " " + desc_text + else: + current_option["description"] = desc_text + continue + + # Parse subcommands section + if current_section == 'subcommands': + # Subcommand line: name followed by description (2+ spaces between) + # Regex breakdown: + # ^(\S+) - Command name at start of line (non-whitespace chars) + # \s{2,} - Two or more spaces (separator) + # (.+)$ - Description text to end of line + cmd_match = re.match(r'^(\S+)\s{2,}(.+)$', line) + if cmd_match: + cmd_name = cmd_match.group(1).strip() + cmd_desc = cmd_match.group(2).strip() + current_command = { + "name": cmd_name, + "signature": cmd_name, + "description": cmd_desc + } + result["commands"].append(current_command) + continue + + # Continuation of subcommand description (indented text) + if current_command and line.strip() and re.match(r'^\s{2,}', line): + desc_text = line.strip() + current_command["description"] += " " + desc_text + continue + + # Set description from collected lines + if description_lines: + result["description"] = " ".join(description_lines) + + return result + + except Exception as e: + # Return error dict if parsing fails unexpectedly + return { + "error": "parse_error", + "error_message": f"Failed to parse CLI11 help output: {str(e)}", + "usage": result.get("usage", ""), + "description": result.get("description", ""), + "options": result.get("options", []), + "commands": result.get("commands", []) + } + + def _detect_or_use_format(self, help_output: str) -> str: + """Return the format to use: explicit if set, otherwise auto-detect.""" + if self.cli_format != self.FORMAT_AUTO: + return self.cli_format + + # Auto-detect by checking for section headers at start of lines + has_commands_section = re.search(r'^Commands:', help_output, re.MULTILINE) + has_subcommands_section = re.search(r'^Subcommands:', help_output, re.MULTILINE) + has_options_section = re.search(r'^Options:', help_output, re.MULTILINE) + has_usage_line = 
re.search(r'^Usage:', help_output, re.MULTILINE) + + if has_usage_line and has_commands_section: + return self.FORMAT_COMMANDER + elif has_usage_line and has_subcommands_section: + return self.FORMAT_CLI11 + elif has_usage_line and has_options_section: + # CLI11 leaf command (has options but no subcommands) + return self.FORMAT_CLI11 + elif re.search(r'^\s{2}[A-Z][A-Z\s]+$', help_output, re.MULTILINE): + # Custom format with uppercase section headers + return self.FORMAT_CUSTOM + else: + return "raw" + def scan_command(self, cmd_path: List[str], depth: int = 0, parent_help: Optional[str] = None) -> Dict[str, Any]: """Recursively scan a command and its subcommands.""" cmd_str = ' '.join(cmd_path) @@ -191,7 +378,7 @@ def scan_command(self, cmd_path: List[str], depth: int = 0, parent_help: Optiona self.visited.add(cmd_str) # Limit depth to prevent runaway recursion - if depth > 5: + if depth > self.MAX_DEPTH: return {"error": "max_depth_exceeded"} print(f"{' ' * depth}Scanning: {cmd_str}") @@ -223,38 +410,37 @@ def scan_command(self, cmd_path: List[str], depth: int = 0, parent_help: Optiona "error_preview": help_output[:200] } - # Determine help format and parse - if 'Usage:' in help_output and 'Commands:' in help_output: - # Commander.js style - parsed = self.parse_commander_help(help_output) - parsed["format"] = "commander" + # Determine format and parse accordingly + detected_format = self._detect_or_use_format(help_output) + parsed = self._parse_help(detected_format, help_output) - # Recursively scan subcommands + # Recursively scan subcommands for formats that support them + if detected_format in (self.FORMAT_COMMANDER, self.FORMAT_CLI11): subcommands = {} for cmd in parsed.get("commands", []): cmd_name = cmd["name"] if cmd_name != "help": # Skip help command sub_result = self.scan_command(cmd_path + [cmd_name], depth + 1, help_output) - # Include all subcommands, even ones with errors - # Error commands will be rendered with stub sections subcommands[cmd_name] = sub_result if subcommands: parsed["subcommands"] = subcommands - elif re.search(r'^\s{2}[A-Z][A-Z\s]+$', help_output, re.MULTILINE): - # Custom format (like 'aztec start') - detected after ANSI stripping - # Look for section headers like " MISC", " SANDBOX", etc. 
+ return parsed + + def _parse_help(self, format_type: str, help_output: str) -> Dict[str, Any]: + """Parse help output based on detected format type.""" + if format_type == self.FORMAT_COMMANDER: + parsed = self.parse_commander_help(help_output) + parsed["format"] = "commander" + elif format_type == self.FORMAT_CLI11: + parsed = self.parse_cli11_help(help_output) + parsed["format"] = "cli11" + elif format_type == self.FORMAT_CUSTOM: parsed = self.parse_custom_help(help_output) parsed["format"] = "custom" - else: - # Unknown format, just store raw - parsed = { - "format": "raw", - "raw_help": help_output - } - + parsed = {"format": "raw", "raw_help": help_output} return parsed def scan(self) -> Dict[str, Any]: @@ -273,6 +459,10 @@ def main(): help='Output format (default: json)') parser.add_argument('--command', '-c', default='aztec', help='Base command to scan (default: aztec)') + parser.add_argument('--cli-format', dest='cli_format', + choices=['auto', 'commander', 'cli11', 'custom'], + default='auto', + help='CLI help format: commander (Node.js), cli11 (C++), custom, or auto-detect (default: auto)') args = parser.parse_args() @@ -285,7 +475,7 @@ def main(): args.output = args.output.replace('.yaml', '.json').replace('.yml', '.json') # Scan the CLI - scanner = CLIScanner(base_command=args.command) + scanner = CLIScanner(base_command=args.command, cli_format=args.cli_format) result = scanner.scan() # Write output diff --git a/docs/scripts/cli_reference_generation/transform_to_markdown.py b/docs/scripts/cli_reference_generation/transform_to_markdown.py index 782a8705a3f6..0c0f8b4d09a2 100755 --- a/docs/scripts/cli_reference_generation/transform_to_markdown.py +++ b/docs/scripts/cli_reference_generation/transform_to_markdown.py @@ -23,6 +23,9 @@ class MarkdownGenerator: """Generates markdown documentation from structured CLI data.""" + # Maximum depth for rendering nested subcommands (should match CLIScanner.MAX_DEPTH) + MAX_DEPTH = 5 + def __init__(self, data: Dict[str, Any], config: Dict[str, Any] = None): self.data = data # Merge provided config with defaults @@ -40,7 +43,7 @@ def get_default_config(self) -> Dict[str, Any]: "heading_prefix": "#", "show_usage": True, "show_env_vars": True, - "max_depth": 5, + "max_depth": self.MAX_DEPTH, "option_table_format": "list", # or "table" } @@ -71,7 +74,11 @@ def generate(self) -> str: depth=1 )) - return "\n".join(sections) + # Join sections and normalize multiple blank lines to single blank lines + result = "\n".join(sections) + while "\n\n\n" in result: + result = result.replace("\n\n\n", "\n\n") + return result def generate_toc(self, cmd_data: Dict[str, Any], cmd_name: str, depth: int = 0) -> str: """Generate table of contents.""" @@ -86,7 +93,7 @@ def generate_toc(self, cmd_data: Dict[str, Any], cmd_name: str, depth: int = 0) lines.append(f"{indent}- [{cmd_name}](#{slug})") # Add subcommands - if cmd_data.get("format") == "commander" and "subcommands" in cmd_data: + if cmd_data.get("format") in ("commander", "cli11") and "subcommands" in cmd_data: for sub_name, sub_data in cmd_data["subcommands"].items(): # Include all commands in TOC, even ones with errors full_name = f"{cmd_name} {sub_name}" @@ -101,9 +108,12 @@ def slugify(self, text: str) -> str: return text.lower().replace(' ', '-').replace('/', '-') def escape_html_entities(self, text: str) -> str: - """Escape HTML-like syntax (angle brackets) to prevent MDX parsing errors.""" + """Escape HTML-like syntax and MDX special characters to prevent parsing errors.""" # Replace < and > with 
their HTML entities to prevent MDX from treating them as tags - return text.replace('<', '<').replace('>', '>') + text = text.replace('<', '<').replace('>', '>') + # Escape curly braces to prevent MDX from treating them as JSX expressions + text = text.replace('{', '\\{').replace('}', '\\}') + return text def generate_command_docs(self, cmd_name: str, cmd_data: Dict[str, Any], depth: int = 1) -> str: """Generate documentation for a single command.""" @@ -137,6 +147,16 @@ def generate_command_docs(self, cmd_name: str, cmd_data: Dict[str, Any], depth: full_name = f"{cmd_name} {sub_name}" sections.append(self.generate_command_docs(full_name, sub_data, depth + 1)) + elif cmd_data.get("format") == "cli11": + sections.append(self.render_cli11_command(cmd_data, depth)) + + # Recursively render subcommands + if "subcommands" in cmd_data: + sections.append(f"\n{heading}# Subcommands\n") + for sub_name, sub_data in cmd_data["subcommands"].items(): + full_name = f"{cmd_name} {sub_name}" + sections.append(self.generate_command_docs(full_name, sub_data, depth + 1)) + elif cmd_data.get("format") == "custom": sections.append(self.render_custom_command(cmd_data, depth)) @@ -180,6 +200,58 @@ def render_commander_command(self, cmd_data: Dict[str, Any], depth: int) -> str: return "\n".join(sections) + def render_cli11_command(self, cmd_data: Dict[str, Any], depth: int) -> str: + """Render a CLI11 style command (C++ CLI library, used by bb).""" + sections = [] + + # Description + if cmd_data.get("description"): + sections.append(self.escape_html_entities(cmd_data["description"]) + "\n") + + # Usage + if self.config.get("show_usage") and cmd_data.get("usage"): + sections.append("**Usage:**") + sections.append("```bash") + sections.append(cmd_data["usage"]) + sections.append("```\n") + + # Commands list (subcommands) - use same header as Commander.js for consistency + if cmd_data.get("commands"): + sections.append("**Available Commands:**\n") + for cmd in cmd_data["commands"]: + # Use signature if available (like Commander.js), fall back to name + signature = cmd.get('signature', cmd['name']) + sections.append(f"- `{signature}` - {self.escape_html_entities(cmd['description'])}") + sections.append("") + + # Options + if cmd_data.get("options"): + sections.append("**Options:**\n") + sections.append(self.render_cli11_options(cmd_data["options"])) + sections.append("") + + return "\n".join(sections) + + def render_cli11_options(self, options: List[Dict[str, Any]]) -> str: + """Render CLI11 options as a bulleted list.""" + lines = [] + + for opt in options: + flags = opt.get('flags', '') + desc = self.escape_html_entities(opt.get('description', '')) + env = opt.get('env') + + line = f"- `{flags}`" + if desc: + line += f" - {desc}" + lines.append(line) + + # Show environment variable if available + if self.config.get("show_env_vars") and env: + lines.append(f" *Environment: `{env}`*") + + return "\n".join(lines) + def render_custom_command(self, cmd_data: Dict[str, Any], depth: int) -> str: """Render a custom formatted command (like 'aztec start').""" sections = [] diff --git a/docs/scripts/cli_reference_generation/update_all_cli_docs.sh b/docs/scripts/cli_reference_generation/update_all_cli_docs.sh index 99955011074d..9e12d9e7b222 100755 --- a/docs/scripts/cli_reference_generation/update_all_cli_docs.sh +++ b/docs/scripts/cli_reference_generation/update_all_cli_docs.sh @@ -1,33 +1,33 @@ #!/bin/bash -# Script to regenerate auto-generated CLI documentation for both aztec and aztec-wallet +# Script to regenerate 
auto-generated CLI documentation for all supported CLIs # Usage: ./scripts/cli_reference_generation/update_all_cli_docs.sh [target_version] [output_dir] # # Examples: # ./scripts/cli_reference_generation/update_all_cli_docs.sh # Updates all versions # ./scripts/cli_reference_generation/update_all_cli_docs.sh v2.0.2 # Updates only v2.0.2 # ./scripts/cli_reference_generation/update_all_cli_docs.sh current # Updates only main docs folder -# ./scripts/cli_reference_generation/update_all_cli_docs.sh v2.0.2 /tmp/ # Outputs both to /tmp/ +# ./scripts/cli_reference_generation/update_all_cli_docs.sh v2.0.2 /tmp/ # Outputs all to /tmp/ set -euo pipefail +# Get script directory and source shared config SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" +source "$SCRIPT_DIR/cli_config.sh" + TARGET_VERSION="${1:-all}" OUTPUT_DIR="${2:-}" readonly SCRIPT_DIR TARGET_VERSION OUTPUT_DIR -# Array of CLIs to process -readonly CLIS=("aztec" "aztec-wallet") - echo "=== Update All CLI Documentation Script ===" echo "" echo "This script will update documentation for:" -for cli in "${CLIS[@]}"; do +for cli in "${VALID_CLIS[@]}"; do echo " - $cli CLI" done echo "" # Process each CLI using the unified script -for cli in "${CLIS[@]}"; do +for cli in "${VALID_CLIS[@]}"; do echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━" echo "Updating $cli CLI Documentation" echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━" @@ -47,16 +47,12 @@ echo "✅ All CLI Documentation Updated" echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━" echo "" echo "Files updated:" -for cli in "${CLIS[@]}"; do - if [[ "$cli" == "aztec" ]]; then - filename="aztec_cli_reference.md" - else - filename="aztec_wallet_cli_reference.md" - fi +for cli in "${VALID_CLIS[@]}"; do + get_cli_config "$cli" if [[ -n "$OUTPUT_DIR" ]]; then - echo " - $OUTPUT_DIR/$filename" + echo " - $OUTPUT_DIR/$CLI_OUTPUT_FILE" else - echo " - $filename" + echo " - $CLI_OUTPUT_FILE" fi done echo "" diff --git a/docs/scripts/cli_reference_generation/update_cli_docs.sh b/docs/scripts/cli_reference_generation/update_cli_docs.sh index bf277cee410a..db097d2de886 100755 --- a/docs/scripts/cli_reference_generation/update_cli_docs.sh +++ b/docs/scripts/cli_reference_generation/update_cli_docs.sh @@ -7,14 +7,19 @@ # ./scripts/cli_reference_generation/update_cli_docs.sh aztec-wallet current # Updates aztec-wallet CLI, current only # ./scripts/cli_reference_generation/update_cli_docs.sh aztec v2.0.2 # Updates aztec CLI, v2.0.2 only # ./scripts/cli_reference_generation/update_cli_docs.sh aztec v2.0.2 /tmp/ # Outputs to /tmp/aztec_cli_reference.md +# ./scripts/cli_reference_generation/update_cli_docs.sh bb # Updates bb CLI, all versions set -euo pipefail # Added 'u' for undefined variable check, 'o pipefail' for pipe failures +# Get script directory and source shared config (early, for VALID_CLIS in usage text) +SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" +source "$SCRIPT_DIR/cli_config.sh" + # Validate arguments if [[ $# -lt 1 ]]; then echo "Error: CLI name is required" echo "Usage: $0 [target_version] [output_dir]" - echo " cli_name: 'aztec' or 'aztec-wallet'" + echo " cli_name: ${VALID_CLIS[*]}" echo " target_version: 'all' (default), 'current', or version like 'v2.0.2'" echo " output_dir: Optional custom output directory (skips deployment to docs folders)" exit 1 @@ -24,11 +29,11 @@ CLI_NAME="$1" TARGET_VERSION="${2:-all}" OUTPUT_DIR="${3:-}" -# Validate CLI name -if [[ "$CLI_NAME" != "aztec" && "$CLI_NAME" != "aztec-wallet" ]]; then - echo "Error: Invalid CLI name 
'$CLI_NAME'. Must be 'aztec' or 'aztec-wallet'" - exit 1 -fi +# Validate CLI name using shared validation +validate_cli_name "$CLI_NAME" || exit 1 + +# Load CLI configuration from shared config (needed for version check) +get_cli_config "$CLI_NAME" # Version check: ensure installed CLI version matches target version check_cli_version() { @@ -43,7 +48,7 @@ check_cli_version() { # Check if CLI is installed if ! command -v "$cli_cmd" &> /dev/null; then echo "Error: '$cli_cmd' command not found. Please install it first." - echo " Run: aztec-up ${target_ver#v}" + echo " $(get_cli_install_instructions "$cli_cmd")" exit 1 fi @@ -69,7 +74,7 @@ check_cli_version() { echo "║ help output. To generate accurate docs for $target_ver, " echo "║ you must have that version installed. ║" echo "╠════════════════════════════════════════════════════════════════╣" - echo "║ To fix, run: aztec-up $target_ver_normalized" + echo "║ To fix: $(get_cli_install_instructions "$cli_cmd")" echo "╚════════════════════════════════════════════════════════════════╝" echo "" read -p "Continue anyway? (y/N): " -n 1 -r @@ -84,10 +89,10 @@ check_cli_version() { fi } -check_cli_version "$CLI_NAME" "$TARGET_VERSION" +# Use CLI_COMMAND (the actual executable name) for version check +check_cli_version "$CLI_COMMAND" "$TARGET_VERSION" # Constants -SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" DOCS_ROOT="$(cd "$SCRIPT_DIR/../.." && pwd)" readonly SCRIPT_DIR DOCS_ROOT TARGET_VERSION CLI_NAME @@ -102,30 +107,12 @@ readonly TEMP_JSON="$TEMP_DIR/cli_docs.json" readonly TEMP_MD="$TEMP_DIR/cli_auto.md" readonly TEMP_WITH_FRONTMATTER="$TEMP_DIR/cli_final.md" -# Configuration per CLI (compatible with bash 3.2+) -case "$CLI_NAME" in - aztec) - DISPLAY_NAME="Aztec CLI Reference" - TITLE="Aztec CLI Reference" - OUTPUT_FILE="aztec_cli_reference.md" - SIDEBAR_POSITION="1" - COMMAND="aztec" - ;; - aztec-wallet) - DISPLAY_NAME="Aztec Wallet CLI Reference" - TITLE="Aztec Wallet CLI Reference" - OUTPUT_FILE="aztec_wallet_cli_reference.md" - SIDEBAR_POSITION="2" - COMMAND="aztec-wallet" - ;; -esac - -echo "=== ${DISPLAY_NAME} Documentation Update Script ===" +echo "=== ${CLI_DISPLAY_NAME} Documentation Update Script ===" echo "" # Step 1: Generate JSON from CLI -echo "Step 1: Scanning ${COMMAND} CLI commands..." -python3 "$SCRIPT_DIR/scan_cli.py" --command "$COMMAND" --output "$TEMP_JSON" +echo "Step 1: Scanning ${CLI_COMMAND} CLI commands (format: ${CLI_FORMAT})..." +python3 "$SCRIPT_DIR/scan_cli.py" --command "$CLI_COMMAND" --cli-format "$CLI_FORMAT" --output "$TEMP_JSON" # Step 2: Generate markdown echo "" @@ -133,26 +120,20 @@ echo "Step 2: Generating markdown documentation..." python3 "$SCRIPT_DIR/transform_to_markdown.py" \ --input "$TEMP_JSON" \ --output "$TEMP_MD" \ - --title "$TITLE" + --title "$CLI_TITLE" # Step 3: Add front-matter - -# Determine tags based on CLI -TAGS="[cli, reference, autogenerated" -[[ "$CLI_NAME" == "aztec-wallet" ]] && TAGS+=", wallet" -TAGS+="]" - cat > "$TEMP_WITH_FRONTMATTER" << EOF --- -title: ${TITLE} -description: Comprehensive auto-generated reference for the ${DISPLAY_NAME} command-line interface with all commands and options. -tags: ${TAGS} -sidebar_position: ${SIDEBAR_POSITION} +title: ${CLI_TITLE} +description: Comprehensive auto-generated reference for the ${CLI_DISPLAY_NAME} command-line interface with all commands and options. 
+tags: ${CLI_TAGS} +sidebar_position: ${CLI_SIDEBAR_POSITION} --- -# ${TITLE} +# ${CLI_TITLE} -*This documentation is auto-generated from the \`${COMMAND}\` CLI help output.* +*This documentation is auto-generated from the \`${CLI_COMMAND}\` CLI help output.* EOF @@ -170,7 +151,7 @@ if [[ -n "$OUTPUT_DIR" ]]; then mkdir -p "$OUTPUT_DIR" fi - output_file="$OUTPUT_DIR/$OUTPUT_FILE" + output_file="$OUTPUT_DIR/$CLI_OUTPUT_FILE" cp "$TEMP_WITH_FRONTMATTER" "$output_file" echo " ✓ Written to: $output_file" echo "" @@ -197,7 +178,7 @@ update_version() { return 1 fi - cp "$TEMP_WITH_FRONTMATTER" "$target_dir/$OUTPUT_FILE" + cp "$TEMP_WITH_FRONTMATTER" "$target_dir/$CLI_OUTPUT_FILE" echo " ✓ Updated $version" return 0 } @@ -207,18 +188,27 @@ case "$TARGET_VERSION" in # Update main docs update_version "current" || true - # Update all versioned docs - if [[ -f "$DOCS_ROOT/versions.json" ]]; then + # Update all versioned docs (check both versions.json and developer_versions.json) + VERSIONS_FILE="" + if [[ -f "$DOCS_ROOT/developer_versions.json" ]]; then + VERSIONS_FILE="$DOCS_ROOT/developer_versions.json" + elif [[ -f "$DOCS_ROOT/versions.json" ]]; then + VERSIONS_FILE="$DOCS_ROOT/versions.json" + fi + + if [[ -n "$VERSIONS_FILE" ]]; then # Use jq if available for robust JSON parsing, otherwise fallback to grep if command -v jq &> /dev/null; then - versions=$(jq -r '.[]' "$DOCS_ROOT/versions.json") + versions=$(jq -r '.[]' "$VERSIONS_FILE") else - versions=$(grep -o '"v[^"]*"' "$DOCS_ROOT/versions.json" | tr -d '"') + versions=$(grep -o '"v[^"]*"' "$VERSIONS_FILE" | tr -d '"') fi for version in $versions; do update_version "$version" || true done + else + echo " Warning: No versions.json found, skipping versioned docs" fi ;; current) @@ -233,5 +223,5 @@ esac echo "" echo "=== Documentation Update Complete ===" echo "" -echo "Files updated with auto-generated ${DISPLAY_NAME} reference." +echo "Files updated with auto-generated ${CLI_DISPLAY_NAME} reference." echo "You can now commit these changes to the repository."
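For reference, a minimal sketch of how the new tooling is invoked end to end, based on the usage comments in the scripts above (the `/tmp` output directory is only an example):

```bash
# From the docs/ directory: regenerate the bb reference for all documented versions
# and deploy it into the current and versioned docs folders.
./scripts/cli_reference_generation/update_cli_docs.sh bb

# Or write a standalone JSON + markdown copy into a custom directory,
# without deploying to the docs folders.
./scripts/cli_reference_generation/generate_cli_docs.sh bb /tmp
```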