From ca1e7308590304f6f6a6045f345f8afb7b228632 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20Gon=C3=A7alves?= Date: Wed, 26 Feb 2025 19:31:16 +0000 Subject: [PATCH 001/332] Corrected optional monitoring containers on README --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 39762a67..9b7e89d5 100644 --- a/README.md +++ b/README.md @@ -126,7 +126,7 @@ Optional variables: - `TG_API_KEY` - Telegram API key - `TG_CHAT_ID` - Telegram chat ID -- `MONITORING_CONTAINERS` - will enable monitoring containers (`filebeat`, `cadvisor`, `prometheus`) +- `MONITORING_CONTAINERS` - will enable monitoring containers (`cadvisor`, `node-exporter`) #### Node initialization from backup From 3e84f9634b62005f608d7f24a275c5abce397a5a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20Gon=C3=A7alves?= Date: Wed, 26 Feb 2025 19:42:18 +0000 Subject: [PATCH 002/332] Updated .env variables in README --- README.md | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/README.md b/README.md index 9b7e89d5..a739f119 100644 --- a/README.md +++ b/README.md @@ -116,8 +116,8 @@ You should specify the following environment variables: - `DOCKER_LVMPY_STREAM` - stream of `docker-lvmpy` to use - `CONTAINER_CONFIGS_STREAM` - stream of `skale-node` to use - `ENDPOINT` - RPC endpoint of the node in the network where SKALE Manager is deployed -- `MANAGER_CONTRACTS_ABI_URL` - URL to SKALE Manager contracts ABI and addresses -- `IMA_CONTRACTS_ABI_URL` - URL to IMA contracts ABI and addresses +- `MANAGER_CONTRACTS_ALIAS_OR_ADDRESS` - SKALE Manager main contract alias or address +- `IMA_CONTRACTS_ALIAS_OR_ADDRESS` - IMA main contract alias or address - `FILEBEAT_URL` - URL to the Filebeat log server - `ENV_TYPE` - environement type (mainnet, testnet, etc) @@ -550,8 +550,8 @@ You should specify the following environment variables: - `DOCKER_LVMPY_STREAM` - stream of `docker-lvmpy` to use - `CONTAINER_CONFIGS_STREAM` - stream of 
`skale-node` to use - `ENDPOINT` - RPC endpoint of the node in the network where SKALE Manager is deployed -- `MANAGER_CONTRACTS_ABI_URL` - URL to SKALE Manager contracts ABI and addresses -- `IMA_CONTRACTS_ABI_URL` - URL to IMA contracts ABI and addresses +- `MANAGER_CONTRACTS_ALIAS_OR_ADDRESS` - SKALE Manager main contract alias or address +- `IMA_CONTRACTS_ALIAS_OR_ADDRESS` - IMA main contract alias or address - `SCHAIN_NAME` - name of the SKALE chain to sync - `ENV_TYPE` - environement type (mainnet, testnet, etc) From 6c724c91a2e55a74813ee989d7760de3827a0f9e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20Gon=C3=A7alves?= Date: Thu, 27 Feb 2025 13:44:25 +0000 Subject: [PATCH 003/332] Updated README to be more clear and up to date. --- README.md | 186 ++++++++++++++++++++++++++---------------------------- 1 file changed, 91 insertions(+), 95 deletions(-) diff --git a/README.md b/README.md index a739f119..f0556225 100644 --- a/README.md +++ b/README.md @@ -8,30 +8,30 @@ SKALE Node CLI, part of the SKALE suite of validator tools, is the command line ## Table of Contents -1. [Installation](#installation) -2. [CLI usage](#cli-usage) - 2.1 [Top level commands](#top-level-commands) - 2.2 [Node](#node-commands) - 2.3 [Wallet](#wallet-commands) - 2.4 [sChains](#schain-commands) - 2.5 [Health](#health-commands) - 2.6 [SSL](#ssl-commands) - 2.7 [Logs](#logs-commands) - 2.8 [Resources allocation](#resources-allocation-commands) - 2.9 [Validate](#validate-commands) -3. [Sync CLI usage](#sync-cli-usage) - 3.1 [Top level commands](#top-level-commands-sync) - 3.2 [Sync node commands](#sync-node-commands) -4. [Exit codes](#exit-codes) -5. [Development](#development) +1. [Installation](#installation) +2. 
[CLI usage](#cli-usage) + 2.1 [Top level commands](#top-level-commands) + 2.2 [Node](#node-commands) + 2.3 [Wallet](#wallet-commands) + 2.4 [sChains](#schain-commands) + 2.5 [Health](#health-commands) + 2.6 [SSL](#ssl-commands) + 2.7 [Logs](#logs-commands) + 2.8 [Resources allocation](#resources-allocation-commands) + 2.9 [Validate](#validate-commands) +3. [Sync CLI usage](#sync-cli-usage) + 3.1 [Top level commands](#top-level-commands-sync) + 3.2 [Sync node commands](#sync-node-commands) +4. [Exit codes](#exit-codes) +5. [Development](#development) ## Installation -- Prerequisites +- Prerequisites Ensure that the following package is installed: **docker**, **docker-compose** (1.27.4+) -- Download the executable +- Download the executable ```shell VERSION_NUM={put the version number here} && sudo -E bash -c "curl -L https://github.com/skalenetwork/node-cli/releases/download/$VERSION_NUM/skale-$VERSION_NUM-`uname -s`-`uname -m` > /usr/local/bin/skale" @@ -43,13 +43,13 @@ For Sync node version: VERSION_NUM={put the version number here} && sudo -E bash -c "curl -L https://github.com/skalenetwork/node-cli/releases/download/$VERSION_NUM/skale-$VERSION_NUM-`uname -s`-`uname -m`-sync > /usr/local/bin/skale" ``` -- Apply executable permissions to the downloaded binary: +- Apply executable permissions to the downloaded binary: ```shell chmod +x /usr/local/bin/skale ``` -- Test the installation +- Test the installation ```shell skale --help @@ -77,7 +77,7 @@ skale version Options: -- `--short` - prints version only, without additional text. +- `--short` - prints version only, without additional text. ### Node commands @@ -99,7 +99,7 @@ Options: Initialize a SKALE node on current machine -> :warning: **Please avoid re-initialization**: First run `skale node info` to confirm current state of intialization. +> :warning: **Avoid re-initializing a node that’s already initialized**: Run `skale node info` first to confirm the current initialization state. 
```shell skale node init [ENV_FILE] @@ -111,22 +111,26 @@ Arguments: You should specify the following environment variables: -- `SGX_SERVER_URL` - SGX server URL -- `DISK_MOUNTPOINT` - disk mount point for storing sChains data -- `DOCKER_LVMPY_STREAM` - stream of `docker-lvmpy` to use -- `CONTAINER_CONFIGS_STREAM` - stream of `skale-node` to use -- `ENDPOINT` - RPC endpoint of the node in the network where SKALE Manager is deployed -- `MANAGER_CONTRACTS_ALIAS_OR_ADDRESS` - SKALE Manager main contract alias or address -- `IMA_CONTRACTS_ALIAS_OR_ADDRESS` - IMA main contract alias or address -- `FILEBEAT_URL` - URL to the Filebeat log server -- `ENV_TYPE` - environement type (mainnet, testnet, etc) +- `SGX_SERVER_URL` - SGX server URL +- `DISK_MOUNTPOINT` - disk mount point for storing sChains data +- `DOCKER_LVMPY_STREAM` - stream of `docker-lvmpy` to use +- `CONTAINER_CONFIGS_STREAM` - stream of `skale-node` to use +- `ENDPOINT` - RPC endpoint of the node in the network where SKALE Manager is deployed +- `MANAGER_CONTRACTS_ALIAS_OR_ADDRESS` - SKALE Manager main contract alias or address +- `IMA_CONTRACTS_ALIAS_OR_ADDRESS` - IMA main contract alias or address +- `FILEBEAT_URL` - URL to the Filebeat log server +- `ENV_TYPE` - environment type (e.g., 'mainnet', 'testnet', 'qanet', 'devnet') +> In `MANAGER_CONTRACTS_ALIAS_OR_ADDRESS` and `IMA_CONTRACTS_ALIAS_OR_ADDRESS` pass a recognized alias (e.g., 'Mainnet', 'Holesky', 'local') or a direct contract address. +> :warning: If the alias you pass is not recognized by underlying skale library, you must provide a direct contract address. Optional variables: -- `TG_API_KEY` - Telegram API key -- `TG_CHAT_ID` - Telegram chat ID -- `MONITORING_CONTAINERS` - will enable monitoring containers (`cadvisor`, `node-exporter`) +- `TG_API_KEY` - Telegram API key +- `TG_CHAT_ID` - Telegram chat ID +- `MONITORING_CONTAINERS` - will enable monitoring containers (`cadvisor`, `node-exporter`). 
+ +> Filebeat is always enabled and requires `FILEBEAT_URL`, it is **not optional**. #### Node initialization from backup @@ -153,7 +157,6 @@ Arguments: - `BACKUP_FOLDER_PATH` - path to the folder where the backup file will be saved - #### Node Registration ```shell @@ -162,13 +165,13 @@ skale node register Required arguments: -- `--ip` - public IP for RPC connections and consensus -- `--domain`/`-d` - SKALE node domain name -- `--name` - SKALE node name +- `--ip` - public IP for RPC connections and consensus +- `--domain`/`-d` - SKALE node domain name +- `--name` - SKALE node name Optional arguments: -- `--port` - public port - beginning of the port range for node SKALE Chains (default: `10000`) +- `--port` - public port - beginning of the port range for node SKALE Chains (default: `10000`) #### Node update @@ -180,7 +183,7 @@ skale node update [ENV_FILEPATH] Options: -- `--yes` - update without additional confirmation +- `--yes` - update without additional confirmation Arguments: @@ -199,8 +202,8 @@ skale node turn-off Options: -- `--maintenance-on` - set SKALE node into maintenance mode before turning off -- `--yes` - turn off without additional confirmation +- `--maintenance-on` - set SKALE node into maintenance mode before turning off +- `--yes` - turn off without additional confirmation #### Node turn-on @@ -212,8 +215,8 @@ skale node turn-on [ENV_FILEPATH] Options: -- `--maintenance-off` - turn off maintenance mode after turning on the node -- `--yes` - turn on without additional confirmation +- `--maintenance-off` - turn off maintenance mode after turning on the node +- `--yes` - turn on without additional confirmation Arguments: @@ -232,7 +235,7 @@ skale node maintenance-on Options: -- `--yes` - set without additional confirmation +- `--yes` - set without additional confirmation Switch off maintenance mode @@ -251,7 +254,7 @@ skale node set-domain Options: - `--domain`/`-d` - SKALE node domain name -- `--yes` - set without additional confirmation +- 
`--yes` - set without additional confirmation ### Wallet commands @@ -287,8 +290,8 @@ skale wallet send [ADDRESS] [AMOUNT] Arguments: -- `ADDRESS` - Ethereum receiver address -- `AMOUNT` - Amount of ETH tokens to send +- `ADDRESS` - Ethereum receiver address +- `AMOUNT` - Amount of ETH tokens to send Optional arguments: @@ -330,7 +333,7 @@ skale schains info SCHAIN_NAME Options: -- `--json` - Show info in JSON format +- `--json` - Show info in JSON format #### SKALE Chain repair @@ -354,7 +357,7 @@ skale health containers Options: -- `-a/--all` - list all containers (by default - only running) +- `-a/--all` - list all containers (by default - only running) #### sChains healthchecks @@ -366,7 +369,7 @@ skale health schains Options: -- `--json` - Show data in JSON format +- `--json` - Show data in JSON format #### SGX @@ -407,16 +410,15 @@ skale ssl upload ##### Options -- `-c/--cert-path` - Path to the certificate file -- `-k/--key-path` - Path to the key file -- `-f/--force` - Overwrite existing certificates +- `-c/--cert-path` - Path to the certificate file +- `-k/--key-path` - Path to the key file +- `-f/--force` - Overwrite existing certificates Admin API URL: \[GET] `/api/ssl/upload` - #### Check ssl certificate -Check ssl certificate be connecting to healthcheck ssl server +Check SSL certificate by connecting to the health-check SSL server ```shell skale ssl check @@ -424,11 +426,11 @@ skale ssl check ##### Options -- `-c/--cert-path` - Path to the certificate file (default: uploaded using `skale ssl upload` certificate) -- `-k/--key-path` - Path to the key file (default: uploaded using `skale ssl upload` key) -- `--type/-t` - Check type (`openssl` - openssl cli check, `skaled` - skaled-based check, `all` - both) -- `--port/-p` - Port to start healthcheck server (defualt: `4536`) -- `--no-client` - Skip client connection (only make sure server started without errors) +- `-c/--cert-path` - Path to the certificate file (default: uploaded using `skale ssl upload` 
certificate) +- `-k/--key-path` - Path to the key file (default: uploaded using `skale ssl upload` key) +- `--type/-t` - Check type (`openssl` - openssl cli check, `skaled` - skaled-based check, `all` - both) +- `--port/-p` - Port to start healthcheck server (default: `4536`) +- `--no-client` - Skip client connection (only make sure server started without errors) ### Logs commands @@ -444,7 +446,7 @@ skale logs cli Options: -- `--debug` - show debug logs; more detailed output +- `--debug` - show debug logs; more detailed output #### Dump Logs @@ -456,8 +458,7 @@ skale logs dump [PATH] Optional arguments: -- `--container`, `-c` - Dump logs only from specified container - +- `--container`, `-c` - Dump logs only from specified container ### Resources allocation commands @@ -470,6 +471,7 @@ Show resources allocation file: ```shell skale resources-allocation show ``` + #### Generate/update Generate/update allocation file: @@ -484,8 +486,8 @@ Arguments: Options: -- `--yes` - generate without additional confirmation -- `-f/--force` - rewrite allocation file if it exists +- `--yes` - generate without additional confirmation +- `-f/--force` - rewrite allocation file if it exists ### Validate commands @@ -501,11 +503,12 @@ skale validate abi Options: -- `--json` - show validation result in json format - +- `--json` - show validation result in json format ## Sync CLI usage +A sync node is a node dedicated to synchronizing a single sChain. + ### Top level commands sync #### Info @@ -526,7 +529,7 @@ skale version Options: -- `--short` - prints version only, without additional text. +- `--short` - prints version only, without additional text. 
### Sync node commands @@ -546,20 +549,22 @@ Arguments: You should specify the following environment variables: -- `DISK_MOUNTPOINT` - disk mount point for storing sChains data -- `DOCKER_LVMPY_STREAM` - stream of `docker-lvmpy` to use -- `CONTAINER_CONFIGS_STREAM` - stream of `skale-node` to use -- `ENDPOINT` - RPC endpoint of the node in the network where SKALE Manager is deployed -- `MANAGER_CONTRACTS_ALIAS_OR_ADDRESS` - SKALE Manager main contract alias or address -- `IMA_CONTRACTS_ALIAS_OR_ADDRESS` - IMA main contract alias or address -- `SCHAIN_NAME` - name of the SKALE chain to sync -- `ENV_TYPE` - environement type (mainnet, testnet, etc) - +- `DISK_MOUNTPOINT` - disk mount point for storing sChains data +- `DOCKER_LVMPY_STREAM` - stream of `docker-lvmpy` to use +- `CONTAINER_CONFIGS_STREAM` - stream of `skale-node` to use +- `ENDPOINT` - RPC endpoint of the node in the network where SKALE Manager is deployed +- `MANAGER_CONTRACTS_ALIAS_OR_ADDRESS` - SKALE Manager main contract alias or address +- `IMA_CONTRACTS_ALIAS_OR_ADDRESS` - IMA main contract alias or address +- `SCHAIN_NAME` - name of the SKALE chain to sync +- `ENV_TYPE` - environment type (e.g., 'mainnet', 'testnet', 'qanet', 'devnet') + +> In `MANAGER_CONTRACTS_ALIAS_OR_ADDRESS` and `IMA_CONTRACTS_ALIAS_OR_ADDRESS` pass a recognized alias (e.g., 'Mainnet', 'Holesky', 'local') or a direct contract address. +> :warning: If the alias you pass is not recognized by underlying skale library, you must provide a direct contract address. 
Options: -- `--archive` - Run sync node in an archive node (disable block rotation) -- `--historic-state` - Enable historic state (works only in pair with --archive flag) +- `--archive` - Run sync node in an archive node (disable block rotation) +- `--historic-state` - Enable historic state (works only in pair with --archive flag) #### Sync node update @@ -585,12 +590,12 @@ Exit codes conventions for SKALE CLI tools - `0` - Everything is OK - `1` - General error exit code -- `3` - Bad API response** -- `4` - Script execution error** -- `5` - Transaction error* -- `6` - Revert error* -- `7` - Bad user error** -- `8` - Node state error** +- `3` - Bad API response\*\* +- `4` - Script execution error\*\* +- `5` - Transaction error\* +- `6` - Revert error\* +- `7` - Bad user error\*\* +- `8` - Node state error\*\* `*` - `validator-cli` only `**` - `node-cli` only @@ -622,15 +627,6 @@ Run commands in dev mode: ENV=dev python main.py YOUR_COMMAND ``` -### Setting up Travis - -Required environment variables: - -- `ACCESS_KEY_ID` - DO Spaces/AWS S3 API Key ID -- `SECRET_ACCESS_KEY` - DO Spaces/AWS S3 Secret access key -- `GITHUB_EMAIL` - Email of GitHub user -- `GITHUB_OAUTH_TOKEN` - GitHub auth token - ## Contributing **If you have any questions please ask our development community on [Discord](https://discord.gg/vvUtWJB).** From 25186a5d633d1701c82261569a2183a2dbd7d3c3 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20Gon=C3=A7alves?= <44880399+Masterix0@users.noreply.github.com> Date: Thu, 27 Feb 2025 16:44:22 +0000 Subject: [PATCH 004/332] Add files via upload Added sample info.py file to fix pylance error since file doesn't existe. File will be overwritten at build time. 
--- node_cli/cli/info.py | 6 ++++++ 1 file changed, 6 insertions(+) create mode 100644 node_cli/cli/info.py diff --git a/node_cli/cli/info.py b/node_cli/cli/info.py new file mode 100644 index 00000000..6d38d34f --- /dev/null +++ b/node_cli/cli/info.py @@ -0,0 +1,6 @@ +BUILD_DATETIME = '' +COMMIT = '' +BRANCH = '' +OS = '' +VERSION = '' +TYPE = '' From 7fe7b5f70e7356d067e475d2e579e829fd0594f6 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20Gon=C3=A7alves?= Date: Thu, 27 Feb 2025 16:45:45 +0000 Subject: [PATCH 005/332] Updated build script to make sure info.py file behaves like before. --- scripts/build.sh | 1 + 1 file changed, 1 insertion(+) diff --git a/scripts/build.sh b/scripts/build.sh index 3f334169..16ff5897 100755 --- a/scripts/build.sh +++ b/scripts/build.sh @@ -38,6 +38,7 @@ LATEST_COMMIT=$(git rev-parse HEAD) CURRENT_DATETIME="`date "+%Y-%m-%d %H:%M:%S"`"; DIST_INFO_FILEPATH=$PARENT_DIR/node_cli/cli/info.py +rm $DIST_INFO_FILEPATH touch $DIST_INFO_FILEPATH echo "BUILD_DATETIME = '$CURRENT_DATETIME'" > $DIST_INFO_FILEPATH From c5ba84deb39e7790eace3a1b18aabb471db8de7b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20Gon=C3=A7alves?= Date: Thu, 27 Feb 2025 18:51:21 +0000 Subject: [PATCH 006/332] Fixed erroneous information in README --- README.md | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/README.md b/README.md index f0556225..6b106900 100644 --- a/README.md +++ b/README.md @@ -121,8 +121,8 @@ You should specify the following environment variables: - `FILEBEAT_URL` - URL to the Filebeat log server - `ENV_TYPE` - environment type (e.g., 'mainnet', 'testnet', 'qanet', 'devnet') -> In `MANAGER_CONTRACTS_ALIAS_OR_ADDRESS` and `IMA_CONTRACTS_ALIAS_OR_ADDRESS` pass a recognized alias (e.g., 'Mainnet', 'Holesky', 'local') or a direct contract address. -> :warning: If the alias you pass is not recognized by underlying skale library, you must provide a direct contract address. 
+> In `MANAGER_CONTRACTS_ALIAS_OR_ADDRESS` and `IMA_CONTRACTS_ALIAS_OR_ADDRESS` fields, if you are using a recognized network (e.g., 'Mainnet', 'Holesky', 'local'), you can use a recognized alias (e.g., 'production', 'grants'). You can check the list of recognized networks and aliases in [contract deployments](https://github.com/skalenetwork/skale-contracts/tree/deployments). +> :warning: If you are using a custom network or a contract which isn't recognized by underlying skale library, you **MUST** provide a direct contract address. Optional variables: @@ -558,8 +558,8 @@ You should specify the following environment variables: - `SCHAIN_NAME` - name of the SKALE chain to sync - `ENV_TYPE` - environment type (e.g., 'mainnet', 'testnet', 'qanet', 'devnet') -> In `MANAGER_CONTRACTS_ALIAS_OR_ADDRESS` and `IMA_CONTRACTS_ALIAS_OR_ADDRESS` pass a recognized alias (e.g., 'Mainnet', 'Holesky', 'local') or a direct contract address. -> :warning: If the alias you pass is not recognized by underlying skale library, you must provide a direct contract address. +> In `MANAGER_CONTRACTS_ALIAS_OR_ADDRESS` and `IMA_CONTRACTS_ALIAS_OR_ADDRESS` fields, if you are using a recognized network (e.g., 'Mainnet', 'Holesky', 'local'), you can use a recognized alias (e.g., 'production', 'grants'). You can check the list of recognized networks and aliases in [contract deployments](https://github.com/skalenetwork/skale-contracts/tree/deployments). +> :warning: If you are using a custom network or a contract which isn't recognized by underlying skale library, you **MUST** provide a direct contract address. 
Options: From 7754708e40bba27c1093b095437987b83e7891bf Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20Gon=C3=A7alves?= Date: Thu, 27 Feb 2025 19:21:11 +0000 Subject: [PATCH 007/332] Add skale-contracts dependency to setup.py --- setup.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/setup.py b/setup.py index f335705d..dc571af4 100644 --- a/setup.py +++ b/setup.py @@ -72,7 +72,8 @@ def find_version(*file_paths): "cryptography==42.0.4", "filelock==3.0.12", 'sh==1.14.2', - 'python-crontab==2.6.0' + 'python-crontab==2.6.0', + 'skale-contracts==1.0.1' ], python_requires='>=3.8,<4', extras_require=extras_require, From d5d7291bc6c2b00532e1356bf17b06e2ac39f945 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20Gon=C3=A7alves?= Date: Wed, 5 Mar 2025 15:53:04 +0000 Subject: [PATCH 008/332] Refactor node-cli to use skale-contracts for contract alias/address validation MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit • Introduced dynamic fetching of contract ABIs from skale-contracts by allowing either an alias (e.g., “test-manager”) or a direct contract address (e.g., “0xabc123…”) in the environment. • Renamed and replaced the old environment variables that directly pointed to local ABI files. • Created new validation logic (validate_env_alias_or_address) for both Manager and IMA contracts in the CLI. • Removed the obsolete tests that ensured local ABI files existed/were valid JSON and replaced them with new tests that check for valid aliases or addresses. • Updated existing tests and mocks to patch or skip the new external validation calls where necessary. • Added ruff configuration (ruff.toml) and reformatted code throughout the repository. 
--- .github/workflows/test.yml | 4 + .gitignore | 4 +- README.md | 19 +- node_cli/cli/node.py | 152 +++++----- node_cli/cli/validate.py | 43 --- node_cli/configs/__init__.py | 13 +- node_cli/configs/env.py | 220 ++++++++++++-- node_cli/core/host.py | 163 ++++++----- node_cli/core/node.py | 38 ++- node_cli/core/resources.py | 49 ++-- node_cli/main.py | 20 +- node_cli/operations/base.py | 94 ++---- node_cli/operations/common.py | 25 +- node_cli/operations/docker_lvmpy.py | 16 +- node_cli/operations/skale_node.py | 49 ++-- node_cli/utils/decorators.py | 9 +- node_cli/utils/git_utils.py | 87 +++++- node_cli/utils/helper.py | 175 ++++++----- node_cli/utils/meta.py | 32 +- node_cli/utils/print_formatters.py | 199 ++++++------- ruff.toml | 4 + setup.py | 71 +++-- tests/cli/node_test.py | 116 +++++--- tests/cli/resources_allocation_test.py | 48 ++- tests/cli/sync_node_test.py | 91 +++--- tests/cli/validate_test.py | 67 ----- tests/configs/configs_env_validate_test.py | 323 +++++++++++++++++++++ tests/configs_env_test.py | 15 - tests/conftest.py | 105 ++++--- tests/core/core_node_test.py | 62 ++-- tests/test-env | 11 +- 31 files changed, 1380 insertions(+), 944 deletions(-) delete mode 100644 node_cli/cli/validate.py create mode 100644 ruff.toml delete mode 100644 tests/cli/validate_test.py create mode 100644 tests/configs/configs_env_validate_test.py delete mode 100644 tests/configs_env_test.py diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 31625095..cdd5afdd 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -34,6 +34,10 @@ jobs: run: | flake8 . 
+ - name: Check with ruff + run: | + ruff check + - name: Build binary - normal run: | mkdir -p ./dist diff --git a/.gitignore b/.gitignore index de02cf68..9ce8b81c 100644 --- a/.gitignore +++ b/.gitignore @@ -119,4 +119,6 @@ resource_allocation.json conf.json test-env -nginx.conf \ No newline at end of file +nginx.conf +tests/.skale/node_data/docker.json +tests/.skale/node_data/node_options.json diff --git a/README.md b/README.md index 6b106900..d1235456 100644 --- a/README.md +++ b/README.md @@ -17,8 +17,7 @@ SKALE Node CLI, part of the SKALE suite of validator tools, is the command line 2.5 [Health](#health-commands) 2.6 [SSL](#ssl-commands) 2.7 [Logs](#logs-commands) - 2.8 [Resources allocation](#resources-allocation-commands) - 2.9 [Validate](#validate-commands) + 2.8 [Resources allocation](#resources-allocation-commands) 3. [Sync CLI usage](#sync-cli-usage) 3.1 [Top level commands](#top-level-commands-sync) 3.2 [Sync node commands](#sync-node-commands) @@ -489,22 +488,6 @@ Options: - `--yes` - generate without additional confirmation - `-f/--force` - rewrite allocation file if it exists -### Validate commands - -> Prefix: `skale validate` - -#### Validate abi - -Check whether ABI files contain valid JSON data - -```shell -skale validate abi -``` - -Options: - -- `--json` - show validation result in json format - ## Sync CLI usage A sync node is a node dedicated to synchronizing a single sChain. 
diff --git a/node_cli/cli/node.py b/node_cli/cli/node.py index 8eee2d96..ebddc5c8 100644 --- a/node_cli/cli/node.py +++ b/node_cli/cli/node.py @@ -33,17 +33,12 @@ turn_on, get_node_info, set_domain_name, - run_checks + run_checks, ) from node_cli.configs import DEFAULT_NODE_BASE_PORT from node_cli.configs.env import ALLOWED_ENV_TYPES from node_cli.utils.decorators import check_inited -from node_cli.utils.helper import ( - abort_if_false, - safe_load_texts, - streamed_cmd, - IP_TYPE -) +from node_cli.utils.helper import abort_if_false, safe_load_texts, streamed_cmd, IP_TYPE from node_cli.utils.meta import get_meta_info from node_cli.utils.print_formatters import print_meta_info @@ -56,48 +51,35 @@ def node_cli(): pass -@node_cli.group(help="SKALE node commands") +@node_cli.group(help='SKALE node commands') def node(): pass -@node.command('info', help="Get info about SKALE node") +@node.command('info', help='Get info about SKALE node') @click.option('--format', '-f', type=click.Choice(['json', 'text'])) def node_info(format): get_node_info(format) -@node.command('register', help="Register current node in the SKALE Manager") -@click.option( - '--name', '-n', - required=True, - prompt="Enter node name", - help='SKALE node name' -) +@node.command('register', help='Register current node in the SKALE Manager') +@click.option('--name', '-n', required=True, prompt='Enter node name', help='SKALE node name') @click.option( '--ip', - prompt="Enter node public IP", + prompt='Enter node public IP', type=IP_TYPE, - help='Public IP for RPC connections & consensus (required)' + help='Public IP for RPC connections & consensus (required)', ) @click.option( - '--port', '-p', - default=DEFAULT_NODE_BASE_PORT, - type=int, - help='Base port for node sChains' -) -@click.option( - '--domain', '-d', - prompt="Enter node domain name", - type=str, - help='Node domain name' + '--port', '-p', default=DEFAULT_NODE_BASE_PORT, type=int, help='Base port for node sChains' ) 
+@click.option('--domain', '-d', prompt='Enter node domain name', type=str, help='Node domain name') @streamed_cmd def register_node(name, ip, port, domain): register(name, ip, ip, port, domain) -@node.command('init', help="Initialize SKALE node") +@node.command('init', help='Initialize SKALE node') @click.argument('env_file') @streamed_cmd def init_node(env_file): @@ -105,17 +87,15 @@ def init_node(env_file): @node.command('update', help='Update node from .env file') -@click.option('--yes', is_flag=True, callback=abort_if_false, - expose_value=False, - prompt='Are you sure you want to update SKALE node software?') -@click.option('--pull-config', 'pull_config_for_schain', hidden=True, type=str) @click.option( - '--unsafe', - 'unsafe_ok', - help='Allow unsafe update', - hidden=True, - is_flag=True + '--yes', + is_flag=True, + callback=abort_if_false, + expose_value=False, + prompt='Are you sure you want to update SKALE node software?', ) +@click.option('--pull-config', 'pull_config_for_schain', hidden=True, type=str) +@click.option('--unsafe', 'unsafe_ok', help='Allow unsafe update', hidden=True, is_flag=True) @click.argument('env_file') @streamed_cmd def update_node(env_file, pull_config_for_schain, unsafe_ok): @@ -129,43 +109,44 @@ def signature(validator_id): print(f'Signature: {res}') -@node.command('backup', help="Generate backup file to restore SKALE node on another machine") +@node.command('backup', help='Generate backup file to restore SKALE node on another machine') @click.argument('backup_folder_path') @streamed_cmd def backup_node(backup_folder_path): backup(backup_folder_path) -@node.command('restore', help="Restore SKALE node on another machine") +@node.command('restore', help='Restore SKALE node on another machine') @click.argument('backup_path') @click.argument('env_file') @click.option( - '--no-snapshot', - help='Do not restore sChains from snapshot', - is_flag=True, - hidden=True + '--no-snapshot', help='Do not restore sChains from snapshot', 
is_flag=True, hidden=True ) @click.option( '--config-only', help='Only restore configuration files in .skale and artifacts', is_flag=True, - hidden=True + hidden=True, ) @streamed_cmd def restore_node(backup_path, env_file, no_snapshot, config_only): restore(backup_path, env_file, no_snapshot, config_only) -@node.command('maintenance-on', help="Set SKALE node into maintenance mode") -@click.option('--yes', is_flag=True, callback=abort_if_false, - expose_value=False, - prompt='Are you sure you want to set SKALE node into maintenance mode?') +@node.command('maintenance-on', help='Set SKALE node into maintenance mode') +@click.option( + '--yes', + is_flag=True, + callback=abort_if_false, + expose_value=False, + prompt='Are you sure you want to set SKALE node into maintenance mode?', +) @streamed_cmd def set_node_in_maintenance(): set_maintenance_mode_on() -@node.command('maintenance-off', help="Remove SKALE node from maintenance mode") +@node.command('maintenance-off', help='Remove SKALE node from maintenance mode') @streamed_cmd def remove_node_from_maintenance(): set_maintenance_mode_off() @@ -173,20 +154,16 @@ def remove_node_from_maintenance(): @node.command('turn-off', help='Turn off the node') @click.option( - '--maintenance-on', - help='Set SKALE node into maintenance mode before turning off', - is_flag=True + '--maintenance-on', help='Set SKALE node into maintenance mode before turning off', is_flag=True ) -@click.option('--yes', is_flag=True, callback=abort_if_false, - expose_value=False, - prompt='Are you sure you want to turn off the node?') @click.option( - '--unsafe', - 'unsafe_ok', - help='Allow unsafe turn-off', - hidden=True, - is_flag=True + '--yes', + is_flag=True, + callback=abort_if_false, + expose_value=False, + prompt='Are you sure you want to turn off the node?', ) +@click.option('--unsafe', 'unsafe_ok', help='Allow unsafe turn-off', hidden=True, is_flag=True) @streamed_cmd def _turn_off(maintenance_on, unsafe_ok): turn_off(maintenance_on, 
unsafe_ok) @@ -194,35 +171,36 @@ def _turn_off(maintenance_on, unsafe_ok): @node.command('turn-on', help='Turn on the node') @click.option( - '--maintenance-off', - help='Turn off maintenance mode after turning on the node', - is_flag=True + '--maintenance-off', help='Turn off maintenance mode after turning on the node', is_flag=True ) @click.option( '--sync-schains', help='Run all sChains in the snapshot download mode', is_flag=True, - hidden=True + hidden=True, +) +@click.option( + '--yes', + is_flag=True, + callback=abort_if_false, + expose_value=False, + prompt='Are you sure you want to turn on the node?', ) -@click.option('--yes', is_flag=True, callback=abort_if_false, - expose_value=False, - prompt='Are you sure you want to turn on the node?') @click.argument('env_file') @streamed_cmd def _turn_on(maintenance_off, sync_schains, env_file): turn_on(maintenance_off, sync_schains, env_file) -@node.command('set-domain', help="Set node domain name") +@node.command('set-domain', help='Set node domain name') +@click.option('--domain', '-d', prompt='Enter node domain name', type=str, help='Node domain name') @click.option( - '--domain', '-d', - prompt="Enter node domain name", - type=str, - help='Node domain name' + '--yes', + is_flag=True, + callback=abort_if_false, + expose_value=False, + prompt='Are you sure you want to set domain name?', ) -@click.option('--yes', is_flag=True, callback=abort_if_false, - expose_value=False, - prompt='Are you sure you want to set domain name?') @streamed_cmd def _set_domain_name(domain): set_domain_name(domain) @@ -230,10 +208,11 @@ def _set_domain_name(domain): @node.command(help='Check if node meet network requirements') @click.option( - '--network', '-n', + '--network', + '-n', type=click.Choice(ALLOWED_ENV_TYPES), default='mainnet', - help='Network to check' + help='Network to check', ) def check(network): run_checks(network) @@ -241,21 +220,20 @@ def check(network): @node.command(help='Reconfigure nftables rules') 
@click.option('--monitoring', is_flag=True) -@click.option('--yes', is_flag=True, callback=abort_if_false, - expose_value=False, - prompt='Are you sure you want to reconfigure firewall rules?') +@click.option( + '--yes', + is_flag=True, + callback=abort_if_false, + expose_value=False, + prompt='Are you sure you want to reconfigure firewall rules?', +) def configure_firewall(monitoring): configure_firewall_rules(enable_monitoring=monitoring) @node.command(help='Show node version information') @check_inited -@click.option( - '--json', - 'raw', - is_flag=True, - help=TEXTS['common']['json'] -) +@click.option('--json', 'raw', is_flag=True, help=TEXTS['common']['json']) def version(raw: bool) -> None: meta_info = get_meta_info(raw=raw) if raw: diff --git a/node_cli/cli/validate.py b/node_cli/cli/validate.py deleted file mode 100644 index f8134df9..00000000 --- a/node_cli/cli/validate.py +++ /dev/null @@ -1,43 +0,0 @@ -# -*- coding: utf-8 -*- -# -# This file is part of node-cli -# -# Copyright (C) 2019 SKALE Labs -# -# This program is free software: you can redistribute it and/or modify -# it under the terms of the GNU Affero General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU Affero General Public License for more details. -# -# You should have received a copy of the GNU Affero General Public License -# along with this program. If not, see . 
- -import click - -from node_cli.core.host import validate_abi_files - - -@click.group() -def validate_cli(): - pass - - -@validate_cli.group(help="Validation commands") -def validate(): - pass - - -@validate.command('abi', help="Validate contracts abi") -@click.option( - '--json', - 'json_format', - help='Show result in JSON format', - is_flag=True -) -def abi(json_format): - validate_abi_files(json_result=json_format) diff --git a/node_cli/configs/__init__.py b/node_cli/configs/__init__.py index 19d9e000..36363253 100644 --- a/node_cli/configs/__init__.py +++ b/node_cli/configs/__init__.py @@ -24,8 +24,7 @@ GLOBAL_SKALE_DIR = os.getenv('GLOBAL_SKALE_DIR') or '/etc/skale' GLOBAL_SKALE_CONF_FILENAME = 'conf.json' -GLOBAL_SKALE_CONF_FILEPATH = os.path.join( - GLOBAL_SKALE_DIR, GLOBAL_SKALE_CONF_FILENAME) +GLOBAL_SKALE_CONF_FILEPATH = os.path.join(GLOBAL_SKALE_DIR, GLOBAL_SKALE_CONF_FILENAME) GLOBAL_CONFIG = read_g_config(GLOBAL_SKALE_DIR, GLOBAL_SKALE_CONF_FILEPATH) G_CONF_USER = GLOBAL_CONFIG['user'] @@ -65,8 +64,7 @@ LOG_PATH = os.path.join(NODE_DATA_PATH, 'log') REMOVED_CONTAINERS_FOLDER_NAME = '.removed_containers' -REMOVED_CONTAINERS_FOLDER_PATH = os.path.join( - LOG_PATH, REMOVED_CONTAINERS_FOLDER_NAME) +REMOVED_CONTAINERS_FOLDER_PATH = os.path.join(LOG_PATH, REMOVED_CONTAINERS_FOLDER_NAME) ETH_STATE_PATH = os.path.join(NODE_DATA_PATH, 'eth-state') NODE_CERTS_PATH = os.path.join(NODE_DATA_PATH, 'ssl') @@ -105,7 +103,7 @@ def _get_env(): try: - sys._MEIPASS + sys._MEIPASS # type: ignore except AttributeError: return 'dev' return 'prod' @@ -118,7 +116,7 @@ def _get_env(): PARDIR = os.path.join(CURRENT_FILE_LOCATION, os.pardir) PROJECT_DIR = os.path.join(PARDIR, os.pardir) else: - PARDIR = os.path.join(sys._MEIPASS, 'data') + PARDIR = os.path.join(sys._MEIPASS, 'data') # type: ignore PROJECT_DIR = PARDIR TEXT_FILE = os.path.join(PROJECT_DIR, 'text.yml') @@ -141,9 +139,6 @@ def _get_env(): TM_INIT_TIMEOUT = 20 RESTORE_SLEEP_TIMEOUT = 20 -MANAGER_CONTRACTS_FILEPATH 
= os.path.join(CONTRACTS_PATH, 'manager.json') -IMA_CONTRACTS_FILEPATH = os.path.join(CONTRACTS_PATH, 'ima.json') - META_FILEPATH = os.path.join(NODE_DATA_PATH, 'meta.json') SKALE_NODE_REPO_URL = 'https://github.com/skalenetwork/skale-node.git' diff --git a/node_cli/configs/env.py b/node_cli/configs/env.py index 7b6bf116..6ee99022 100644 --- a/node_cli/configs/env.py +++ b/node_cli/configs/env.py @@ -1,19 +1,37 @@ +"""Environment configuration and validation module for SKALE node. + +This module handles environment variable loading, validation, and configuration +for SKALE node setup. It ensures all required parameters are present and valid. +""" + import os +from typing import Dict, List, Optional from dotenv import load_dotenv -from node_cli.configs import SKALE_DIR, CONTAINER_CONFIG_PATH +import requests +from enum import Enum +from node_cli.configs import SKALE_DIR, CONTAINER_CONFIG_PATH +from node_cli.utils.helper import error_exit +from node_cli.utils.exit_codes import CLIExitCodes SKALE_DIR_ENV_FILEPATH = os.path.join(SKALE_DIR, '.env') CONFIGS_ENV_FILEPATH = os.path.join(CONTAINER_CONFIG_PATH, '.env') - ALLOWED_ENV_TYPES = ['mainnet', 'testnet', 'qanet', 'devnet'] -REQUIRED_PARAMS = { + +class ContractType(Enum): + """Contract types supported by the system with skale-contracts integration.""" + + IMA = 'mainnet-ima' + MANAGER = 'skale-manager' + + +REQUIRED_PARAMS: Dict[str, str] = { 'CONTAINER_CONFIGS_STREAM': '', 'ENDPOINT': '', - 'MANAGER_CONTRACTS_ABI_URL': '', - 'IMA_CONTRACTS_ABI_URL': '', + 'MANAGER_CONTRACTS_ALIAS_OR_ADDRESS': '', + 'IMA_CONTRACTS_ALIAS_OR_ADDRESS': '', 'FILEBEAT_HOST': '', 'DISK_MOUNTPOINT': '', 'SGX_SERVER_URL': '', @@ -21,18 +39,18 @@ 'ENV_TYPE': '', } -REQUIRED_PARAMS_SYNC = { +REQUIRED_PARAMS_SYNC: Dict[str, str] = { 'SCHAIN_NAME': '', 'CONTAINER_CONFIGS_STREAM': '', 'ENDPOINT': '', - 'MANAGER_CONTRACTS_ABI_URL': '', - 'IMA_CONTRACTS_ABI_URL': '', + 'MANAGER_CONTRACTS_ALIAS_OR_ADDRESS': '', + 'IMA_CONTRACTS_ALIAS_OR_ADDRESS': 
'', 'DISK_MOUNTPOINT': '', 'DOCKER_LVMPY_STREAM': '', - 'ENV_TYPE': '' + 'ENV_TYPE': '', } -OPTIONAL_PARAMS = { +OPTIONAL_PARAMS: Dict[str, str] = { 'MONITORING_CONTAINERS': '', 'TELEGRAF': '', 'INFLUX_TOKEN': '', @@ -45,36 +63,180 @@ 'DEFAULT_GAS_PRICE_WEI': '', 'SKIP_DOCKER_CONFIG': '', 'ENFORCE_BTRFS': '', - 'SKIP_DOCKER_CLEANUP': '' + 'SKIP_DOCKER_CLEANUP': '', } -def absent_params(params): - return list(filter( - lambda key: key not in OPTIONAL_PARAMS and not params[key], - params) - ) +def absent_params(params: Dict[str, str]) -> List[str]: + """Return a list of required parameters that are missing or empty.""" + return [key for key in params if key not in OPTIONAL_PARAMS and not params[key]] + + +def get_env_config( + env_filepath: str = SKALE_DIR_ENV_FILEPATH, sync_node: bool = False +) -> Dict[str, str]: + """Load and validate environment configuration from a file.""" + load_env_file(env_filepath) + params = build_params(sync_node) + populate_params(params) + validate_params(params) + return params -def get_env_config(env_filepath: str = SKALE_DIR_ENV_FILEPATH, sync_node: bool = False): - load_dotenv(dotenv_path=env_filepath) +def load_env_file(env_filepath: str) -> None: + """Check and load environment variables from the given file.""" + if not os.path.exists(env_filepath): + error_exit(f'Environment file not found: {env_filepath}', CLIExitCodes.FAILURE) + if not os.access(env_filepath, os.R_OK): + error_exit(f'Cannot read environment file: {env_filepath}', CLIExitCodes.FAILURE) + if not load_dotenv(dotenv_path=env_filepath): + error_exit(f'Failed to load environment from {env_filepath}', CLIExitCodes.FAILURE) + + +def build_params(sync_node: bool = False) -> Dict[str, str]: + """Return a dictionary of parameters based on node type.""" params = REQUIRED_PARAMS_SYNC.copy() if sync_node else REQUIRED_PARAMS.copy() params.update(OPTIONAL_PARAMS) - for option_name in params: - env_param = os.getenv(option_name) - if env_param is not None: - 
params[option_name] = str(env_param) - validate_params(params) return params -def validate_params(params): # todo: temporary fix - if params['ENV_TYPE'] not in ALLOWED_ENV_TYPES: - raise NotValidEnvParamsError( - f'Allowed ENV_TYPE values are {ALLOWED_ENV_TYPES}. ' - f'Actual: "{params["ENV_TYPE"]}"' +def populate_params(params: Dict[str, str]) -> None: + """Populate params dictionary with environment variable values.""" + for key in params: + env_value = os.getenv(key) + if env_value is not None: + params[key] = str(env_value) + + +def validate_params(params: Dict[str, str]) -> None: + """Validate environment parameters.""" + missing = absent_params(params) + if missing: + error_exit(f'Missing required parameters: {missing}', CLIExitCodes.FAILURE) + validate_env_type(params['ENV_TYPE']) + # Get the endpoint explicitly from the params. + endpoint = params['ENDPOINT'] + validate_env_alias_or_address( + params['IMA_CONTRACTS_ALIAS_OR_ADDRESS'], ContractType.IMA, endpoint + ) + validate_env_alias_or_address( + params['MANAGER_CONTRACTS_ALIAS_OR_ADDRESS'], ContractType.MANAGER, endpoint + ) + + +def validate_env_type(env_type: str) -> None: + """Validate the environment type.""" + if env_type not in ALLOWED_ENV_TYPES: + error_exit( + f'Allowed ENV_TYPE values are {ALLOWED_ENV_TYPES}. Actual: "{env_type}"', + CLIExitCodes.FAILURE, + ) + + +def validate_env_alias_or_address( + alias_or_address: str, contract_type: ContractType, endpoint: str +) -> None: + """Validate contract alias or address.""" + if not alias_or_address: + param_name = ( + 'IMA_CONTRACTS_ALIAS_OR_ADDRESS' + if contract_type == ContractType.IMA + else 'MANAGER_CONTRACTS_ALIAS_OR_ADDRESS' + ) + error_exit(f'{param_name} is not set', CLIExitCodes.FAILURE) + # If alias_or_address is 42 characters and starts with '0x', treat it as a contract address. + # TODO: Add a more robust check for contract address and see if doesn't conflict with alias. 
+ if len(alias_or_address) == 42 and alias_or_address.startswith('0x'): + validate_contract_address(alias_or_address, endpoint) + else: + validate_contract_alias(alias_or_address, contract_type, endpoint) + + +def validate_contract_address(contract_address: str, endpoint: str) -> None: + """Validate if the given contract address has deployed code.""" + try: + response = requests.post( + endpoint, + json={ + 'jsonrpc': '2.0', + 'method': 'eth_getCode', + 'params': [contract_address, 'latest'], + 'id': 1, + }, ) + if response.status_code != 200: + error_exit( + f'Failed to verify contract at address {contract_address}', CLIExitCodes.FAILURE + ) + result = response.json().get('result') + if not result or result in ['0x', '0x0']: + error_exit( + f'No contract code found at address {contract_address}', CLIExitCodes.FAILURE + ) + except requests.RequestException as e: + error_exit(f'Failed to validate contract address: {str(e)}', CLIExitCodes.FAILURE) + + +def validate_contract_alias(alias: str, contract_type: ContractType, endpoint: str) -> None: + """Validate if the given contract alias exists in deployments for the current network.""" + try: + chain_id = get_chain_id(endpoint) + metadata = get_network_metadata() + networks = metadata.get('networks', []) + network_path: Optional[str] = None + for net in networks: + if net.get('chainId') == chain_id: + network_path = net.get('path') + break + if not network_path: + error_exit( + f'Network with chain ID {chain_id} not found in metadata', CLIExitCodes.FAILURE + ) + deployment_url = ( + f'https://raw.githubusercontent.com/skalenetwork/skale-contracts/' + f'refs/heads/deployments/{network_path}/{contract_type.value}/{alias}.json' + ) + if requests.get(deployment_url).status_code != 200: + error_exit( + f"Contract alias '{alias}' not found for {contract_type.value}", + CLIExitCodes.FAILURE, + ) + except requests.RequestException as e: + error_exit(f"Failed to validate contract alias '{alias}': {str(e)}", 
CLIExitCodes.FAILURE) + + +def get_chain_id(endpoint: str) -> int: + """Fetch chain ID from the JSON-RPC endpoint.""" + try: + response = requests.post( + endpoint, + json={'jsonrpc': '2.0', 'method': 'eth_chainId', 'params': [], 'id': 1}, + ) + if response.status_code != 200: + error_exit('Failed to get chain ID from endpoint', CLIExitCodes.FAILURE) + return int(response.json()['result'], 16) + except requests.RequestException as e: + error_exit(f'Failed to get chain ID: {str(e)}', CLIExitCodes.FAILURE) + # Will never reach this line, but needed for type checking. + return 0 + + +def get_network_metadata() -> Dict: + """Fetch network metadata from GitHub.""" + metadata_url = ( + 'https://raw.githubusercontent.com/skalenetwork/skale-contracts/' + 'refs/heads/deployments/metadata.json' + ) + try: + response = requests.get(metadata_url) + if response.status_code != 200: + error_exit('Failed to fetch networks metadata', CLIExitCodes.FAILURE) + return response.json() + except requests.RequestException as e: + error_exit(f'Failed to fetch networks metadata: {str(e)}', CLIExitCodes.FAILURE) + # Will never reach this line, but needed for type checking. + return {} class NotValidEnvParamsError(Exception): - """Raised when something is wrong with provided env params""" + """Raised when environment parameters are invalid or missing.""" diff --git a/node_cli/core/host.py b/node_cli/core/host.py index 0456ac89..d66c1a54 100644 --- a/node_cli/core/host.py +++ b/node_cli/core/host.py @@ -17,36 +17,45 @@ # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see . 
-import json import logging import os from shutil import copyfile from urllib.parse import urlparse from node_cli.core.resources import update_resource_allocation +from node_cli.utils.helper import error_exit +from node_cli.utils.exit_codes import CLIExitCodes from node_cli.configs import ( - ADMIN_PORT, AUTOLOAD_KERNEL_MODULES_PATH, - BTRFS_KERNEL_MODULE, DEFAULT_URL_SCHEME, NODE_DATA_PATH, - SKALE_DIR, CONTAINER_CONFIG_PATH, CONTRACTS_PATH, - ETH_STATE_PATH, NODE_CERTS_PATH, SGX_CERTS_PATH, - REPORTS_PATH, REDIS_DATA_PATH, - SCHAINS_DATA_PATH, LOG_PATH, + ADMIN_PORT, + AUTOLOAD_KERNEL_MODULES_PATH, + BTRFS_KERNEL_MODULE, + DEFAULT_URL_SCHEME, + NODE_DATA_PATH, + SKALE_DIR, + CONTAINER_CONFIG_PATH, + CONTRACTS_PATH, + ETH_STATE_PATH, + NODE_CERTS_PATH, + SGX_CERTS_PATH, + REPORTS_PATH, + REDIS_DATA_PATH, + SCHAINS_DATA_PATH, + LOG_PATH, REMOVED_CONTAINERS_FOLDER_PATH, - IMA_CONTRACTS_FILEPATH, MANAGER_CONTRACTS_FILEPATH, - SKALE_RUN_DIR, SKALE_STATE_DIR, SKALE_TMP_DIR, - UFW_CONFIG_PATH, UFW_IPV6_BEFORE_INPUT_CHAIN -) -from node_cli.configs.resource_allocation import ( - RESOURCE_ALLOCATION_FILEPATH + SKALE_RUN_DIR, + SKALE_STATE_DIR, + SKALE_TMP_DIR, + UFW_CONFIG_PATH, + UFW_IPV6_BEFORE_INPUT_CHAIN, ) +from node_cli.configs.resource_allocation import RESOURCE_ALLOCATION_FILEPATH from node_cli.configs.cli_logger import LOG_DATA_PATH from node_cli.configs.env import SKALE_DIR_ENV_FILEPATH, CONFIGS_ENV_FILEPATH from node_cli.core.nftables import NFTablesManager from node_cli.utils.helper import safe_mkdir -from node_cli.utils.print_formatters import print_abi_validation_errors -from node_cli.utils.helper import safe_load_texts, validate_abi +from node_cli.utils.helper import safe_load_texts TEXTS = safe_load_texts() @@ -65,52 +74,91 @@ def fix_url(url): return False -def get_flask_secret_key(): +def get_flask_secret_key() -> str: + """Retrieve Flask secret key from filesystem.""" secret_key_filepath = os.path.join(NODE_DATA_PATH, 'flask_db_key.txt') - with 
open(secret_key_filepath) as key_file: - return key_file.read().strip() + + if not os.path.exists(secret_key_filepath): + error_exit( + f'Flask secret key file not found at {secret_key_filepath}', CLIExitCodes.FAILURE + ) + + try: + with open(secret_key_filepath, 'r') as key_file: + secret_key = key_file.read().strip() + return secret_key + except (IOError, OSError) as e: + error_exit(f'Failed to read Flask secret key: {e}', CLIExitCodes.FAILURE) + # Will never reach here, but needed for type checking. + return '' -def prepare_host( - env_filepath: str, - env_type: str, - allocation: bool = False -): - logger.info('Preparing host started') - make_dirs() - save_env_params(env_filepath) +def prepare_host(env_filepath: str, env_type: str, allocation: bool = False) -> None: + """Initialize SKALE node host environment.""" + if not env_filepath or not env_type: + error_exit('Missing required parameters for host initialization', CLIExitCodes.FAILURE) - if allocation: - update_resource_allocation(env_type) + try: + logger.info('Preparing host started') + make_dirs() + save_env_params(env_filepath) + if allocation: + update_resource_allocation(env_type) + except Exception as e: + error_exit(f'Failed to prepare host: {str(e)}', CLIExitCodes.FAILURE) -def is_node_inited(): - return os.path.isfile(RESOURCE_ALLOCATION_FILEPATH) + +def is_node_inited() -> bool: + """Check if the SKALE node has been initialized. + + Determines initialization status by checking for existence of the + resource allocation file. 
+ """ + try: + # Check if resource allocation file exists as initialization indicator + return os.path.isfile(RESOURCE_ALLOCATION_FILEPATH) + except OSError as e: + logger.error(f'Error checking node initialization status: {e}') + return False def make_dirs(): for dir_path in ( - SKALE_DIR, NODE_DATA_PATH, CONTAINER_CONFIG_PATH, - CONTRACTS_PATH, ETH_STATE_PATH, NODE_CERTS_PATH, - REMOVED_CONTAINERS_FOLDER_PATH, - SGX_CERTS_PATH, SCHAINS_DATA_PATH, LOG_PATH, - REPORTS_PATH, REDIS_DATA_PATH, - SKALE_RUN_DIR, SKALE_STATE_DIR, SKALE_TMP_DIR + SKALE_DIR, + NODE_DATA_PATH, + CONTAINER_CONFIG_PATH, + CONTRACTS_PATH, + ETH_STATE_PATH, + NODE_CERTS_PATH, + REMOVED_CONTAINERS_FOLDER_PATH, + SGX_CERTS_PATH, + SCHAINS_DATA_PATH, + LOG_PATH, + REPORTS_PATH, + REDIS_DATA_PATH, + SKALE_RUN_DIR, + SKALE_STATE_DIR, + SKALE_TMP_DIR, ): safe_mkdir(dir_path) -def save_env_params(env_filepath): - copyfile(env_filepath, SKALE_DIR_ENV_FILEPATH) +def save_env_params(env_filepath: str) -> None: + """Copy environment parameters file to SKALE directory.""" + if not os.path.isfile(env_filepath): + error_exit(f'Environment file not found: {env_filepath}', CLIExitCodes.FAILURE) + if not os.access(env_filepath, os.R_OK): + error_exit(f'Cannot read environment file: {env_filepath}', CLIExitCodes.FAILURE) + try: + copyfile(env_filepath, SKALE_DIR_ENV_FILEPATH) + except (IOError, OSError) as e: + error_exit(f'Failed to copy environment file: {e}', CLIExitCodes.FAILURE) def link_env_file(): - if not (os.path.islink(CONFIGS_ENV_FILEPATH) or - os.path.isfile(CONFIGS_ENV_FILEPATH)): - logger.info( - 'Creating symlink %s → %s', - SKALE_DIR_ENV_FILEPATH, CONFIGS_ENV_FILEPATH - ) + if not (os.path.islink(CONFIGS_ENV_FILEPATH) or os.path.isfile(CONFIGS_ENV_FILEPATH)): + logger.info('Creating symlink %s → %s', SKALE_DIR_ENV_FILEPATH, CONFIGS_ENV_FILEPATH) os.symlink(SKALE_DIR_ENV_FILEPATH, CONFIGS_ENV_FILEPATH) @@ -130,10 +178,7 @@ def 
is_btrfs_module_autoloaded(modules_filepath=AUTOLOAD_KERNEL_MODULES_PATH): modules = set( map( lambda line: line.strip(), - filter( - lambda line: not line.startswith('#'), - modules_file.readlines() - ) + filter(lambda line: not line.startswith('#'), modules_file.readlines()), ) ) return BTRFS_KERNEL_MODULE in modules @@ -144,9 +189,7 @@ def add_btrfs_module_to_autoload(modules_filepath=AUTOLOAD_KERNEL_MODULES_PATH): modules_file.write(f'{BTRFS_KERNEL_MODULE}\n') -def ensure_btrfs_kernel_module_autoloaded( - modules_filepath=AUTOLOAD_KERNEL_MODULES_PATH -): +def ensure_btrfs_kernel_module_autoloaded(modules_filepath=AUTOLOAD_KERNEL_MODULES_PATH): logger.debug('Checking if btrfs is in %s', modules_filepath) if not is_btrfs_module_autoloaded(modules_filepath): logger.info('Adding btrfs module to %s', modules_filepath) @@ -155,24 +198,6 @@ def ensure_btrfs_kernel_module_autoloaded( logger.debug('btrfs is already in %s', modules_filepath) -def validate_abi_files(json_result=False): - results = [ - validate_abi(abi_filepath) - for abi_filepath in [ - MANAGER_CONTRACTS_FILEPATH, - IMA_CONTRACTS_FILEPATH - ] - ] - if any(r['status'] == 'error' for r in results): - print('Some files do not exist or are incorrect') - print_abi_validation_errors(results, raw=json_result) - else: - if json_result: - print(json.dumps({'result': 'ok'})) - else: - print('All abi files are correct json files!') - - def is_ufw_ipv6_option_enabled() -> bool: """Check if UFW is enabled and IPv6 is configured.""" if os.path.isfile(UFW_CONFIG_PATH): diff --git a/node_cli/core/node.py b/node_cli/core/node.py index 0d5eeb72..e51209b0 100644 --- a/node_cli/core/node.py +++ b/node_cli/core/node.py @@ -68,7 +68,6 @@ error_exit, get_request, post_request, - extract_env_params, ) from node_cli.utils.meta import get_meta_info from node_cli.utils.texts import Texts @@ -140,8 +139,6 @@ def register_node(name, p2p_ip, public_ip, port, domain_name): @check_not_inited def init(env_filepath): env = 
compose_node_env(env_filepath) - if env is None: - return inited_ok = init_op(env_filepath, env) if not inited_ok: @@ -213,7 +210,7 @@ def update_sync(env_filepath: str, unsafe_ok: bool = False) -> None: @check_inited @check_user def repair_sync(archive: bool, historic_state: bool, snapshot_from: str) -> None: - env_params = extract_env_params(INIT_ENV_FILEPATH, sync_node=True) + env_params = get_env_config(INIT_ENV_FILEPATH, sync_node=True) schain_name = env_params['SCHAIN_NAME'] repair_sync_op( schain_name=schain_name, @@ -225,21 +222,25 @@ def repair_sync(archive: bool, historic_state: bool, snapshot_from: str) -> None def compose_node_env( - env_filepath, - inited_node=False, - sync_schains=None, - pull_config_for_schain=None, - sync_node=False, - save: bool = True -): + env_filepath: Optional[str], + inited_node: bool = False, + sync_schains: Optional[bool] = None, + pull_config_for_schain: Optional[str] = None, + sync_node: bool = False, + save: bool = True, +) -> dict: + """Compose environment variables dictionary for SKALE node.""" if env_filepath is not None: - env_params = extract_env_params(env_filepath, sync_node=sync_node, raise_for_status=True) + env_params = get_env_config(env_filepath, sync_node=sync_node) if save: save_env_params(env_filepath) else: - env_params = extract_env_params(INIT_ENV_FILEPATH, sync_node=sync_node) + env_params = get_env_config(INIT_ENV_FILEPATH, sync_node=sync_node) + # Set mount directory based on node type mnt_dir = SCHAINS_MNT_DIR_SYNC if sync_node else SCHAINS_MNT_DIR_REGULAR + + # Compose base environment dictionary env = { 'SKALE_DIR': SKALE_DIR, 'SCHAINS_MNT_DIR': mnt_dir, @@ -247,13 +248,20 @@ def compose_node_env( 'SKALE_LIB_PATH': SKALE_STATE_DIR, **env_params, } + + # Add Flask secret key for initialized non-sync nodes if inited_node and not sync_node: - flask_secret_key = get_flask_secret_key() - env['FLASK_SECRET_KEY'] = flask_secret_key + env['FLASK_SECRET_KEY'] = get_flask_secret_key() + + # Enable backup run 
for syncing schains if sync_schains and not sync_node: env['BACKUP_RUN'] = 'True' + + # Add schain config pull parameter if specified if pull_config_for_schain: env['PULL_CONFIG_FOR_SCHAIN'] = pull_config_for_schain + + # Remove empty values and return return {k: v for k, v in env.items() if v != ''} diff --git a/node_cli/core/resources.py b/node_cli/core/resources.py index f47ef792..64467c23 100644 --- a/node_cli/core/resources.py +++ b/node_cli/core/resources.py @@ -24,16 +24,21 @@ import psutil +from node_cli.configs.env import get_env_config from node_cli.utils.docker_utils import ensure_volume from node_cli.utils.schain_types import SchainTypes -from node_cli.utils.helper import ( - write_json, read_json, run_cmd, extract_env_params, safe_load_yml -) +from node_cli.utils.helper import write_json, read_json, run_cmd, safe_load_yml from node_cli.configs import ALLOCATION_FILEPATH, STATIC_PARAMS_FILEPATH, SNAPSHOTS_SHARED_VOLUME from node_cli.configs.resource_allocation import ( - RESOURCE_ALLOCATION_FILEPATH, TIMES, TIMEOUT, - TEST_DIVIDER, SMALL_DIVIDER, MEDIUM_DIVIDER, LARGE_DIVIDER, - MEMORY_FACTOR, MAX_CPU_SHARES + RESOURCE_ALLOCATION_FILEPATH, + TIMES, + TIMEOUT, + TEST_DIVIDER, + SMALL_DIVIDER, + MEDIUM_DIVIDER, + LARGE_DIVIDER, + MEMORY_FACTOR, + MAX_CPU_SHARES, ) logger = logging.getLogger(__name__) @@ -50,7 +55,7 @@ def __init__(self, value, fractional=False): 'test': value / TEST_DIVIDER, 'small': value / SMALL_DIVIDER, 'medium': value / MEDIUM_DIVIDER, - 'large': value / LARGE_DIVIDER + 'large': value / LARGE_DIVIDER, } if not fractional: for k in self.values: @@ -67,10 +72,7 @@ def get_resource_allocation_info(): return None -def compose_resource_allocation_config( - env_type: str, - params_by_env_type: Dict = None -) -> Dict: +def compose_resource_allocation_config(env_type: str, params_by_env_type: Dict = None) -> Dict: params_by_env_type = params_by_env_type or safe_load_yml(STATIC_PARAMS_FILEPATH) common_config = params_by_env_type['common'] 
schain_cpu_alloc, ima_cpu_alloc = get_cpu_alloc(common_config) @@ -83,12 +85,9 @@ def compose_resource_allocation_config( 'mem': schain_mem_alloc.dict(), 'disk': schain_allocation_data[env_type]['disk'], 'volume_limits': schain_allocation_data[env_type]['volume_limits'], # noqa - 'leveldb_limits': schain_allocation_data[env_type]['leveldb_limits'] # noqa + 'leveldb_limits': schain_allocation_data[env_type]['leveldb_limits'], # noqa }, - 'ima': { - 'cpu_shares': ima_cpu_alloc.dict(), - 'mem': ima_mem_alloc.dict() - } + 'ima': {'cpu_shares': ima_cpu_alloc.dict(), 'mem': ima_mem_alloc.dict()}, } @@ -98,22 +97,17 @@ def generate_resource_allocation_config(env_file, force=False) -> None: logger.debug(msg) print(msg) return - env_params = extract_env_params(env_file) + env_params = get_env_config(env_file) if env_params is None: return logger.info('Generating resource allocation file ...') try: - update_resource_allocation( - env_params['ENV_TYPE'] - ) + update_resource_allocation(env_params['ENV_TYPE']) except Exception as e: logger.exception(e) - print('Can\'t generate resource allocation file, check out CLI logs') + print("Can't generate resource allocation file, check out CLI logs") else: - print( - f'Resource allocation file generated: ' - f'{RESOURCE_ALLOCATION_FILEPATH}' - ) + print(f'Resource allocation file generated: {RESOURCE_ALLOCATION_FILEPATH}') def update_resource_allocation(env_type: str) -> None: @@ -151,10 +145,7 @@ def get_cpu_alloc(common_config: Dict) -> ResourceAlloc: cpu_proportions = common_config['schain']['cpu'] schain_max_cpu_shares = int(cpu_proportions['skaled'] * MAX_CPU_SHARES) ima_max_cpu_shares = int(cpu_proportions['ima'] * MAX_CPU_SHARES) - return ( - ResourceAlloc(schain_max_cpu_shares), - ResourceAlloc(ima_max_cpu_shares) - ) + return (ResourceAlloc(schain_max_cpu_shares), ResourceAlloc(ima_max_cpu_shares)) def verify_disk_size( diff --git a/node_cli/main.py b/node_cli/main.py index 6fefa7b9..d6331fd2 100644 --- a/node_cli/main.py +++ 
b/node_cli/main.py @@ -36,10 +36,10 @@ from node_cli.cli.wallet import wallet_cli from node_cli.cli.ssl import ssl_cli from node_cli.cli.exit import exit_cli -from node_cli.cli.validate import validate_cli from node_cli.cli.resources_allocation import resources_allocation_cli from node_cli.cli.sync_node import sync_node_cli +from node_cli.utils.exit_codes import CLIExitCodes from node_cli.utils.helper import safe_load_texts, init_default_logger from node_cli.configs import LONG_LINE from node_cli.core.host import init_logs_dir @@ -55,7 +55,7 @@ def cli(): pass -@cli.command('version', help="Show SKALE node CLI version") +@cli.command('version', help='Show SKALE node CLI version') @click.option('--short', is_flag=True) def version(short): if short: @@ -64,9 +64,10 @@ def version(short): print(f'SKALE Node CLI version: {VERSION}') -@cli.command('info', help="Show SKALE node CLI info") +@cli.command('info', help='Show SKALE node CLI info') def info(): - print(inspect.cleandoc(f''' + print( + inspect.cleandoc(f""" {LONG_LINE} Version: {__version__} Full version: {VERSION} @@ -75,7 +76,8 @@ def info(): Commit: {COMMIT} Git branch: {BRANCH} {LONG_LINE} - ''')) + """) + ) def get_sources_list() -> List[click.MultiCommand]: @@ -93,8 +95,7 @@ def get_sources_list() -> List[click.MultiCommand]: wallet_cli, ssl_cli, exit_cli, - validate_cli, - lvmpy_cli + lvmpy_cli, ] @@ -102,8 +103,7 @@ def handle_exception(exc_type, exc_value, exc_traceback): if issubclass(exc_type, KeyboardInterrupt): sys.__excepthook__(exc_type, exc_value, exc_traceback) return - logger.error("Uncaught exception", - exc_info=(exc_type, exc_value, exc_traceback)) + logger.error('Uncaught exception', exc_info=(exc_type, exc_value, exc_traceback)) sys.excepthook = handle_exception @@ -123,5 +123,5 @@ def handle_exception(exc_type, exc_value, exc_traceback): except Exception as err: traceback.print_exc() logger.debug('Execution time: %d seconds', time.time() - start_time) - error_exit(err) + error_exit(err, 
CLIExitCodes.FAILURE) logger.debug('Execution time: %d seconds', time.time() - start_time) diff --git a/node_cli/operations/base.py b/node_cli/operations/base.py index 540f0188..f0eaec08 100644 --- a/node_cli/operations/base.py +++ b/node_cli/operations/base.py @@ -32,17 +32,11 @@ from node_cli.core.node_options import NodeOptions from node_cli.core.resources import update_resource_allocation, init_shared_space_volume -from node_cli.operations.common import ( - backup_old_contracts, - download_contracts, - configure_filebeat, - configure_flask, - unpack_backup_archive -) +from node_cli.operations.common import configure_filebeat, configure_flask, unpack_backup_archive from node_cli.operations.volume import ( cleanup_volume_artifacts, ensure_filestorage_mapping, - prepare_block_device + prepare_block_device, ) from node_cli.operations.docker_lvmpy import lvmpy_install # noqa from node_cli.operations.skale_node import download_skale_node, sync_skale_node, update_images @@ -55,7 +49,7 @@ remove_dynamic_containers, remove_schain_container, start_admin, - stop_admin + stop_admin, ) from node_cli.utils.meta import get_meta_info, update_meta from node_cli.utils.print_formatters import print_failed_requirements_checks @@ -68,15 +62,12 @@ def checked_host(func): @functools.wraps(func) def wrapper(env_filepath: str, env: Dict, *args, **kwargs): - download_skale_node( - env['CONTAINER_CONFIGS_STREAM'], - env.get('CONTAINER_CONFIGS_DIR') - ) + download_skale_node(env.get('CONTAINER_CONFIGS_STREAM'), env.get('CONTAINER_CONFIGS_DIR')) failed_checks = run_host_checks( env['DISK_MOUNTPOINT'], env['ENV_TYPE'], CONTAINER_CONFIG_TMP_PATH, - check_type=CheckType.PREINSTALL + check_type=CheckType.PREINSTALL, ) if failed_checks: print_failed_requirements_checks(failed_checks) @@ -90,7 +81,7 @@ def wrapper(env_filepath: str, env: Dict, *args, **kwargs): env['DISK_MOUNTPOINT'], env['ENV_TYPE'], CONTAINER_CONFIG_PATH, - check_type=CheckType.POSTINSTALL + check_type=CheckType.POSTINSTALL, ) 
if failed_checks: print_failed_requirements_checks(failed_checks) @@ -114,17 +105,10 @@ def update(env_filepath: str, env: Dict) -> None: enable_monitoring = str_to_bool(env.get('MONITORING_CONTAINERS', 'False')) configure_nftables(enable_monitoring=enable_monitoring) - backup_old_contracts() - download_contracts(env) - lvmpy_install(env) generate_nginx_config() - prepare_host( - env_filepath, - env['ENV_TYPE'], - allocation=True - ) + prepare_host(env_filepath, env['ENV_TYPE'], allocation=True) init_shared_space_volume(env['ENV_TYPE']) current_stream = get_meta_info().config_stream @@ -133,7 +117,7 @@ def update(env_filepath: str, env: Dict) -> None: logger.info( 'Stream version was changed from %s to %s', current_stream, - env['CONTAINER_CONFIGS_STREAM'] + env['CONTAINER_CONFIGS_STREAM'], ) docker_cleanup() @@ -142,7 +126,7 @@ def update(env_filepath: str, env: Dict) -> None: env['CONTAINER_CONFIGS_STREAM'], env['DOCKER_LVMPY_STREAM'], distro.id(), - distro.version() + distro.version(), ) update_images(env=env) compose_up(env) @@ -160,12 +144,8 @@ def init(env_filepath: str, env: dict) -> bool: enable_monitoring = str_to_bool(env.get('MONITORING_CONTAINERS', 'False')) configure_nftables(enable_monitoring=enable_monitoring) - prepare_host( - env_filepath, - env_type=env['ENV_TYPE'] - ) + prepare_host(env_filepath, env_type=env['ENV_TYPE']) link_env_file() - download_contracts(env) configure_filebeat() configure_flask() @@ -179,7 +159,7 @@ def init(env_filepath: str, env: dict) -> bool: env['CONTAINER_CONFIGS_STREAM'], env['DOCKER_LVMPY_STREAM'], distro.id(), - distro.version() + distro.version(), ) update_resource_allocation(env_type=env['ENV_TYPE']) update_images(env=env) @@ -189,17 +169,10 @@ def init(env_filepath: str, env: dict) -> bool: def init_sync( - env_filepath: str, - env: dict, - archive: bool, - historic_state: bool, - snapshot_from: Optional[str] + env_filepath: str, env: dict, archive: bool, historic_state: bool, snapshot_from: Optional[str] ) -> 
bool: cleanup_volume_artifacts(env['DISK_MOUNTPOINT']) - download_skale_node( - env.get('CONTAINER_CONFIGS_STREAM'), - env.get('CONTAINER_CONFIGS_DIR') - ) + download_skale_node(env.get('CONTAINER_CONFIGS_STREAM'), env.get('CONTAINER_CONFIGS_DIR')) sync_skale_node() if env.get('SKIP_DOCKER_CONFIG') != 'True': @@ -220,20 +193,16 @@ def init_sync( ensure_filestorage_mapping() link_env_file() - download_contracts(env) generate_nginx_config() - prepare_block_device( - env['DISK_MOUNTPOINT'], - force=env['ENFORCE_BTRFS'] == 'True' - ) + prepare_block_device(env['DISK_MOUNTPOINT'], force=env['ENFORCE_BTRFS'] == 'True') update_meta( VERSION, env['CONTAINER_CONFIGS_STREAM'], env['DOCKER_LVMPY_STREAM'], distro.id(), - distro.version() + distro.version(), ) update_resource_allocation(env_type=env['ENV_TYPE']) @@ -251,10 +220,7 @@ def update_sync(env_filepath: str, env: Dict) -> bool: compose_rm(env, sync_node=True) remove_dynamic_containers() cleanup_volume_artifacts(env['DISK_MOUNTPOINT']) - download_skale_node( - env['CONTAINER_CONFIGS_STREAM'], - env.get('CONTAINER_CONFIGS_DIR') - ) + download_skale_node(env['CONTAINER_CONFIGS_STREAM'], env.get('CONTAINER_CONFIGS_DIR')) sync_skale_node() if env.get('SKIP_DOCKER_CONFIG') != 'True': @@ -264,27 +230,18 @@ def update_sync(env_filepath: str, env: Dict) -> bool: configure_nftables(enable_monitoring=enable_monitoring) ensure_filestorage_mapping() - backup_old_contracts() - download_contracts(env) - prepare_block_device( - env['DISK_MOUNTPOINT'], - force=env['ENFORCE_BTRFS'] == 'True' - ) + prepare_block_device(env['DISK_MOUNTPOINT'], force=env['ENFORCE_BTRFS'] == 'True') generate_nginx_config() - prepare_host( - env_filepath, - env['ENV_TYPE'], - allocation=True - ) + prepare_host(env_filepath, env['ENV_TYPE'], allocation=True) update_meta( VERSION, env['CONTAINER_CONFIGS_STREAM'], env['DOCKER_LVMPY_STREAM'], distro.id(), - distro.version() + distro.version(), ) update_images(env=env, sync_node=True) @@ -306,7 +263,7 @@ def 
turn_on(env: dict) -> None: env['CONTAINER_CONFIGS_STREAM'], env['DOCKER_LVMPY_STREAM'], distro.id(), - distro.version() + distro.version(), ) if env.get('SKIP_DOCKER_CONFIG') != 'True': configure_docker() @@ -324,7 +281,7 @@ def restore(env, backup_path, config_only=False): env['DISK_MOUNTPOINT'], env['ENV_TYPE'], CONTAINER_CONFIG_PATH, - check_type=CheckType.PREINSTALL + check_type=CheckType.PREINSTALL, ) if failed_checks: print_failed_requirements_checks(failed_checks) @@ -347,7 +304,7 @@ def restore(env, backup_path, config_only=False): env['CONTAINER_CONFIGS_STREAM'], env['DOCKER_LVMPY_STREAM'], distro.id(), - distro.version() + distro.version(), ) update_resource_allocation(env_type=env['ENV_TYPE']) @@ -358,7 +315,7 @@ def restore(env, backup_path, config_only=False): env['DISK_MOUNTPOINT'], env['ENV_TYPE'], CONTAINER_CONFIG_PATH, - check_type=CheckType.POSTINSTALL + check_type=CheckType.POSTINSTALL, ) if failed_checks: print_failed_requirements_checks(failed_checks) @@ -367,10 +324,7 @@ def restore(env, backup_path, config_only=False): def repair_sync( - schain_name: str, - archive: bool, - historic_state: bool, - snapshot_from: Optional[str] + schain_name: str, archive: bool, historic_state: bool, snapshot_from: Optional[str] ) -> None: stop_admin(sync_node=True) remove_schain_container(schain_name=schain_name) diff --git a/node_cli/operations/common.py b/node_cli/operations/common.py index cfe79b42..7e484cdf 100644 --- a/node_cli/operations/common.py +++ b/node_cli/operations/common.py @@ -24,44 +24,23 @@ import shutil import secrets -import urllib.request from shutil import copyfile -from distutils.dir_util import copy_tree from node_cli.configs import ( - CONTRACTS_PATH, - BACKUP_CONTRACTS_PATH, G_CONF_HOME, FILEBEAT_CONFIG_PATH, FLASK_SECRET_KEY_FILE, - IMA_CONTRACTS_FILEPATH, - MANAGER_CONTRACTS_FILEPATH, - SRC_FILEBEAT_CONFIG_PATH + SRC_FILEBEAT_CONFIG_PATH, ) logger = logging.getLogger(__name__) -def backup_old_contracts(): - logging.info('Copying 
old contracts ABIs') - copy_tree(CONTRACTS_PATH, BACKUP_CONTRACTS_PATH) - - -def download_contracts(env): - urllib.request.urlretrieve(env['MANAGER_CONTRACTS_ABI_URL'], MANAGER_CONTRACTS_FILEPATH) - urllib.request.urlretrieve(env['IMA_CONTRACTS_ABI_URL'], IMA_CONTRACTS_FILEPATH) - - def configure_filebeat(): logger.info('Configuring filebeat...') copyfile(SRC_FILEBEAT_CONFIG_PATH, FILEBEAT_CONFIG_PATH) shutil.chown(FILEBEAT_CONFIG_PATH, user='root') - os.chmod( - FILEBEAT_CONFIG_PATH, - stat.S_IREAD | - stat.S_IWRITE | - stat.S_IEXEC - ) + os.chmod(FILEBEAT_CONFIG_PATH, stat.S_IREAD | stat.S_IWRITE | stat.S_IEXEC) logger.info('Filebeat configured') diff --git a/node_cli/operations/docker_lvmpy.py b/node_cli/operations/docker_lvmpy.py index 1810516b..6e28d58c 100644 --- a/node_cli/operations/docker_lvmpy.py +++ b/node_cli/operations/docker_lvmpy.py @@ -33,7 +33,7 @@ LVMPY_CRON_LOG_PATH, LVMPY_CRON_SCHEDULE_MINUTES, SCHAINS_MNT_DIR_REGULAR, - VOLUME_GROUP + VOLUME_GROUP, ) from lvmpy.src.install import setup as setup_lvmpy @@ -57,20 +57,14 @@ def ensure_filestorage_mapping(mapping_dir=FILESTORAGE_MAPPING): def sync_docker_lvmpy_repo(env): if os.path.isdir(DOCKER_LVMPY_PATH): shutil.rmtree(DOCKER_LVMPY_PATH) - sync_repo( - DOCKER_LVMPY_REPO_URL, - DOCKER_LVMPY_PATH, - env["DOCKER_LVMPY_STREAM"] - ) + sync_repo(DOCKER_LVMPY_REPO_URL, DOCKER_LVMPY_PATH, env['DOCKER_LVMPY_STREAM']) def lvmpy_install(env): ensure_filestorage_mapping() logging.info('Configuring and starting lvmpy') setup_lvmpy( - block_device=env['DISK_MOUNTPOINT'], - volume_group=VOLUME_GROUP, - exec_start=LVMPY_RUN_CMD + block_device=env['DISK_MOUNTPOINT'], volume_group=VOLUME_GROUP, exec_start=LVMPY_RUN_CMD ) init_healing_cron() logger.info('docker-lvmpy is configured and started') @@ -86,7 +80,5 @@ def init_healing_cron(): if legacy_line in jobs: c.remove_all(command=legacy_line) if cron_line not in jobs: - job = c.new( - command=cron_line - ) + job = c.new(command=cron_line) 
job.minute.every(LVMPY_CRON_SCHEDULE_MINUTES) diff --git a/node_cli/operations/skale_node.py b/node_cli/operations/skale_node.py index d91e4765..fd3b6f8a 100644 --- a/node_cli/operations/skale_node.py +++ b/node_cli/operations/skale_node.py @@ -22,14 +22,11 @@ import shutil from typing import Optional -from node_cli.utils.helper import rm_dir, rsync_dirs, safe_mkdir +from node_cli.utils.exit_codes import CLIExitCodes +from node_cli.utils.helper import rm_dir, rsync_dirs, safe_mkdir, error_exit from node_cli.utils.git_utils import clone_repo from node_cli.utils.docker_utils import compose_pull, compose_build -from node_cli.configs import ( - CONTAINER_CONFIG_PATH, - CONTAINER_CONFIG_TMP_PATH, - SKALE_NODE_REPO_URL -) +from node_cli.configs import CONTAINER_CONFIG_PATH, CONTAINER_CONFIG_TMP_PATH, SKALE_NODE_REPO_URL logger = logging.getLogger(__name__) @@ -43,17 +40,35 @@ def update_images(env: dict, sync_node: bool = False) -> None: compose_pull(env=env, sync_node=sync_node) -def download_skale_node(stream: Optional[str], src: Optional[str]) -> None: - rm_dir(CONTAINER_CONFIG_TMP_PATH) - safe_mkdir(CONTAINER_CONFIG_TMP_PATH) - dest = CONTAINER_CONFIG_TMP_PATH - if src: - rsync_dirs(src, dest) - else: - clone_repo( - SKALE_NODE_REPO_URL, - CONTAINER_CONFIG_TMP_PATH, - stream +def download_skale_node(stream: Optional[str] = None, src: Optional[str] = None) -> None: + """Downloads SKALE node config from repo or local directory""" + if not src and not stream: + error_exit('Either src path or stream must be provided', exit_code=CLIExitCodes.FAILURE) + + try: + rm_dir(CONTAINER_CONFIG_TMP_PATH) + safe_mkdir(CONTAINER_CONFIG_TMP_PATH) + dest = CONTAINER_CONFIG_TMP_PATH + + if src: + if not os.path.isdir(src): + error_exit( + f'Source directory does not exist: {src}', exit_code=CLIExitCodes.FAILURE + ) + logger.info(f'Syncing config files from {src}') + rsync_dirs(src, dest) + elif stream: + logger.info(f'Cloning config files from {SKALE_NODE_REPO_URL} ({stream})') + 
clone_repo(SKALE_NODE_REPO_URL, dest, stream) + else: + # Should never reach this point + error_exit('Either src path or stream must be provided', exit_code=CLIExitCodes.FAILURE) + + except (OSError, RuntimeError) as err: + rm_dir(CONTAINER_CONFIG_TMP_PATH) + error_exit( + f'Failed to download node configuration: {err}', + exit_code=CLIExitCodes.OPERATION_EXECUTION_ERROR, ) diff --git a/node_cli/utils/decorators.py b/node_cli/utils/decorators.py index 95c822e8..f0d11e00 100644 --- a/node_cli/utils/decorators.py +++ b/node_cli/utils/decorators.py @@ -30,11 +30,14 @@ def check_not_inited(f): + """Decorator that checks if node is not already initialized.""" + @wraps(f) def inner(*args, **kwargs): if is_node_inited(): error_exit(TEXTS['node']['already_inited'], exit_code=CLIExitCodes.NODE_STATE_ERROR) return f(*args, **kwargs) + return inner @@ -44,6 +47,7 @@ def inner(*args, **kwargs): if not is_node_inited(): error_exit(TEXTS['node']['not_inited'], exit_code=CLIExitCodes.NODE_STATE_ERROR) return f(*args, **kwargs) + return inner @@ -53,8 +57,9 @@ def inner(*args, **kwargs): if not is_user_valid(): g_conf_user = get_g_conf_user() current_user = get_system_user() - error_msg = f'You couldn\'t execute this command from user {current_user}. \ -Allowed: {g_conf_user} or root.' + error_msg = f"You couldn't execute this command from user {current_user}. \ +Allowed: {g_conf_user} or root." 
error_exit(error_msg, exit_code=CLIExitCodes.BAD_USER_ERROR) return f(*args, **kwargs) + return inner diff --git a/node_cli/utils/git_utils.py b/node_cli/utils/git_utils.py index be1fd49d..4adfebab 100644 --- a/node_cli/utils/git_utils.py +++ b/node_cli/utils/git_utils.py @@ -21,29 +21,65 @@ import logging from git.repo.base import Repo -from git.exc import GitCommandError - +from git.exc import GitCommandError, GitError +from node_cli.utils.exit_codes import CLIExitCodes +from node_cli.utils.helper import error_exit logger = logging.getLogger(__name__) def check_is_branch(repo: Repo, ref_name: str) -> bool: + """Check if the given reference name is a valid git branch.""" + if not repo or not isinstance(repo, Repo): + raise ValueError('Invalid repository object') + if not ref_name or not isinstance(ref_name, str): + raise ValueError('Invalid reference name') + try: + # Verify if ref_name exists as a branch using git show-ref repo.git.show_ref('--verify', f'refs/heads/{ref_name}') - logger.debug(f'{ref_name} is branch') + logger.debug(f'{ref_name} is a branch') return True except GitCommandError: - logger.debug(f'{ref_name} is not branch') + # Expected error when reference is not found + logger.debug(f'{ref_name} is not a branch') return False + except GitError as e: + # Git-specific errors (permissions, config, etc) + logger.error(f'Git error checking branch: {str(e)}') + raise RuntimeError(f'Git error checking branch: {str(e)}') from e + except Exception as e: + # Unexpected system errors + logger.error(f'Unexpected error checking branch: {str(e)}') + raise RuntimeError(f'Unexpected error checking branch: {str(e)}') from e def clone_repo(repo_url: str, repo_path: str, ref_name: str) -> None: - logger.info(f'Cloning {repo_url} → {repo_path}') - Repo.clone_from(repo_url, repo_path) - fetch_pull_repo(repo_path, ref_name) + """Clone a git repository and checkout specified reference.""" + if not all([repo_url, repo_path, ref_name]): + error_exit('Empty repository URL, 
path or reference', CLIExitCodes.FAILURE) + if not all(isinstance(x, str) for x in [repo_url, repo_path, ref_name]): + error_exit('Invalid input types', CLIExitCodes.FAILURE) + + try: + logger.info(f'Cloning {repo_url} → {repo_path}') + Repo.clone_from(repo_url, repo_path) + fetch_pull_repo(repo_path, ref_name) + except GitError as e: + error_exit( + f'Git error cloning repository: {str(e)}', CLIExitCodes.OPERATION_EXECUTION_ERROR + ) + except Exception as e: + error_exit(f'Unexpected error cloning repository: {str(e)}', CLIExitCodes.FAILURE) def sync_repo(repo_url: str, repo_path: str, ref_name: str) -> None: + """Sync Git repository by cloning if not exists or fetching latest changes.""" + if not all([repo_url, repo_path, ref_name]): + error_exit('Empty repository URL, path or reference', CLIExitCodes.FAILURE) + if not all(isinstance(x, str) for x in [repo_url, repo_path, ref_name]): + error_exit('Invalid input types', CLIExitCodes.FAILURE) + logger.info(f'Sync repo {repo_url} → {repo_path}') if not os.path.isdir(os.path.join(repo_path, '.git')): clone_repo(repo_url, repo_path, ref_name) @@ -52,11 +88,32 @@ def sync_repo(repo_url: str, repo_path: str, ref_name: str) -> None: def fetch_pull_repo(repo_path: str, ref_name: str) -> None: - repo = Repo(repo_path) - repo_name = os.path.basename(repo.working_dir) - logger.info(f'Fetching {repo_name} changes') - repo.remotes.origin.fetch() - logger.info(f'Checkouting {repo_path} to {ref_name}') - repo.git.checkout(ref_name) - if check_is_branch(repo, ref_name): - repo.remotes.origin.pull() + """Fetch latest changes and checkout/pull specific git reference.""" + # Validate inputs + if not repo_path or not isinstance(repo_path, str): + error_exit('Invalid repository path', CLIExitCodes.FAILURE) + if not ref_name or not isinstance(ref_name, str): + error_exit('Invalid reference name', CLIExitCodes.FAILURE) + + try: + # Initialize repo and get name for logging + repo = Repo(repo_path) + repo_name = 
os.path.basename(repo.working_dir) + + # Fetch latest changes + logger.info(f'Fetching latest changes for {repo_name}') + repo.remotes.origin.fetch() + + # Checkout specified reference + logger.info(f'Checking out {ref_name} in {repo_name}') + repo.git.checkout(ref_name) + + # Pull latest changes if ref is a branch + if check_is_branch(repo, ref_name): + logger.info(f'Pulling latest changes for branch {ref_name}') + repo.remotes.origin.pull() + + except GitError as e: + error_exit(f'Git operation failed: {str(e)}', CLIExitCodes.OPERATION_EXECUTION_ERROR) + except Exception as e: + error_exit(f'Repository operation failed: {str(e)}', CLIExitCodes.FAILURE) diff --git a/node_cli/utils/helper.py b/node_cli/utils/helper.py index 39de440a..261e61b7 100644 --- a/node_cli/utils/helper.py +++ b/node_cli/utils/helper.py @@ -25,7 +25,7 @@ import sys import uuid from urllib.parse import urlparse -from typing import Optional +from typing import Any, Optional import yaml import shutil @@ -49,20 +49,26 @@ from node_cli.utils.print_formatters import print_err_response from node_cli.utils.exit_codes import CLIExitCodes -from node_cli.configs.env import ( - absent_params as absent_env_params, - get_env_config -) from node_cli.configs import ( - TEXT_FILE, ADMIN_HOST, ADMIN_PORT, HIDE_STREAM_LOG, GLOBAL_SKALE_DIR, - GLOBAL_SKALE_CONF_FILEPATH, DEFAULT_SSH_PORT + TEXT_FILE, + ADMIN_HOST, + ADMIN_PORT, + HIDE_STREAM_LOG, + GLOBAL_SKALE_DIR, + GLOBAL_SKALE_CONF_FILEPATH, + DEFAULT_SSH_PORT, ) from node_cli.configs.routes import get_route from node_cli.utils.global_config import read_g_config, get_system_user from node_cli.configs.cli_logger import ( - FILE_LOG_FORMAT, LOG_BACKUP_COUNT, LOG_FILE_SIZE_BYTES, - LOG_FILEPATH, STREAM_LOG_FORMAT, DEBUG_LOG_FILEPATH) + FILE_LOG_FORMAT, + LOG_BACKUP_COUNT, + LOG_FILE_SIZE_BYTES, + LOG_FILEPATH, + STREAM_LOG_FORMAT, + DEBUG_LOG_FILEPATH, +) logger = logging.getLogger(__name__) @@ -71,7 +77,7 @@ DEFAULT_ERROR_DATA = { 'status': 'error', - 
'payload': 'Request failed. Check skale_api container logs' + 'payload': 'Request failed. Check skale_api container logs', } @@ -100,14 +106,7 @@ def init_file(path, content=None): write_json(path, content) -def run_cmd( - cmd, - env={}, - shell=False, - secure=False, - check_code=True, - separate_stderr=False -): +def run_cmd(cmd, env={}, shell=False, secure=False, check_code=True, separate_stderr=False): if not secure: logger.debug(f'Running: {cmd}') else: @@ -115,13 +114,7 @@ def run_cmd( stdout, stderr = subprocess.PIPE, subprocess.PIPE if not separate_stderr: stderr = subprocess.STDOUT - res = subprocess.run( - cmd, - shell=shell, - stdout=stdout, - stderr=stderr, - env={**env, **os.environ} - ) + res = subprocess.run(cmd, shell=shell, stdout=stdout, stderr=stderr, env={**env, **os.environ}) if check_code: output = res.stdout.decode('utf-8') if res.returncode: @@ -152,7 +145,7 @@ def process_template(source, destination, data): """ template = read_file(source) processed_template = Environment().from_string(template).render(data) - with open(destination, "w") as f: + with open(destination, 'w') as f: f.write(processed_template) @@ -160,26 +153,28 @@ def get_username(): return os.environ.get('USERNAME') or os.environ.get('USER') -def extract_env_params(env_filepath, sync_node=False, raise_for_status=True): - env_params = get_env_config(env_filepath, sync_node=sync_node) - absent_params = ', '.join(absent_env_params(env_params)) - if absent_params: - click.echo(f"Your env file({env_filepath}) have some absent params: " - f"{absent_params}.\n" - f"You should specify them to make sure that " - f"all services are working", - err=True) - if raise_for_status: - raise InvalidEnvFileError(f'Missing params: {absent_params}') - return None - return env_params - - def str_to_bool(val): return bool(distutils.util.strtobool(val)) -def error_exit(error_payload, exit_code=CLIExitCodes.FAILURE): +def error_exit(error_payload: Any, exit_code: CLIExitCodes = CLIExitCodes.FAILURE) 
-> None: + """Print error message and exit the program with specified exit code. + + Args: + error_payload: Error message string or list of error messages + exit_code: Exit code to use when terminating the program (default: FAILURE) + + Raises: + TypeError: If exit_code is not CLIExitCodes + + Example: + >>> error_exit("Permission denied", CLIExitCodes.BAD_USER_ERROR) + Permission denied + + """ + if not isinstance(exit_code, CLIExitCodes): + raise TypeError('exit_code must be CLIExitCodes enum') + print_err_response(error_payload) sys.exit(exit_code.value) @@ -260,7 +255,7 @@ def download_dump(path, container_name=None): error_exit(r.json()) return None d = r.headers['Content-Disposition'] - fname_q = re.findall("filename=(.+)", d)[0] + fname_q = re.findall('filename=(.+)', d)[0] fname = fname_q.replace('"', '') filepath = os.path.join(path, fname) with open(filepath, 'wb') as f: @@ -271,8 +266,7 @@ def download_dump(path, container_name=None): def init_default_logger(): f_handler = get_file_handler(LOG_FILEPATH, logging.INFO) debug_f_handler = get_file_handler(DEBUG_LOG_FILEPATH, logging.DEBUG) - logging.basicConfig( - level=logging.DEBUG, handlers=[f_handler, debug_f_handler]) + logging.basicConfig(level=logging.DEBUG, handlers=[f_handler, debug_f_handler]) def get_stream_handler(): @@ -286,8 +280,8 @@ def get_stream_handler(): def get_file_handler(log_filepath, log_level): formatter = Formatter(FILE_LOG_FORMAT) f_handler = py_handlers.RotatingFileHandler( - log_filepath, maxBytes=LOG_FILE_SIZE_BYTES, - backupCount=LOG_BACKUP_COUNT) + log_filepath, maxBytes=LOG_FILE_SIZE_BYTES, backupCount=LOG_BACKUP_COUNT + ) f_handler.setFormatter(formatter) f_handler.setLevel(log_level) @@ -304,27 +298,15 @@ def to_camel_case(snake_str): return components[0] + ''.join(x.title() for x in components[1:]) -def validate_abi(abi_filepath: str) -> dict: - if not os.path.isfile(abi_filepath): - return {'filepath': abi_filepath, - 'status': 'error', - 'msg': 'No such file'} - try: - 
with open(abi_filepath) as abi_file: - json.load(abi_file) - except Exception: - return {'filepath': abi_filepath, 'status': 'error', - 'msg': 'Failed to load abi file as json'} - return {'filepath': abi_filepath, 'status': 'ok', 'msg': ''} - - def streamed_cmd(func): - """ Decorator that allow function to print logs into stderr """ + """Decorator that allow function to print logs into stderr""" + @wraps(func) def wrapper(*args, **kwargs): if HIDE_STREAM_LOG is None: logging.getLogger('').addHandler(get_stream_handler()) return func(*args, **kwargs) + return wrapper @@ -349,27 +331,70 @@ def get_g_conf_home(): def rm_dir(folder: str) -> None: - if os.path.exists(folder): - logger.info(f'{folder} exists, removing...') - shutil.rmtree(folder) - else: - logger.info(f'{folder} doesn\'t exist, skipping...') + """Safely remove a directory and all its contents, if it exists.""" + if not isinstance(folder, str): + error_exit(f'folder must be a string, got {type(folder)}', exit_code=CLIExitCodes.FAILURE) + + try: + if os.path.exists(folder): + logger.info(f'Directory {folder} exists, removing...') + shutil.rmtree(folder, ignore_errors=False) + else: + logger.info(f"Directory {folder} doesn't exist, skipping...") + except OSError as e: + logger.error(f'Failed to remove directory {folder}: {e}') + error_exit( + f'Failed to remove directory {folder}: {e}', + exit_code=CLIExitCodes.OPERATION_EXECUTION_ERROR, + ) -def safe_mkdir(path: str, print_res: bool = False): +def safe_mkdir(path: str, print_res: bool = False) -> None: + """Create a directory if it doesn't exist.""" + if not isinstance(path, str): + error_exit(f'path must be a string, got {type(path)}', exit_code=CLIExitCodes.FAILURE) + if os.path.exists(path): + logger.debug(f'Directory {path} already exists') return + msg = f'Creating {path} directory...' 
logger.info(msg) if print_res: print(msg) - os.makedirs(path, exist_ok=True) + + try: + os.makedirs(path, exist_ok=True) + except OSError as e: + logger.error(f'Failed to create directory {path}: {e}') + error_exit( + f'Failed to create directory {path}: {e}', + exit_code=CLIExitCodes.OPERATION_EXECUTION_ERROR, + ) def rsync_dirs(src: str, dest: str) -> None: - logger.info(f'Syncing {dest} with {src}') - run_cmd(['rsync', '-r', f'{src}/', dest]) - run_cmd(['rsync', '-r', f'{src}/.git', dest]) + """Synchronize two directories using rsync.""" + if not isinstance(src, str) or not isinstance(dest, str): + error_exit('Source and destination paths must be strings', exit_code=CLIExitCodes.FAILURE) + + if not src.strip() or not dest.strip(): + error_exit('Source and destination paths cannot be empty', exit_code=CLIExitCodes.FAILURE) + + if not os.path.isdir(src): + error_exit(f'Source directory does not exist: {src}', exit_code=CLIExitCodes.FAILURE) + + logger.info(f'Syncing directory {dest} with {src}') + + try: + # Sync all files including hidden ones + run_cmd(['rsync', '-r', f'{src}/', dest]) + run_cmd(['rsync', '-r', f'{src}/.git', dest]) + except subprocess.CalledProcessError as e: + logger.error(f'Rsync failed: {e}') + error_exit( + f'Failed to sync directories: {e}', exit_code=CLIExitCodes.OPERATION_EXECUTION_ERROR + ) def ok_result(payload: dict = None): @@ -387,8 +412,7 @@ def convert(self, value, param, ctx): try: result = urlparse(value) except ValueError: - self.fail(f'Some characters are not allowed in {value}', - param, ctx) + self.fail(f'Some characters are not allowed in {value}', param, ctx) if not all([result.scheme, result.netloc]): self.fail(f'Expected valid url. Got {value}', param, ctx) return value @@ -401,8 +425,7 @@ def convert(self, value, param, ctx): try: ipaddress.ip_address(value) except ValueError: - self.fail(f'expected valid ipv4/ipv6 address. Got {value}', - param, ctx) + self.fail(f'expected valid ipv4/ipv6 address. 
Got {value}', param, ctx) return value diff --git a/node_cli/utils/meta.py b/node_cli/utils/meta.py index 69078af2..94e9581b 100644 --- a/node_cli/utils/meta.py +++ b/node_cli/utils/meta.py @@ -11,16 +11,18 @@ class CliMeta( - namedtuple( - 'Node', - ['version', 'config_stream', 'docker_lvmpy_stream', 'os_id', 'os_version'] - ) + namedtuple('Node', ['version', 'config_stream', 'docker_lvmpy_stream', 'os_id', 'os_version']) ): __slots__ = () - def __new__(cls, version=DEFAULT_VERSION, config_stream=DEFAULT_CONFIG_STREAM, - docker_lvmpy_stream=DEFAULT_DOCKER_LVMPY_STREAM, os_id=DEFAULT_OS_ID, - os_version=DEFAULT_OS_VERSION): + def __new__( + cls, + version=DEFAULT_VERSION, + config_stream=DEFAULT_CONFIG_STREAM, + docker_lvmpy_stream=DEFAULT_DOCKER_LVMPY_STREAM, + os_id=DEFAULT_OS_ID, + os_version=DEFAULT_OS_VERSION, + ): return super(CliMeta, cls).__new__( cls, version, config_stream, docker_lvmpy_stream, os_id, os_version ) @@ -42,10 +44,13 @@ def save_meta(meta: CliMeta) -> None: def compose_default_meta() -> CliMeta: - return CliMeta(version=DEFAULT_VERSION, - docker_lvmpy_stream=DEFAULT_DOCKER_LVMPY_STREAM, - config_stream=DEFAULT_CONFIG_STREAM, os_id=DEFAULT_OS_ID, - os_version=DEFAULT_OS_VERSION) + return CliMeta( + version=DEFAULT_VERSION, + docker_lvmpy_stream=DEFAULT_DOCKER_LVMPY_STREAM, + config_stream=DEFAULT_CONFIG_STREAM, + os_id=DEFAULT_OS_ID, + os_version=DEFAULT_OS_VERSION, + ) def ensure_meta(meta: CliMeta = None) -> None: @@ -54,8 +59,9 @@ def ensure_meta(meta: CliMeta = None) -> None: save_meta(meta) -def update_meta(version: str, config_stream: str, - docker_lvmpy_stream: str, os_id: str, os_version: str) -> None: +def update_meta( + version: str, config_stream: str, docker_lvmpy_stream: str, os_id: str, os_version: str +) -> None: ensure_meta() meta = CliMeta(version, config_stream, docker_lvmpy_stream, os_id, os_version) save_meta(meta) diff --git a/node_cli/utils/print_formatters.py b/node_cli/utils/print_formatters.py index 72225db9..844cc949 
100644 --- a/node_cli/utils/print_formatters.py +++ b/node_cli/utils/print_formatters.py @@ -20,6 +20,7 @@ import os import json import datetime +from typing import Any import texttable from dateutil import parser @@ -34,13 +35,15 @@ def print_wallet_info(wallet): - print(inspect.cleandoc(f''' + print( + inspect.cleandoc(f""" {LONG_LINE} Address: {wallet['address'].lower()} ETH balance: {wallet['eth_balance']} ETH SKALE balance: {wallet['skale_balance']} SKALE {LONG_LINE} - ''')) + """) + ) def get_tty_width(): @@ -63,31 +66,21 @@ def table(self, headers, rows): def format_date(date): - return date.strftime("%b %d %Y %H:%M:%S") + return date.strftime('%b %d %Y %H:%M:%S') def print_containers(containers): - headers = [ - 'Name', - 'Status', - 'Started At', - 'Image' - ] + headers = ['Name', 'Status', 'Started At', 'Image'] rows = [] for container in containers: - date = parser.parse(container["state"]["StartedAt"]) - status = container["state"]["Status"].capitalize() + date = parser.parse(container['state']['StartedAt']) + status = container['state']['Status'].capitalize() if not container['state']['Running']: - finished_date = parser.parse(container["state"]["FinishedAt"]) + finished_date = parser.parse(container['state']['FinishedAt']) status = f'{status} ({format_date(finished_date)})' - rows.append([ - container['name'], - status, - format_date(date), - container['image'] - ]) + rows.append([container['name'], status, format_date(date), container['image']]) print(Formatter().table(headers, rows)) @@ -106,38 +99,29 @@ def print_schains(schains): rows = [] for schain in schains: date = datetime.datetime.fromtimestamp(schain['start_date']) - rows.append([ - schain['name'], - schain['mainnet_owner'], - schain['part_of_node'], - schain['lifetime'], - format_date(date), - schain['deposit'], - schain['generation'], - schain['originator'], - schain['options']['allocation_type'] - ]) + rows.append( + [ + schain['name'], + schain['mainnet_owner'], + 
schain['part_of_node'], + schain['lifetime'], + format_date(date), + schain['deposit'], + schain['generation'], + schain['originator'], + schain['options']['allocation_type'], + ] + ) print(Formatter().table(headers, rows)) def print_dkg_statuses(statuses): - headers = [ - 'sChain Name', - 'DKG Status', - 'Added At', - 'sChain Status' - ] + headers = ['sChain Name', 'DKG Status', 'Added At', 'sChain Status'] rows = [] for status in statuses: date = datetime.datetime.fromtimestamp(status['added_at']) - schain_status = 'Deleted' \ - if status['is_deleted'] else 'Exists' - rows.append([ - status['name'], - status['dkg_status_name'], - format_date(date), - schain_status - ]) + schain_status = 'Deleted' if status['is_deleted'] else 'Exists' + rows.append([status['name'], status['dkg_status_name'], format_date(date), schain_status]) print(Formatter().table(headers, rows)) @@ -152,23 +136,25 @@ def print_schains_healthchecks(schains): 'IMA', 'Firewall', 'RPC', - 'Blocks' + 'Blocks', ] rows = [] for schain in schains: healthchecks = schain['healthchecks'] - rows.append([ - schain['name'], - healthchecks['config_dir'], - healthchecks['dkg'], - healthchecks['config'], - healthchecks['volume'], - healthchecks['skaled_container'], - healthchecks.get('ima_container', 'No IMA'), - healthchecks['firewall_rules'], - healthchecks['rpc'], - healthchecks['blocks'] - ]) + rows.append( + [ + schain['name'], + healthchecks['config_dir'], + healthchecks['dkg'], + healthchecks['config'], + healthchecks['volume'], + healthchecks['skaled_container'], + healthchecks.get('ima_container', 'No IMA'), + healthchecks['firewall_rules'], + healthchecks['rpc'], + healthchecks['blocks'], + ] + ) print(Formatter().table(headers, rows)) @@ -187,19 +173,11 @@ def print_schains_logs(schains_logs): def print_log_list(logs): - headers = [ - 'Name', - 'Size', - 'Created At' - ] + headers = ['Name', 'Size', 'Created At'] rows = [] for log in logs: date = datetime.datetime.fromtimestamp(log['created_at']) - 
rows.append([ - log['name'], - log['size'], - format_date(date) - ]) + rows.append([log['name'], log['size'], format_date(date)]) print(Formatter().table(headers, rows)) @@ -209,10 +187,7 @@ def print_dict(title, rows, headers=['Key', 'Value']): def print_exit_status(exit_status_info): - headers = [ - 'Schain name', - 'Status' - ] + headers = ['Schain name', 'Status'] logs = exit_status_info['data'] node_exit_status = exit_status_info['status'].lower() rows = [[log['name'], log['status'].lower()] for log in logs] @@ -230,20 +205,14 @@ def print_firewall_rules(rules, raw=False): print('No allowed endpoints') return if raw: - print(json.dumpes(rules)) - headers = [ - 'IP range', - 'Port' - ] + print(json.dumps(rules)) + headers = ['IP range', 'Port'] rows = [] for rule in sorted(rules, key=lambda r: r['port']): ip_range = 'All IPs' - if rule["first_ip"] and rule["last_ip"]: + if rule['first_ip'] and rule['last_ip']: ip_range = f'{rule["first_ip"]} - {rule["last_ip"]}' - rows.append([ - ip_range, - rule['port'] - ]) + rows.append([ip_range, rule['port']]) print(Formatter().table(headers, rows)) @@ -256,24 +225,13 @@ def print_schain_info(info: dict, raw: bool = False) -> None: print(Formatter().table(headers, [rows])) -def print_abi_validation_errors(info: list, raw: bool = False) -> None: - if not info: - return - if raw: - print(json.dumps(info)) - else: - headers = info[0].keys() - rows = [tuple(r.values()) for r in info] - headers = list(map(lambda h: h.capitalize(), headers)) - print(Formatter().table(headers, rows)) - - def print_node_cmd_error(): print(TEXTS['node']['cmd_failed'].format(DEBUG_LOG_FILEPATH)) def print_node_info(node, node_status): - print(inspect.cleandoc(f""" + print( + inspect.cleandoc(f""" {LONG_LINE} Node info Name: {node['name']} @@ -284,20 +242,41 @@ def print_node_info(node, node_status): Domain name: {node['domain_name']} Status: {node_status} {LONG_LINE} - """)) - - -def print_err_response(error_payload): - if isinstance(error_payload, 
list): - error_msg = '\n'.join(error_payload) - else: - error_msg = error_payload - - print('Command failed with following errors:') - print(LONG_LINE) - print(error_msg) - print(LONG_LINE) - print(f'You can find more info in {DEBUG_LOG_FILEPATH}') + """) + ) + + +def print_err_response(error_payload: Any) -> None: + """Print formatted error message from API response payload. + + Handles different types of error payloads (str, list, dict etc.) and formats them + into a user-friendly error message along with debug log file location. + """ + try: + if isinstance(error_payload, (list, tuple)): + # Join list items with newlines for multiple errors + error_msg = '\n'.join(str(err) for err in error_payload) + elif isinstance(error_payload, dict): + # Format dict as JSON string + error_msg = json.dumps(error_payload, indent=2) + else: + # Convert any other type to string + error_msg = str(error_payload) + + print('Command failed with following errors:') + print(LONG_LINE) + print(error_msg) + print(LONG_LINE) + print(f'You can find more info in {DEBUG_LOG_FILEPATH}') + + except Exception as e: + # Fallback for unexpected errors while formatting + print('Error occurred while processing error payload:') + print(LONG_LINE) + print(f'Original error payload: {error_payload}') + print(f'Error while formatting: {str(e)}') + print(LONG_LINE) + print(f'Check logs at {DEBUG_LOG_FILEPATH} for more details') def print_failed_requirements_checks(failed_checks: list) -> None: @@ -313,10 +292,12 @@ def print_failed_requirements_checks(failed_checks: list) -> None: def print_meta_info(meta_info: CliMeta) -> None: - print(inspect.cleandoc(f""" + print( + inspect.cleandoc(f""" {LONG_LINE} Version: {meta_info.version} Config Stream: {meta_info.config_stream} Lvmpy stream: {meta_info.docker_lvmpy_stream} {LONG_LINE} - """)) + """) + ) diff --git a/ruff.toml b/ruff.toml new file mode 100644 index 00000000..90d9d4f6 --- /dev/null +++ b/ruff.toml @@ -0,0 +1,4 @@ +line-length = 100 + +[format] 
+quote-style = "single" \ No newline at end of file diff --git a/setup.py b/setup.py index dc571af4..02ee5fb6 100644 --- a/setup.py +++ b/setup.py @@ -5,14 +5,13 @@ def read(*parts): path = os.path.join(os.path.dirname(__file__), *parts) - f = open(path, "r") + f = open(path, 'r') return f.read() def find_version(*file_paths): version_file = read(*file_paths) - version_match = re.search(r"^__version__ = ['\"]([^'\"]*)['\"]", - version_file, re.M) + version_match = re.search(r"^__version__ = ['\"]([^'\"]*)['\"]", version_file, re.M) if version_match: return version_match.group(1) raise RuntimeError("Couldn't parse version from file.") @@ -20,29 +19,28 @@ def find_version(*file_paths): extras_require = { 'linter': [ - "flake8==7.1.1", - "isort>=4.2.15,<5.10.2", + 'flake8==7.1.1', + 'isort>=4.2.15,<5.10.2', + 'ruff==0.9.9', ], 'dev': [ - "bumpversion==0.6.0", - "pytest==8.3.2", - "pytest-cov==5.0.0", - "twine==4.0.2", - "mock==4.0.3", - "freezegun==1.2.2" - ] + 'bumpversion==0.6.0', + 'pytest==8.3.2', + 'pytest-cov==5.0.0', + 'twine==4.0.2', + 'mock==4.0.3', + 'freezegun==1.2.2', + ], } -extras_require['dev'] = ( - extras_require['linter'] + extras_require['dev'] -) +extras_require['dev'] = extras_require['linter'] + extras_require['dev'] setup( name='node-cli', # *IMPORTANT*: Don't manually change the version here. # Use the 'bumpversion' utility instead. 
- version=find_version("node_cli", "cli", "__init__.py"), + version=find_version('node_cli', 'cli', '__init__.py'), include_package_data=True, description='SKALE client tools', long_description_markdown_filename='README.md', @@ -50,34 +48,33 @@ def find_version(*file_paths): author_email='support@skalelabs.com', url='https://github.com/skalenetwork/node-cli', install_requires=[ - "click==8.1.7", - "PyInstaller==5.12.0", - "distro==1.9.0", - "docker==6.0.1", - "texttable==1.6.7", - "python-dateutil==2.8.2", - "Jinja2==3.1.4", - "psutil==5.9.4", - "python-dotenv==0.21.0", - "terminaltables==3.1.10", - "requests==2.28.1", - "GitPython==3.1.41", - "packaging==23.0", - "python-debian==0.1.49", - "PyYAML==6.0", - "pyOpenSSL==24.2.1", - "MarkupSafe==3.0.2", + 'click==8.1.7', + 'PyInstaller==5.12.0', + 'distro==1.9.0', + 'docker==6.0.1', + 'texttable==1.6.7', + 'python-dateutil==2.8.2', + 'Jinja2==3.1.4', + 'psutil==5.9.4', + 'python-dotenv==0.21.0', + 'terminaltables==3.1.10', + 'requests==2.28.1', + 'GitPython==3.1.41', + 'packaging==23.0', + 'python-debian==0.1.49', + 'PyYAML==6.0', + 'pyOpenSSL==24.2.1', + 'MarkupSafe==3.0.2', 'Flask==2.3.3', 'itsdangerous==2.1.2', - "cryptography==42.0.4", - "filelock==3.0.12", + 'cryptography==42.0.4', + 'filelock==3.0.12', 'sh==1.14.2', 'python-crontab==2.6.0', - 'skale-contracts==1.0.1' + 'requests-mock==1.12.1', ], python_requires='>=3.8,<4', extras_require=extras_require, - keywords=['skale', 'cli'], packages=find_packages(exclude=['tests']), classifiers=[ diff --git a/tests/cli/node_test.py b/tests/cli/node_test.py index 7e206db1..c0339afd 100644 --- a/tests/cli/node_test.py +++ b/tests/cli/node_test.py @@ -83,7 +83,10 @@ def test_register_node_with_error(resource_alloc, mocked_g_config): ['--name', 'test-node2', '--ip', '0.0.0.0', '--port', '80', '-d', 'skale.test'], ) assert result.exit_code == 3 - assert (result.output == f'Command failed with following errors:\n--------------------------------------------------\nStrange 
error\n--------------------------------------------------\nYou can find more info in {G_CONF_HOME}.skale/.skale-cli-log/debug-node-cli.log\n') # noqa + assert ( + result.output + == f'Command failed with following errors:\n--------------------------------------------------\nStrange error\n--------------------------------------------------\nYou can find more info in {G_CONF_HOME}.skale/.skale-cli-log/debug-node-cli.log\n' + ) # noqa def test_register_node_with_prompted_ip(resource_alloc, mocked_g_config): @@ -97,7 +100,10 @@ def test_register_node_with_prompted_ip(resource_alloc, mocked_g_config): input='0.0.0.0\n', ) assert result.exit_code == 0 - assert (result.output == 'Enter node public IP: 0.0.0.0\nNode registered in SKALE manager.\nFor more info run < skale node info >\n') # noqa + assert ( + result.output + == 'Enter node public IP: 0.0.0.0\nNode registered in SKALE manager.\nFor more info run < skale node info >\n' + ) # noqa def test_register_node_with_default_port(resource_alloc, mocked_g_config): @@ -111,7 +117,10 @@ def test_register_node_with_default_port(resource_alloc, mocked_g_config): input='0.0.0.0\n', ) assert result.exit_code == 0 - assert (result.output == 'Enter node public IP: 0.0.0.0\nNode registered in SKALE manager.\nFor more info run < skale node info >\n') # noqa + assert ( + result.output + == 'Enter node public IP: 0.0.0.0\nNode registered in SKALE manager.\nFor more info run < skale node info >\n' + ) # noqa def test_register_with_no_alloc(mocked_g_config): @@ -124,7 +133,10 @@ def test_register_with_no_alloc(mocked_g_config): input='0.0.0.0\n', ) assert result.exit_code == 8 - assert (result.output == f"Enter node public IP: 0.0.0.0\nCommand failed with following errors:\n--------------------------------------------------\nNode hasn't been inited before.\nYou should run < skale node init >\n--------------------------------------------------\nYou can find more info in {G_CONF_HOME}.skale/.skale-cli-log/debug-node-cli.log\n") # noqa + 
assert ( + result.output + == f"Enter node public IP: 0.0.0.0\nCommand failed with following errors:\n--------------------------------------------------\nNode hasn't been inited before.\nYou should run < skale node init >\n--------------------------------------------------\nYou can find more info in {G_CONF_HOME}.skale/.skale-cli-log/debug-node-cli.log\n" + ) # noqa def test_node_info_node_info(): @@ -149,7 +161,10 @@ def test_node_info_node_info(): resp_mock = response_mock(requests.codes.ok, json_data={'payload': payload, 'status': 'ok'}) result = run_command_mock('node_cli.utils.helper.requests.get', resp_mock, node_info) assert result.exit_code == 0 - assert (result.output == '--------------------------------------------------\nNode info\nName: test\nID: 32\nIP: 0.0.0.0\nPublic IP: 1.1.1.1\nPort: 10001\nDomain name: skale.test\nStatus: Active\n--------------------------------------------------\n') # noqa + assert ( + result.output + == '--------------------------------------------------\nNode info\nName: test\nID: 32\nIP: 0.0.0.0\nPublic IP: 1.1.1.1\nPort: 10001\nDomain name: skale.test\nStatus: Active\n--------------------------------------------------\n' + ) # noqa def test_node_info_node_info_not_created(): @@ -199,7 +214,10 @@ def test_node_info_node_info_frozen(): resp_mock = response_mock(requests.codes.ok, json_data={'payload': payload, 'status': 'ok'}) result = run_command_mock('node_cli.utils.helper.requests.get', resp_mock, node_info) assert result.exit_code == 0 - assert (result.output == '--------------------------------------------------\nNode info\nName: test\nID: 32\nIP: 0.0.0.0\nPublic IP: 1.1.1.1\nPort: 10001\nDomain name: skale.test\nStatus: Frozen\n--------------------------------------------------\n') # noqa + assert ( + result.output + == '--------------------------------------------------\nNode info\nName: test\nID: 32\nIP: 0.0.0.0\nPublic IP: 1.1.1.1\nPort: 10001\nDomain name: skale.test\nStatus: 
Frozen\n--------------------------------------------------\n' + ) # noqa def test_node_info_node_info_left(): @@ -224,7 +242,10 @@ def test_node_info_node_info_left(): resp_mock = response_mock(requests.codes.ok, json_data={'payload': payload, 'status': 'ok'}) result = run_command_mock('node_cli.utils.helper.requests.get', resp_mock, node_info) assert result.exit_code == 0 - assert (result.output == '--------------------------------------------------\nNode info\nName: test\nID: 32\nIP: 0.0.0.0\nPublic IP: 1.1.1.1\nPort: 10001\nDomain name: skale.test\nStatus: Left\n--------------------------------------------------\n') # noqa + assert ( + result.output + == '--------------------------------------------------\nNode info\nName: test\nID: 32\nIP: 0.0.0.0\nPublic IP: 1.1.1.1\nPort: 10001\nDomain name: skale.test\nStatus: Left\n--------------------------------------------------\n' + ) # noqa def test_node_info_node_info_leaving(): @@ -249,7 +270,10 @@ def test_node_info_node_info_leaving(): resp_mock = response_mock(requests.codes.ok, json_data={'payload': payload, 'status': 'ok'}) result = run_command_mock('node_cli.utils.helper.requests.get', resp_mock, node_info) assert result.exit_code == 0 - assert (result.output == '--------------------------------------------------\nNode info\nName: test\nID: 32\nIP: 0.0.0.0\nPublic IP: 1.1.1.1\nPort: 10001\nDomain name: skale.test\nStatus: Leaving\n--------------------------------------------------\n') # noqa + assert ( + result.output + == '--------------------------------------------------\nNode info\nName: test\nID: 32\nIP: 0.0.0.0\nPublic IP: 1.1.1.1\nPort: 10001\nDomain name: skale.test\nStatus: Leaving\n--------------------------------------------------\n' + ) # noqa def test_node_info_node_info_in_maintenance(): @@ -274,7 +298,10 @@ def test_node_info_node_info_in_maintenance(): resp_mock = response_mock(requests.codes.ok, json_data={'payload': payload, 'status': 'ok'}) result = 
run_command_mock('node_cli.utils.helper.requests.get', resp_mock, node_info) assert result.exit_code == 0 - assert (result.output == '--------------------------------------------------\nNode info\nName: test\nID: 32\nIP: 0.0.0.0\nPublic IP: 1.1.1.1\nPort: 10001\nDomain name: skale.test\nStatus: In Maintenance\n--------------------------------------------------\n') # noqa + assert ( + result.output + == '--------------------------------------------------\nNode info\nName: test\nID: 32\nIP: 0.0.0.0\nPublic IP: 1.1.1.1\nPort: 10001\nDomain name: skale.test\nStatus: In Maintenance\n--------------------------------------------------\n' + ) # noqa def test_node_signature(): @@ -301,14 +328,18 @@ def test_restore(mocked_g_config): '\n', '' ) - with patch('node_cli.core.node.restore_op', MagicMock()) as mock_restore_op, patch( - 'subprocess.run', new=subprocess_run_mock - ), patch('node_cli.core.resources.get_disk_size', return_value=BIG_DISK_SIZE), patch( - 'node_cli.utils.decorators.is_node_inited', return_value=False - ), patch( - 'node_cli.core.node.get_meta_info', - return_value=CliMeta(version='2.4.0', config_stream='3.0.2'), - ), patch('node_cli.operations.base.configure_nftables'): + with ( + patch('node_cli.core.node.restore_op', MagicMock()) as mock_restore_op, + patch('subprocess.run', new=subprocess_run_mock), + patch('node_cli.core.resources.get_disk_size', return_value=BIG_DISK_SIZE), + patch('node_cli.utils.decorators.is_node_inited', return_value=False), + patch( + 'node_cli.core.node.get_meta_info', + return_value=CliMeta(version='2.4.0', config_stream='3.0.2'), + ), + patch('node_cli.operations.base.configure_nftables'), + patch('node_cli.configs.env.validate_params', lambda params: None), + ): result = run_command(restore_node, [backup_path, './tests/test-env']) assert result.exit_code == 0 assert 'Node is restored from backup\n' in result.output # noqa @@ -323,14 +354,18 @@ def test_restore_no_snapshot(mocked_g_config): '\n', '' ) - with 
patch('node_cli.core.node.restore_op', MagicMock()) as mock_restore_op, patch( - 'subprocess.run', new=subprocess_run_mock - ), patch('node_cli.core.resources.get_disk_size', return_value=BIG_DISK_SIZE), patch( - 'node_cli.utils.decorators.is_node_inited', return_value=False - ), patch( - 'node_cli.core.node.get_meta_info', - return_value=CliMeta(version='2.4.0', config_stream='3.0.2'), - ), patch('node_cli.operations.base.configure_nftables'): + with ( + patch('node_cli.core.node.restore_op', MagicMock()) as mock_restore_op, + patch('subprocess.run', new=subprocess_run_mock), + patch('node_cli.core.resources.get_disk_size', return_value=BIG_DISK_SIZE), + patch('node_cli.utils.decorators.is_node_inited', return_value=False), + patch( + 'node_cli.core.node.get_meta_info', + return_value=CliMeta(version='2.4.0', config_stream='3.0.2'), + ), + patch('node_cli.operations.base.configure_nftables'), + patch('node_cli.configs.env.validate_params', lambda params: None), + ): result = run_command(restore_node, [backup_path, './tests/test-env', '--no-snapshot']) assert result.exit_code == 0 assert 'Node is restored from backup\n' in result.output # noqa @@ -364,9 +399,12 @@ def test_maintenance_off(mocked_g_config): def test_turn_off_maintenance_on(mocked_g_config): resp_mock = response_mock(requests.codes.ok, {'status': 'ok', 'payload': None}) - with mock.patch('subprocess.run', new=subprocess_run_mock), mock.patch( - 'node_cli.core.node.turn_off_op' - ), mock.patch('node_cli.utils.decorators.is_node_inited', return_value=True): + with ( + mock.patch('subprocess.run', new=subprocess_run_mock), + mock.patch('node_cli.core.node.turn_off_op'), + mock.patch('node_cli.utils.decorators.is_node_inited', return_value=True), + patch('node_cli.configs.env.validate_params', lambda params: None), + ): result = run_command_mock( 'node_cli.utils.helper.requests.post', resp_mock, @@ -378,9 +416,7 @@ def test_turn_off_maintenance_on(mocked_g_config): == 'Setting maintenance mode 
on...\nNode is successfully set in maintenance mode\n' ) # noqa assert result.exit_code == 0 - with mock.patch( - 'node_cli.utils.docker_utils.is_container_running', return_value=True - ): + with mock.patch('node_cli.utils.docker_utils.is_container_running', return_value=True): result = run_command_mock( 'node_cli.utils.helper.requests.post', resp_mock, @@ -393,11 +429,14 @@ def test_turn_off_maintenance_on(mocked_g_config): def test_turn_on_maintenance_off(mocked_g_config): resp_mock = response_mock(requests.codes.ok, {'status': 'ok', 'payload': None}) - with mock.patch('subprocess.run', new=subprocess_run_mock), mock.patch( - 'node_cli.core.node.get_flask_secret_key' - ), mock.patch('node_cli.core.node.turn_on_op'), mock.patch( - 'node_cli.core.node.is_base_containers_alive' - ), mock.patch('node_cli.utils.decorators.is_node_inited', return_value=True): + with ( + mock.patch('subprocess.run', new=subprocess_run_mock), + mock.patch('node_cli.core.node.get_flask_secret_key'), + mock.patch('node_cli.core.node.turn_on_op'), + mock.patch('node_cli.core.node.is_base_containers_alive'), + mock.patch('node_cli.utils.decorators.is_node_inited', return_value=True), + patch('node_cli.configs.env.validate_params', lambda params: None), + ): result = run_command_mock( 'node_cli.utils.helper.requests.post', resp_mock, @@ -432,7 +471,10 @@ def test_node_version(meta_file_v2): with mock.patch('node_cli.utils.decorators.is_node_inited', return_value=True): result = run_command(version) assert result.exit_code == 0 - assert (result.output == '--------------------------------------------------\nVersion: 0.1.1\nConfig Stream: develop\nLvmpy stream: 1.1.2\n--------------------------------------------------\n') # noqa + assert ( + result.output + == '--------------------------------------------------\nVersion: 0.1.1\nConfig Stream: develop\nLvmpy stream: 1.1.2\n--------------------------------------------------\n' + ) # noqa result = run_command(version, ['--json']) assert 
result.exit_code == 0 diff --git a/tests/cli/resources_allocation_test.py b/tests/cli/resources_allocation_test.py index d84d9fc3..8836435f 100644 --- a/tests/cli/resources_allocation_test.py +++ b/tests/cli/resources_allocation_test.py @@ -24,9 +24,7 @@ import pytest -from node_cli.configs.resource_allocation import ( - RESOURCE_ALLOCATION_FILEPATH, NODE_DATA_PATH -) +from node_cli.configs.resource_allocation import RESOURCE_ALLOCATION_FILEPATH, NODE_DATA_PATH from node_cli.utils.helper import safe_mkdir, write_json from tests.helper import response_mock, run_command_mock @@ -48,11 +46,7 @@ def resource_alloc_config(): def test_show(resource_alloc_config): resp_mock = response_mock(requests.codes.created) write_json(RESOURCE_ALLOCATION_FILEPATH, TEST_CONFIG) - result = run_command_mock( - 'node_cli.utils.helper.post_request', - resp_mock, - show - ) + result = run_command_mock('node_cli.utils.helper.post_request', resp_mock, show) assert result.output == json.dumps(TEST_CONFIG, indent=4) + '\n' assert result.exit_code == 0 @@ -60,40 +54,38 @@ def test_show(resource_alloc_config): def test_generate(): safe_mkdir(NODE_DATA_PATH) resp_mock = response_mock(requests.codes.created) - with mock.patch('node_cli.core.resources.get_disk_size', - return_value=BIG_DISK_SIZE): + with ( + mock.patch('node_cli.core.resources.get_disk_size', return_value=BIG_DISK_SIZE), + mock.patch('node_cli.configs.env.validate_params', lambda params: None), + ): result = run_command_mock( - 'node_cli.utils.helper.post_request', - resp_mock, - generate, - ['./tests/test-env', '--yes'] + 'node_cli.utils.helper.post_request', resp_mock, generate, ['./tests/test-env', '--yes'] ) - assert result.output == (f'Resource allocation file generated: ' - f'{RESOURCE_ALLOCATION_FILEPATH}\n') + assert result.output == ( + f'Resource allocation file generated: {RESOURCE_ALLOCATION_FILEPATH}\n' + ) assert result.exit_code == 0 def test_generate_already_exists(resource_alloc_config): resp_mock = 
response_mock(requests.codes.created) - with mock.patch('node_cli.core.resources.get_disk_size', - return_value=BIG_DISK_SIZE): + with ( + mock.patch('node_cli.core.resources.get_disk_size', return_value=BIG_DISK_SIZE), + mock.patch('node_cli.configs.env.validate_params', lambda params: None), + ): result = run_command_mock( - 'node_cli.utils.helper.post_request', - resp_mock, - generate, - ['./tests/test-env', '--yes'] + 'node_cli.utils.helper.post_request', resp_mock, generate, ['./tests/test-env', '--yes'] ) assert result.output == 'Resource allocation file is already exists\n' assert result.exit_code == 0 result = run_command_mock( - 'node_cli.utils.helper.post_request', - resp_mock, - generate, - ['./tests/test-env', '--yes', '--force'] + 'node_cli.utils.helper.post_request', + resp_mock, + generate, + ['./tests/test-env', '--yes', '--force'], ) assert result.output == ( - f'Resource allocation file generated: ' - f'{RESOURCE_ALLOCATION_FILEPATH}\n' + f'Resource allocation file generated: {RESOURCE_ALLOCATION_FILEPATH}\n' ) assert result.exit_code == 0 diff --git a/tests/cli/sync_node_test.py b/tests/cli/sync_node_test.py index 3465bfc8..76d50648 100644 --- a/tests/cli/sync_node_test.py +++ b/tests/cli/sync_node_test.py @@ -35,14 +35,16 @@ init_default_logger() -def test_init_sync(mocked_g_config): +def test_init_sync(mocked_g_config, clean_node_options): pathlib.Path(SKALE_DIR).mkdir(parents=True, exist_ok=True) - with mock.patch('subprocess.run', new=subprocess_run_mock), mock.patch( - 'node_cli.core.node.init_sync_op' - ), mock.patch('node_cli.core.node.is_base_containers_alive', return_value=True), mock.patch( - 'node_cli.core.resources.get_disk_size', return_value=BIG_DISK_SIZE - ), mock.patch('node_cli.operations.base.configure_nftables'), mock.patch( - 'node_cli.utils.decorators.is_node_inited', return_value=False + with ( + mock.patch('subprocess.run', new=subprocess_run_mock), + mock.patch('node_cli.core.node.init_sync_op'), + 
mock.patch('node_cli.core.node.is_base_containers_alive', return_value=True), + mock.patch('node_cli.core.resources.get_disk_size', return_value=BIG_DISK_SIZE), + mock.patch('node_cli.operations.base.configure_nftables'), + mock.patch('node_cli.utils.decorators.is_node_inited', return_value=False), + mock.patch('node_cli.configs.env.validate_params', lambda params: None), ): result = run_command(_init_sync, ['./tests/test-env']) @@ -57,28 +59,27 @@ def test_init_sync(mocked_g_config): def test_init_sync_archive(mocked_g_config, clean_node_options): pathlib.Path(NODE_DATA_PATH).mkdir(parents=True, exist_ok=True) # with mock.patch('subprocess.run', new=subprocess_run_mock), \ - with mock.patch('node_cli.core.node.is_base_containers_alive', return_value=True), mock.patch( - 'node_cli.operations.base.cleanup_volume_artifacts' - ), mock.patch('node_cli.operations.base.download_skale_node'), mock.patch( - 'node_cli.operations.base.sync_skale_node' - ), mock.patch('node_cli.operations.base.configure_docker'), mock.patch( - 'node_cli.operations.base.prepare_host' - ), mock.patch('node_cli.operations.base.ensure_filestorage_mapping'), mock.patch( - 'node_cli.operations.base.link_env_file' - ), mock.patch('node_cli.operations.base.download_contracts'), mock.patch( - 'node_cli.operations.base.generate_nginx_config' - ), mock.patch('node_cli.operations.base.prepare_block_device'), mock.patch( - 'node_cli.operations.base.update_meta' - ), mock.patch('node_cli.operations.base.update_resource_allocation'), mock.patch( - 'node_cli.operations.base.update_images' - ), mock.patch('node_cli.operations.base.compose_up'), mock.patch( - 'node_cli.core.resources.get_disk_size', return_value=BIG_DISK_SIZE - ), mock.patch('node_cli.operations.base.configure_nftables'), mock.patch( - 'node_cli.utils.decorators.is_node_inited', return_value=False + with ( + mock.patch('node_cli.core.node.is_base_containers_alive', return_value=True), + 
mock.patch('node_cli.operations.base.cleanup_volume_artifacts'), + mock.patch('node_cli.operations.base.download_skale_node'), + mock.patch('node_cli.operations.base.sync_skale_node'), + mock.patch('node_cli.operations.base.configure_docker'), + mock.patch('node_cli.operations.base.prepare_host'), + mock.patch('node_cli.operations.base.ensure_filestorage_mapping'), + mock.patch('node_cli.operations.base.link_env_file'), + mock.patch('node_cli.operations.base.generate_nginx_config'), + mock.patch('node_cli.operations.base.prepare_block_device'), + mock.patch('node_cli.operations.base.update_meta'), + mock.patch('node_cli.operations.base.update_resource_allocation'), + mock.patch('node_cli.operations.base.update_images'), + mock.patch('node_cli.operations.base.compose_up'), + mock.patch('node_cli.core.resources.get_disk_size', return_value=BIG_DISK_SIZE), + mock.patch('node_cli.operations.base.configure_nftables'), + mock.patch('node_cli.utils.decorators.is_node_inited', return_value=False), + mock.patch('node_cli.configs.env.validate_params', lambda params: None), ): - result = run_command( - _init_sync, ['./tests/test-env', '--archive', '--historic-state'] - ) + result = run_command(_init_sync, ['./tests/test-env', '--archive', '--historic-state']) node_options = NodeOptions() assert node_options.archive @@ -90,12 +91,13 @@ def test_init_sync_archive(mocked_g_config, clean_node_options): def test_init_sync_historic_state_fail(mocked_g_config, clean_node_options): pathlib.Path(SKALE_DIR).mkdir(parents=True, exist_ok=True) - with mock.patch('subprocess.run', new=subprocess_run_mock), mock.patch( - 'node_cli.core.node.init_sync_op' - ), mock.patch('node_cli.core.node.is_base_containers_alive', return_value=True), mock.patch( - 'node_cli.core.resources.get_disk_size', return_value=BIG_DISK_SIZE - ), mock.patch('node_cli.operations.base.configure_nftables'), mock.patch( - 'node_cli.utils.decorators.is_node_inited', return_value=False + with ( + 
mock.patch('subprocess.run', new=subprocess_run_mock), + mock.patch('node_cli.core.node.init_sync_op'), + mock.patch('node_cli.core.node.is_base_containers_alive', return_value=True), + mock.patch('node_cli.core.resources.get_disk_size', return_value=BIG_DISK_SIZE), + mock.patch('node_cli.operations.base.configure_nftables'), + mock.patch('node_cli.utils.decorators.is_node_inited', return_value=False), ): result = run_command(_init_sync, ['./tests/test-env', '--historic-state']) assert result.exit_code == 1 @@ -105,15 +107,18 @@ def test_init_sync_historic_state_fail(mocked_g_config, clean_node_options): def test_update_sync(mocked_g_config): pathlib.Path(SKALE_DIR).mkdir(parents=True, exist_ok=True) - with mock.patch('subprocess.run', new=subprocess_run_mock), mock.patch( - 'node_cli.core.node.update_sync_op' - ), mock.patch('node_cli.core.node.is_base_containers_alive', return_value=True), mock.patch( - 'node_cli.core.resources.get_disk_size', return_value=BIG_DISK_SIZE - ), mock.patch('node_cli.operations.base.configure_nftables'), mock.patch( - 'node_cli.utils.decorators.is_node_inited', return_value=True - ), mock.patch( - 'node_cli.core.node.get_meta_info', - return_value=CliMeta(version='2.6.0', config_stream='3.0.2') + with ( + mock.patch('subprocess.run', new=subprocess_run_mock), + mock.patch('node_cli.core.node.update_sync_op'), + mock.patch('node_cli.core.node.is_base_containers_alive', return_value=True), + mock.patch('node_cli.core.resources.get_disk_size', return_value=BIG_DISK_SIZE), + mock.patch('node_cli.operations.base.configure_nftables'), + mock.patch('node_cli.utils.decorators.is_node_inited', return_value=True), + mock.patch( + 'node_cli.core.node.get_meta_info', + return_value=CliMeta(version='2.6.0', config_stream='3.0.2'), + ), + mock.patch('node_cli.configs.env.validate_params', lambda params: None), ): result = run_command(_update_sync, ['./tests/test-env', '--yes']) assert result.exit_code == 0 diff --git a/tests/cli/validate_test.py 
b/tests/cli/validate_test.py deleted file mode 100644 index 7a595b87..00000000 --- a/tests/cli/validate_test.py +++ /dev/null @@ -1,67 +0,0 @@ -import json -import pathlib -import shutil - -import pytest - -from node_cli.configs import (CONTRACTS_PATH, G_CONF_HOME, - IMA_CONTRACTS_FILEPATH, MANAGER_CONTRACTS_FILEPATH) -from node_cli.cli.validate import abi -from tests.helper import run_command - - -@pytest.fixture -def contracts_info_dir(): - pathlib.Path(CONTRACTS_PATH).mkdir(parents=True, exist_ok=True) - yield CONTRACTS_PATH - shutil.rmtree(CONTRACTS_PATH) - - -@pytest.fixture -def contract_valid_abi_files(contracts_info_dir): - json_data = {'test': 'abi'} - with open(IMA_CONTRACTS_FILEPATH, 'w') as ima_abi_file: - json.dump(json_data, ima_abi_file) - with open(MANAGER_CONTRACTS_FILEPATH, 'w') as manager_abi_file: - json.dump(json_data, manager_abi_file) - yield IMA_CONTRACTS_FILEPATH, MANAGER_CONTRACTS_FILEPATH - - -@pytest.fixture -def contract_abi_file_invalid(contracts_info_dir): - json_data = {'test': 'abi'} - with open(IMA_CONTRACTS_FILEPATH, 'w') as ima_abi_file: - json.dump(json_data, ima_abi_file) - with open(MANAGER_CONTRACTS_FILEPATH, 'w') as manager_abi_file: - manager_abi_file.write('Invalid json') - yield IMA_CONTRACTS_FILEPATH, MANAGER_CONTRACTS_FILEPATH - - -@pytest.fixture -def contract_abi_file_empty(contracts_info_dir): - json_data = {'test': 'abi'} - with open(IMA_CONTRACTS_FILEPATH, 'w') as ima_abi_file: - json.dump(json_data, ima_abi_file) - yield IMA_CONTRACTS_FILEPATH, MANAGER_CONTRACTS_FILEPATH - - -def test_validate_abi(contract_valid_abi_files): - result = run_command(abi) - assert result.output == 'All abi files are correct json files!\n' - assert result.exit_code == 0 - - -def test_validate_abi_invalid_file(contract_abi_file_invalid): - result = run_command(abi) - assert 'Some files do not exist or are incorrect' in result.output - assert f'{G_CONF_HOME}.skale/contracts_info/manager.json error Failed to load abi file as json' in 
result.output # noqa - assert f'{G_CONF_HOME}.skale/contracts_info/ima.json ok' in result.output - assert result.exit_code == 0 - - -def test_validate_abi_empty_file(contract_abi_file_empty): - result = run_command(abi) - assert 'Some files do not exist or are incorrect' in result.output - assert f'{G_CONF_HOME}.skale/contracts_info/manager.json error No such file' in result.output # noqa - assert f'{G_CONF_HOME}.skale/contracts_info/ima.json ok' in result.output - assert result.exit_code == 0 diff --git a/tests/configs/configs_env_validate_test.py b/tests/configs/configs_env_validate_test.py new file mode 100644 index 00000000..56bb303e --- /dev/null +++ b/tests/configs/configs_env_validate_test.py @@ -0,0 +1,323 @@ +import os +from typing import Optional +import pytest +import requests + +from node_cli.configs.env import ( + absent_params, + load_env_file, + build_params, + populate_params, + get_env_config, + validate_params, + validate_env_type, + validate_env_alias_or_address, + validate_contract_address, + validate_contract_alias, + get_chain_id, + get_network_metadata, + ContractType, + ALLOWED_ENV_TYPES, +) +from node_cli.utils.exit_codes import CLIExitCodes + + +# ============================================================================= +# Helper fake response for patching requests.get in network helpers +# ============================================================================= +class FakeResponse: + def __init__(self, status_code: int, json_data: Optional[dict] = None): + self.status_code = status_code + self._json_data = json_data or {} + + def json(self): + return self._json_data + + +# ============================================================================= +# Tests for absent_params +# ============================================================================= +class TestAbsentParams: + def test_absent_params_returns_missing_keys(self): + params = { + 'A': '', # missing + 'B': 'value', + 'C': '', # missing + 'MONITORING_CONTAINERS': 
'optional', + } + missing = absent_params(params) + # We expect keys A and C to be missing (assuming they are required) + assert 'A' in missing + assert 'C' in missing + # Optional keys should not be flagged + assert 'MONITORING_CONTAINERS' not in missing + + +# ============================================================================= +# Tests for file loading +# ============================================================================= +class TestLoadEnvFile: + def test_load_env_file_nonexistent(self): + with pytest.raises(SystemExit) as excinfo: + load_env_file('nonexistent.env') + assert excinfo.value.code == CLIExitCodes.FAILURE.value + + def test_load_env_file_not_readable(self, tmp_path): + # Create a temporary file and remove read permissions + env_file = tmp_path / 'test.env' + env_file.write_text('KEY=value') + os.chmod(env_file, 0o000) + with pytest.raises(SystemExit) as excinfo: + load_env_file(str(env_file)) + assert excinfo.value.code == CLIExitCodes.FAILURE.value + os.chmod(env_file, 0o644) # reset permissions + + +# ============================================================================= +# Tests for building and populating parameters +# ============================================================================= +class TestBuildAndPopulate: + def test_build_params_sync(self): + params = build_params(sync_node=True) + # Should contain SCHAIN_NAME among required keys. + assert 'SCHAIN_NAME' in params + + def test_build_params_non_sync(self): + params = build_params(sync_node=False) + # Should not contain SCHAIN_NAME (only in sync dictionary) + assert 'SCHAIN_NAME' not in params + + def test_populate_params_updates_from_environ(self, monkeypatch): + # Start with a base dictionary. 
+ params = {'FOO': ''} + monkeypatch.setenv('FOO', 'bar') + populate_params(params) + assert params['FOO'] == 'bar' + + +# ============================================================================= +# Tests for validate_env_type +# ============================================================================= +class TestEnvType: + @pytest.mark.parametrize('env_type', ['mainnet', 'testnet', 'qanet', 'devnet']) + def test_valid_env_types(self, env_type): + # Should pass without exiting + validate_env_type(env_type) + + def test_invalid_env_type(self): + with pytest.raises(SystemExit) as excinfo: + validate_env_type('invalid') + assert excinfo.value.code == CLIExitCodes.FAILURE.value + + +# ============================================================================= +# Tests for network helper functions +# ============================================================================= +class TestNetworkHelpers: + def test_get_chain_id_success(self, monkeypatch): + fake_response = FakeResponse(200, {'result': '0x1'}) + + def fake_post(url, json): + return fake_response + + monkeypatch.setattr(requests, 'post', fake_post) + chain_id = get_chain_id('http://localhost:8545') + assert chain_id == 1 + + def test_get_chain_id_failure(self, monkeypatch): + fake_response = FakeResponse(404) + + def fake_post(url, json): + return fake_response + + monkeypatch.setattr(requests, 'post', fake_post) + with pytest.raises(SystemExit) as excinfo: + get_chain_id('http://localhost:8545') + assert excinfo.value.code == CLIExitCodes.FAILURE.value + + def test_get_network_metadata_success(self, requests_mock): + metadata = {'networks': [{'chainId': 1, 'path': 'mainnet'}]} + metadata_url = ( + 'https://raw.githubusercontent.com/skalenetwork/skale-contracts/' + 'refs/heads/deployments/metadata.json' + ) + requests_mock.get(metadata_url, json=metadata, status_code=200) + result = get_network_metadata() + assert result == metadata + + def test_get_network_metadata_failure(self, requests_mock): 
+ metadata_url = ( + 'https://raw.githubusercontent.com/skalenetwork/skale-contracts/' + 'refs/heads/deployments/metadata.json' + ) + requests_mock.get(metadata_url, status_code=404) + with pytest.raises(SystemExit) as excinfo: + get_network_metadata() + assert excinfo.value.code == CLIExitCodes.FAILURE.value + + +# ============================================================================= +# Tests for contract validations +# ============================================================================= +class TestContractValidation: + def test_validate_contract_address_success(self, requests_mock): + # Simulate a valid contract code response. + endpoint = 'http://localhost:8545' + requests_mock.post(endpoint, json={'result': '0x123'}) + # This call should not exit. + validate_contract_address('0x' + 'a' * 40, endpoint) + + def test_validate_contract_address_no_code(self, requests_mock): + endpoint = 'http://localhost:8545' + requests_mock.post(endpoint, json={'result': '0x'}) + with pytest.raises(SystemExit) as excinfo: + validate_contract_address('0x' + 'a' * 40, endpoint) + assert excinfo.value.code == CLIExitCodes.FAILURE.value + + def test_validate_contract_alias_success(self, requests_mock): + endpoint = 'http://localhost:8545' + # Fake chain ID response. + requests_mock.post(endpoint, json={'result': '0x1'}) + # Fake metadata response. + metadata_url = ( + 'https://raw.githubusercontent.com/skalenetwork/skale-contracts/' + 'refs/heads/deployments/metadata.json' + ) + metadata = {'networks': [{'chainId': 1, 'path': 'mainnet'}]} + requests_mock.get(metadata_url, json=metadata, status_code=200) + # Fake deployment URL response. 
+ alias_url = ( + 'https://raw.githubusercontent.com/skalenetwork/skale-contracts/' + 'refs/heads/deployments/mainnet/skale-manager/test-alias.json' + ) + requests_mock.get(alias_url, status_code=200) + validate_contract_alias('test-alias', ContractType.MANAGER, endpoint) + + def test_validate_contract_alias_network_missing(self, requests_mock): + endpoint = 'http://localhost:8545' + requests_mock.post(endpoint, json={'result': '0x1'}) + metadata_url = ( + 'https://raw.githubusercontent.com/skalenetwork/skale-contracts/' + 'refs/heads/deployments/metadata.json' + ) + # Return empty networks list. + requests_mock.get(metadata_url, json={'networks': []}, status_code=200) + with pytest.raises(SystemExit) as excinfo: + validate_contract_alias('test-alias', ContractType.MANAGER, endpoint) + assert excinfo.value.code == CLIExitCodes.FAILURE.value + + +# ============================================================================= +# Tests for validate_env_alias_or_address and validate_params +# ============================================================================= +class TestEnvAliasAndParams: + def test_validate_env_alias_or_address_with_address(self, requests_mock): + endpoint = 'http://localhost:8545' + # Provide a fake contract address: 42 characters starting with '0x' + addr = '0x' + 'b' * 40 + # Patch validate_contract_address to succeed + requests_mock.post(endpoint, json={'result': '0x1'}) + validate_env_alias_or_address(addr, ContractType.IMA, endpoint) + + def test_validate_env_alias_or_address_with_alias(self, requests_mock): + endpoint = 'http://localhost:8545' + # For alias, we simulate a valid contract alias check. 
+ # Fake chain ID response: + requests_mock.post(endpoint, json={'result': '0x1'}) + metadata_url = ( + 'https://raw.githubusercontent.com/skalenetwork/skale-contracts/' + 'refs/heads/deployments/metadata.json' + ) + metadata = {'networks': [{'chainId': 1, 'path': 'mainnet'}]} + requests_mock.get(metadata_url, json=metadata, status_code=200) + # Fake deployment response. + alias_url = ( + 'https://raw.githubusercontent.com/skalenetwork/skale-contracts/' + 'refs/heads/deployments/mainnet/mainnet-ima/test-alias.json' + ) + requests_mock.get(alias_url, status_code=200) + validate_env_alias_or_address('test-alias', ContractType.IMA, endpoint) + + def test_validate_params_missing_key(self): + # Create a dictionary missing one required key. + populated_params = { + 'CONTAINER_CONFIGS_STREAM': 'value', + 'ENDPOINT': 'http://localhost:8545', + 'MANAGER_CONTRACTS_ALIAS_OR_ADDRESS': '', + 'FILEBEAT_HOST': '127.0.0.1:3010', + 'DISK_MOUNTPOINT': '/dev/sss', + 'SGX_SERVER_URL': 'http://127.0.0.1', + 'DOCKER_LVMPY_STREAM': 'value', + 'ENV_TYPE': 'mainnet', + } + with pytest.raises(SystemExit) as excinfo: + validate_params(populated_params) + assert excinfo.value.code == CLIExitCodes.FAILURE.value + + def test_validate_params_success(self, valid_env_params, requests_mock): + endpoint = valid_env_params['ENDPOINT'] + # Fake chain ID response. + requests_mock.post(endpoint, json={'result': '0x1'}) + # Fake metadata response. 
+ metadata_url = ( + 'https://raw.githubusercontent.com/skalenetwork/skale-contracts/' + 'refs/heads/deployments/metadata.json' + ) + metadata = {'networks': [{'chainId': 1, 'path': 'mainnet'}]} + requests_mock.get(metadata_url, json=metadata, status_code=200) + ima_alias_url = ( + 'https://raw.githubusercontent.com/skalenetwork/skale-contracts/' + 'refs/heads/deployments/mainnet/mainnet-ima/test-ima.json' + ) + requests_mock.get(ima_alias_url, status_code=200) + manager_alias_url = ( + 'https://raw.githubusercontent.com/skalenetwork/skale-contracts/' + 'refs/heads/deployments/mainnet/skale-manager/test-manager.json' + ) + requests_mock.get(manager_alias_url, status_code=200) + # Should not exit. + validate_params(valid_env_params) + + +# ============================================================================= +# Tests for get_env_config +# ============================================================================= +class TestGetEnvConfig: + def test_get_env_config_success( + self, valid_env_file, mock_chain_response, mock_networks_metadata, requests_mock + ): + endpoint = 'http://localhost:8545' + # Patch network calls used in validation + requests_mock.post(endpoint, json=mock_chain_response) + metadata_url = ( + 'https://raw.githubusercontent.com/skalenetwork/skale-contracts/' + 'refs/heads/deployments/metadata.json' + ) + requests_mock.get(metadata_url, json=mock_networks_metadata, status_code=200) + ima_alias_url = ( + 'https://raw.githubusercontent.com/skalenetwork/skale-contracts/' + 'refs/heads/deployments/mainnet/mainnet-ima/test-ima.json' + ) + requests_mock.get(ima_alias_url, status_code=200) + manager_alias_url = ( + 'https://raw.githubusercontent.com/skalenetwork/skale-contracts/' + 'refs/heads/deployments/mainnet/skale-manager/test-manager.json' + ) + requests_mock.get(manager_alias_url, status_code=200) + config = get_env_config(valid_env_file) + # Assert that keys from the env file are present (using string values) + assert config['ENDPOINT'] 
== 'http://localhost:8545' + # Also check that ENV_TYPE is one of the allowed ones + assert config['ENV_TYPE'] in ALLOWED_ENV_TYPES + + def test_get_env_config_missing_file(self): + with pytest.raises(SystemExit) as excinfo: + get_env_config('nonexistent.env') + assert excinfo.value.code == CLIExitCodes.FAILURE.value + + def test_get_env_config_unreadable_file(self, valid_env_file): + os.chmod(valid_env_file, 0o000) + with pytest.raises(SystemExit) as excinfo: + get_env_config(valid_env_file) + assert excinfo.value.code == CLIExitCodes.FAILURE.value + os.chmod(valid_env_file, 0o644) diff --git a/tests/configs_env_test.py b/tests/configs_env_test.py deleted file mode 100644 index 1fe9ac4e..00000000 --- a/tests/configs_env_test.py +++ /dev/null @@ -1,15 +0,0 @@ -from node_cli.configs.env import NotValidEnvParamsError, validate_params - - -def test_validate_params(): - valid_config = {'ENV_TYPE': 'mainnet'} - validate_params(valid_config) - invalid_config = {'ENV_TYPE': ''} - error = None - try: - validate_params(invalid_config) - except NotValidEnvParamsError as e: - error = e - assert error is not None - earg = 'Allowed ENV_TYPE values are [\'mainnet\', \'testnet\', \'qanet\', \'devnet\']. Actual: ""' # noqa - assert error.args[0] == earg diff --git a/tests/conftest.py b/tests/conftest.py index 824ba93d..69563310 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -16,10 +16,11 @@ # # You should have received a copy of the GNU Lesser General Public License # along with this program. If not, see . 
-""" SKALE config test """ +"""SKALE config test""" import json import os +import tempfile import pathlib import shutil @@ -36,7 +37,7 @@ NGINX_CONTAINER_NAME, REMOVED_CONTAINERS_FOLDER_PATH, STATIC_PARAMS_FILEPATH, - SCHAIN_NODE_DATA_PATH + SCHAIN_NODE_DATA_PATH, ) from node_cli.configs.node_options import NODE_OPTIONS_FILEPATH from node_cli.configs.ssl import SSL_FOLDER_PATH @@ -76,20 +77,6 @@ iptables-persistant: 1.1.3 lvm2: 1.1.1 -testnet: - server: - cpu_total: 4 - cpu_physical: 4 - memory: 32 - swap: 16 - disk: 200000000000 - - packages: - docker: 1.1.3 - docker-compose: 1.1.3 - iptables-persistant: 1.1.3 - lvm2: 1.1.1 - qanet: server: cpu_total: 4 @@ -126,11 +113,7 @@ @pytest.fixture def net_params_file(): with open(STATIC_PARAMS_FILEPATH, 'w') as f: - yaml.dump( - yaml.load(TEST_ENV_PARAMS, Loader=yaml.Loader), - stream=f, - Dumper=yaml.Dumper - ) + yaml.dump(yaml.load(TEST_ENV_PARAMS, Loader=yaml.Loader), stream=f, Dumper=yaml.Dumper) yield STATIC_PARAMS_FILEPATH os.remove(STATIC_PARAMS_FILEPATH) @@ -165,12 +148,7 @@ def dclient(): def simple_image(dclient): name = 'simple-image' try: - dclient.images.build( - tag=name, - rm=True, - nocache=True, - path='tests/simple_container' - ) + dclient.images.build(tag=name, rm=True, nocache=True, path='tests/simple_container') yield name finally: try: @@ -184,9 +162,7 @@ def simple_image(dclient): def docker_hc(dclient): dclient = docker.from_env() return dclient.api.create_host_config( - log_config=docker.types.LogConfig( - type=docker.types.LogConfig.types.JSON - ) + log_config=docker.types.LogConfig(type=docker.types.LogConfig.types.JSON) ) @@ -240,13 +216,7 @@ def nginx_container(dutils, ssl_folder): 'nginx:1.20.2', name=NGINX_CONTAINER_NAME, detach=True, - volumes={ - ssl_folder: { - 'bind': '/ssl', - 'mode': 'ro', - 'propagation': 'slave' - } - } + volumes={ssl_folder: {'bind': '/ssl', 'mode': 'ro', 'propagation': 'slave'}}, ) yield c finally: @@ -321,3 +291,64 @@ def tmp_sync_datadir(): yield 
TEST_SCHAINS_MNT_DIR_SYNC finally: shutil.rmtree(TEST_SCHAINS_MNT_DIR_SYNC) + + +@pytest.fixture +def valid_env_params(): + """ + Return a dictionary of environment parameters that mimics the contents of test-env. + """ + return { + 'ENDPOINT': 'http://localhost:8545', + 'IMA_ENDPOINT': 'http://127.0.01', + 'DB_USER': 'user', + 'DB_PASSWORD': 'pass', + 'DB_PORT': '3307', + 'CONTAINER_CONFIGS_STREAM': 'master', + 'FILEBEAT_HOST': '127.0.0.1:3010', + 'SGX_SERVER_URL': 'http://127.0.0.1', + 'DISK_MOUNTPOINT': '/dev/sss', + 'DOCKER_LVMPY_STREAM': 'master', + 'ENV_TYPE': 'devnet', + 'SCHAIN_NAME': 'test', + 'ENFORCE_BTRFS': 'False', + 'MANAGER_CONTRACTS_ALIAS_OR_ADDRESS': 'test-manager', + 'IMA_CONTRACTS_ALIAS_OR_ADDRESS': 'test-ima', + } + + +@pytest.fixture +def valid_env_file(valid_env_params): + """ + Create a temporary .env file whose contents mimic test-env. + + This file is created using the key/value pairs from valid_env_params, + one per line in the form KEY=VALUE. + """ + with tempfile.NamedTemporaryFile(mode='w', delete=False) as f: + for key, value in valid_env_params.items(): + f.write(f'{key}={value}\n') + file_name = f.name + yield file_name + os.unlink(file_name) + + +@pytest.fixture +def mock_chain_response(): + """Return a fake RPC response for chain ID 1.""" + return { + 'jsonrpc': '2.0', + 'id': 1, + 'result': '0x1', # Represents chain ID 1 + } + + +@pytest.fixture +def mock_networks_metadata(): + """Return fake network metadata that includes chain ID 1.""" + return { + 'networks': [ + {'chainId': 1, 'name': 'Mainnet', 'path': 'mainnet'}, + {'chainId': 2, 'name': 'Testnet', 'path': 'testnet'}, + ] + } diff --git a/tests/core/core_node_test.py b/tests/core/core_node_test.py index f79c6fa3..0257b9cd 100644 --- a/tests/core/core_node_test.py +++ b/tests/core/core_node_test.py @@ -142,14 +142,16 @@ def test_init_node(no_resource_file): # todo: write new init node test resp_mock = response_mock(requests.codes.created) assert not 
os.path.isfile(RESOURCE_ALLOCATION_FILEPATH) env_filepath = './tests/test-env' - with mock.patch('subprocess.run', new=subprocess_run_mock), mock.patch( - 'node_cli.core.resources.get_disk_size', return_value=BIG_DISK_SIZE - ), mock.patch('node_cli.core.host.prepare_host'), mock.patch( - 'node_cli.core.host.init_data_dir' - ), mock.patch('node_cli.operations.base.configure_nftables'), mock.patch( - 'node_cli.core.node.init_op' - ), mock.patch('node_cli.core.node.is_base_containers_alive', return_value=True), mock.patch( - 'node_cli.utils.helper.post_request', resp_mock + with ( + mock.patch('subprocess.run', new=subprocess_run_mock), + mock.patch('node_cli.core.resources.get_disk_size', return_value=BIG_DISK_SIZE), + mock.patch('node_cli.core.host.prepare_host'), + mock.patch('node_cli.core.host.init_data_dir'), + mock.patch('node_cli.operations.base.configure_nftables'), + mock.patch('node_cli.core.node.init_op'), + mock.patch('node_cli.core.node.is_base_containers_alive', return_value=True), + mock.patch('node_cli.utils.helper.post_request', resp_mock), + mock.patch('node_cli.configs.env.validate_params', lambda params: None), ): init(env_filepath) assert os.path.isfile(RESOURCE_ALLOCATION_FILEPATH) @@ -159,23 +161,26 @@ def test_update_node(mocked_g_config, resource_file): env_filepath = './tests/test-env' resp_mock = response_mock(requests.codes.created) os.makedirs(NODE_DATA_PATH, exist_ok=True) - with mock.patch('subprocess.run', new=subprocess_run_mock), mock.patch( - 'node_cli.core.node.update_op' - ), mock.patch('node_cli.core.node.get_flask_secret_key'), mock.patch( - 'node_cli.core.node.save_env_params' - ), mock.patch('node_cli.operations.base.configure_nftables'), mock.patch( - 'node_cli.core.host.prepare_host' - ), mock.patch('node_cli.core.node.is_base_containers_alive', return_value=True), mock.patch( - 'node_cli.utils.helper.post_request', resp_mock - ), mock.patch('node_cli.core.resources.get_disk_size', return_value=BIG_DISK_SIZE), mock.patch( - 
'node_cli.core.host.init_data_dir' - ), mock.patch( - 'node_cli.core.node.get_meta_info', - return_value=CliMeta( - version='2.6.0', config_stream='3.0.2' - ) + with ( + mock.patch('subprocess.run', new=subprocess_run_mock), + mock.patch('node_cli.core.node.update_op'), + mock.patch('node_cli.core.node.get_flask_secret_key'), + mock.patch('node_cli.core.node.save_env_params'), + mock.patch('node_cli.operations.base.configure_nftables'), + mock.patch('node_cli.core.host.prepare_host'), + mock.patch('node_cli.core.node.is_base_containers_alive', return_value=True), + mock.patch('node_cli.utils.helper.post_request', resp_mock), + mock.patch('node_cli.core.resources.get_disk_size', return_value=BIG_DISK_SIZE), + mock.patch('node_cli.core.host.init_data_dir'), + mock.patch( + 'node_cli.core.node.get_meta_info', + return_value=CliMeta(version='2.6.0', config_stream='3.0.2'), + ), + mock.patch('node_cli.configs.env.validate_params', lambda params: None), ): - with mock.patch( 'node_cli.utils.helper.requests.get', return_value=safe_update_api_response()): # noqa + with mock.patch( + 'node_cli.utils.helper.requests.get', return_value=safe_update_api_response() + ): # noqa result = update(env_filepath, pull_config_for_schain=None) assert result is None @@ -210,7 +215,10 @@ def test_is_update_safe(): def test_repair_sync(tmp_sync_datadir, mocked_g_config, resource_file): - with mock.patch('node_cli.core.schains.rm_btrfs_subvolume'), \ - mock.patch('node_cli.utils.docker_utils.stop_container'), \ - mock.patch('node_cli.utils.docker_utils.start_container'): + with ( + mock.patch('node_cli.core.schains.rm_btrfs_subvolume'), + mock.patch('node_cli.utils.docker_utils.stop_container'), + mock.patch('node_cli.utils.docker_utils.start_container'), + mock.patch('node_cli.configs.env.validate_params', lambda params: None), + ): repair_sync(archive=True, historic_state=True, snapshot_from='127.0.0.1') diff --git a/tests/test-env b/tests/test-env index eb598381..7a5a1931 100644 --- 
a/tests/test-env +++ b/tests/test-env @@ -1,16 +1,15 @@ -ENDPOINT=127.0.0.1 -IMA_ENDPOINT=127.0.01 +ENDPOINT=http://localhost:8545 +IMA_ENDPOINT=http://127.0.01 DB_USER=user DB_PASSWORD=pass DB_PORT=3307 CONTAINER_CONFIGS_STREAM='master' -MANAGER_CONTRACTS_ABI_URL=http://127.0.0.1 -IMA_CONTRACTS_ABI_URL=http:/127.0.0.1 FILEBEAT_HOST=127.0.0.1:3010 -MANAGER_CONTRACTS_ABI_URL=http://127.0.0.1 SGX_SERVER_URL=http://127.0.0.1 DISK_MOUNTPOINT=/dev/sss DOCKER_LVMPY_STREAM='master' ENV_TYPE='devnet' SCHAIN_NAME='test' -ENFORCE_BTRFS=False \ No newline at end of file +ENFORCE_BTRFS=False +MANAGER_CONTRACTS_ALIAS_OR_ADDRESS='test-manager' +IMA_CONTRACTS_ALIAS_OR_ADDRESS='test-ima' \ No newline at end of file From 204043ac140acbf534c8feb2fd72be1597ed6d93 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20Gon=C3=A7alves?= Date: Wed, 5 Mar 2025 16:06:30 +0000 Subject: [PATCH 009/332] Fix formatting issues in node_test.py causing flake8 to fail --- tests/cli/node_test.py | 52 +++++++++++++++++++++--------------------- 1 file changed, 26 insertions(+), 26 deletions(-) diff --git a/tests/cli/node_test.py b/tests/cli/node_test.py index c0339afd..9caf17d1 100644 --- a/tests/cli/node_test.py +++ b/tests/cli/node_test.py @@ -85,8 +85,8 @@ def test_register_node_with_error(resource_alloc, mocked_g_config): assert result.exit_code == 3 assert ( result.output - == f'Command failed with following errors:\n--------------------------------------------------\nStrange error\n--------------------------------------------------\nYou can find more info in {G_CONF_HOME}.skale/.skale-cli-log/debug-node-cli.log\n' - ) # noqa + == f'Command failed with following errors:\n--------------------------------------------------\nStrange error\n--------------------------------------------------\nYou can find more info in {G_CONF_HOME}.skale/.skale-cli-log/debug-node-cli.log\n' # noqa + ) def test_register_node_with_prompted_ip(resource_alloc, mocked_g_config): @@ -102,8 +102,8 @@ def 
test_register_node_with_prompted_ip(resource_alloc, mocked_g_config): assert result.exit_code == 0 assert ( result.output - == 'Enter node public IP: 0.0.0.0\nNode registered in SKALE manager.\nFor more info run < skale node info >\n' - ) # noqa + == 'Enter node public IP: 0.0.0.0\nNode registered in SKALE manager.\nFor more info run < skale node info >\n' # noqa + ) def test_register_node_with_default_port(resource_alloc, mocked_g_config): @@ -119,8 +119,8 @@ def test_register_node_with_default_port(resource_alloc, mocked_g_config): assert result.exit_code == 0 assert ( result.output - == 'Enter node public IP: 0.0.0.0\nNode registered in SKALE manager.\nFor more info run < skale node info >\n' - ) # noqa + == 'Enter node public IP: 0.0.0.0\nNode registered in SKALE manager.\nFor more info run < skale node info >\n' # noqa + ) def test_register_with_no_alloc(mocked_g_config): @@ -135,8 +135,8 @@ def test_register_with_no_alloc(mocked_g_config): assert result.exit_code == 8 assert ( result.output - == f"Enter node public IP: 0.0.0.0\nCommand failed with following errors:\n--------------------------------------------------\nNode hasn't been inited before.\nYou should run < skale node init >\n--------------------------------------------------\nYou can find more info in {G_CONF_HOME}.skale/.skale-cli-log/debug-node-cli.log\n" - ) # noqa + == f"Enter node public IP: 0.0.0.0\nCommand failed with following errors:\n--------------------------------------------------\nNode hasn't been inited before.\nYou should run < skale node init >\n--------------------------------------------------\nYou can find more info in {G_CONF_HOME}.skale/.skale-cli-log/debug-node-cli.log\n" # noqa + ) def test_node_info_node_info(): @@ -163,8 +163,8 @@ def test_node_info_node_info(): assert result.exit_code == 0 assert ( result.output - == '--------------------------------------------------\nNode info\nName: test\nID: 32\nIP: 0.0.0.0\nPublic IP: 1.1.1.1\nPort: 10001\nDomain name: 
skale.test\nStatus: Active\n--------------------------------------------------\n' - ) # noqa + == '--------------------------------------------------\nNode info\nName: test\nID: 32\nIP: 0.0.0.0\nPublic IP: 1.1.1.1\nPort: 10001\nDomain name: skale.test\nStatus: Active\n--------------------------------------------------\n' # noqa + ) def test_node_info_node_info_not_created(): @@ -216,8 +216,8 @@ def test_node_info_node_info_frozen(): assert result.exit_code == 0 assert ( result.output - == '--------------------------------------------------\nNode info\nName: test\nID: 32\nIP: 0.0.0.0\nPublic IP: 1.1.1.1\nPort: 10001\nDomain name: skale.test\nStatus: Frozen\n--------------------------------------------------\n' - ) # noqa + == '--------------------------------------------------\nNode info\nName: test\nID: 32\nIP: 0.0.0.0\nPublic IP: 1.1.1.1\nPort: 10001\nDomain name: skale.test\nStatus: Frozen\n--------------------------------------------------\n' # noqa + ) def test_node_info_node_info_left(): @@ -244,8 +244,8 @@ def test_node_info_node_info_left(): assert result.exit_code == 0 assert ( result.output - == '--------------------------------------------------\nNode info\nName: test\nID: 32\nIP: 0.0.0.0\nPublic IP: 1.1.1.1\nPort: 10001\nDomain name: skale.test\nStatus: Left\n--------------------------------------------------\n' - ) # noqa + == '--------------------------------------------------\nNode info\nName: test\nID: 32\nIP: 0.0.0.0\nPublic IP: 1.1.1.1\nPort: 10001\nDomain name: skale.test\nStatus: Left\n--------------------------------------------------\n' # noqa + ) def test_node_info_node_info_leaving(): @@ -272,8 +272,8 @@ def test_node_info_node_info_leaving(): assert result.exit_code == 0 assert ( result.output - == '--------------------------------------------------\nNode info\nName: test\nID: 32\nIP: 0.0.0.0\nPublic IP: 1.1.1.1\nPort: 10001\nDomain name: skale.test\nStatus: Leaving\n--------------------------------------------------\n' - ) # noqa + == 
'--------------------------------------------------\nNode info\nName: test\nID: 32\nIP: 0.0.0.0\nPublic IP: 1.1.1.1\nPort: 10001\nDomain name: skale.test\nStatus: Leaving\n--------------------------------------------------\n' # noqa + ) def test_node_info_node_info_in_maintenance(): @@ -300,8 +300,8 @@ def test_node_info_node_info_in_maintenance(): assert result.exit_code == 0 assert ( result.output - == '--------------------------------------------------\nNode info\nName: test\nID: 32\nIP: 0.0.0.0\nPublic IP: 1.1.1.1\nPort: 10001\nDomain name: skale.test\nStatus: In Maintenance\n--------------------------------------------------\n' - ) # noqa + == '--------------------------------------------------\nNode info\nName: test\nID: 32\nIP: 0.0.0.0\nPublic IP: 1.1.1.1\nPort: 10001\nDomain name: skale.test\nStatus: In Maintenance\n--------------------------------------------------\n' # noqa + ) def test_node_signature(): @@ -382,7 +382,7 @@ def test_maintenance_on(): assert ( result.output == 'Setting maintenance mode on...\nNode is successfully set in maintenance mode\n' - ) # noqa + ) def test_maintenance_off(mocked_g_config): @@ -394,7 +394,7 @@ def test_maintenance_off(mocked_g_config): assert ( result.output == 'Setting maintenance mode off...\nNode is successfully removed from maintenance mode\n' - ) # noqa + ) def test_turn_off_maintenance_on(mocked_g_config): @@ -414,7 +414,7 @@ def test_turn_off_maintenance_on(mocked_g_config): assert ( result.output == 'Setting maintenance mode on...\nNode is successfully set in maintenance mode\n' - ) # noqa + ) assert result.exit_code == 0 with mock.patch('node_cli.utils.docker_utils.is_container_running', return_value=True): result = run_command_mock( @@ -448,7 +448,7 @@ def test_turn_on_maintenance_off(mocked_g_config): assert ( result.output == 'Setting maintenance mode off...\nNode is successfully removed from maintenance mode\n' - ) # noqa, tmp fix + ) def test_set_domain_name(): @@ -464,7 +464,7 @@ def 
test_set_domain_name(): assert result.exit_code == 0 assert ( result.output == 'Setting new domain name: skale.test\nDomain name successfully changed\n' - ) # noqa + ) def test_node_version(meta_file_v2): @@ -473,12 +473,12 @@ def test_node_version(meta_file_v2): assert result.exit_code == 0 assert ( result.output - == '--------------------------------------------------\nVersion: 0.1.1\nConfig Stream: develop\nLvmpy stream: 1.1.2\n--------------------------------------------------\n' - ) # noqa + == '--------------------------------------------------\nVersion: 0.1.1\nConfig Stream: develop\nLvmpy stream: 1.1.2\n--------------------------------------------------\n' # noqa + ) result = run_command(version, ['--json']) assert result.exit_code == 0 assert ( result.output == "{'version': '0.1.1', 'config_stream': 'develop', 'docker_lvmpy_stream': '1.1.2'}\n" - ) # noqa + ) From 195edb969dca4d89ae2dce384660b6b0930c6189 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20Gon=C3=A7alves?= Date: Thu, 6 Mar 2025 12:18:36 +0000 Subject: [PATCH 010/332] Refactored info.py generation into separate script. Added it to README dev preparation so there are no problems with missing info.py. Added info.py to .gitignore. --- .github/workflows/test.yml | 3 +++ README.md | 6 ++++++ node_cli/cli/info.py | 6 ------ scripts/build.sh | 17 +++------------ scripts/generate_info.sh | 42 ++++++++++++++++++++++++++++++++++++++ 5 files changed, 54 insertions(+), 20 deletions(-) delete mode 100644 node_cli/cli/info.py create mode 100755 scripts/generate_info.sh diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index cdd5afdd..bc19da5f 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -30,6 +30,9 @@ jobs: python -m pip install --upgrade pip pip install -e .[dev] + - name: Generate info + run: ./scripts/generate_info.sh 1.0.0 my-branch normal + - name: Lint with flake8 run: | flake8 . 
diff --git a/README.md b/README.md index d1235456..f4e68fa2 100644 --- a/README.md +++ b/README.md @@ -593,6 +593,12 @@ Exit codes conventions for SKALE CLI tools pip install -e .[dev] ``` +#### Generate info.py locally + +```shell +./scripts/generate_info.sh 1.0.0 my-branch normal +``` + ##### Add flake8 git hook In file `.git/hooks/pre-commit` add: diff --git a/node_cli/cli/info.py b/node_cli/cli/info.py deleted file mode 100644 index 6d38d34f..00000000 --- a/node_cli/cli/info.py +++ /dev/null @@ -1,6 +0,0 @@ -BUILD_DATETIME = '' -COMMIT = '' -BRANCH = '' -OS = '' -VERSION = '' -TYPE = '' diff --git a/scripts/build.sh b/scripts/build.sh index 16ff5897..624fcdf4 100755 --- a/scripts/build.sh +++ b/scripts/build.sh @@ -33,20 +33,9 @@ DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )" PARENT_DIR="$(dirname "$DIR")" OS=`uname -s`-`uname -m` -#CURRENT_BRANCH=$(git rev-parse --abbrev-ref HEAD) -LATEST_COMMIT=$(git rev-parse HEAD) -CURRENT_DATETIME="`date "+%Y-%m-%d %H:%M:%S"`"; -DIST_INFO_FILEPATH=$PARENT_DIR/node_cli/cli/info.py - -rm $DIST_INFO_FILEPATH -touch $DIST_INFO_FILEPATH - -echo "BUILD_DATETIME = '$CURRENT_DATETIME'" > $DIST_INFO_FILEPATH -echo "COMMIT = '$LATEST_COMMIT'" >> $DIST_INFO_FILEPATH -echo "BRANCH = '$BRANCH'" >> $DIST_INFO_FILEPATH -echo "OS = '$OS'" >> $DIST_INFO_FILEPATH -echo "VERSION = '$VERSION'" >> $DIST_INFO_FILEPATH -echo "TYPE = '$TYPE'" >> $DIST_INFO_FILEPATH + +# Use the new generate_info.sh script +"${DIR}/generate_info.sh" "$VERSION" "$BRANCH" "$TYPE" if [ "$TYPE" = "sync" ]; then EXECUTABLE_NAME=skale-$VERSION-$OS-sync diff --git a/scripts/generate_info.sh b/scripts/generate_info.sh new file mode 100755 index 00000000..d554712a --- /dev/null +++ b/scripts/generate_info.sh @@ -0,0 +1,42 @@ +#!/usr/bin/env bash +set -e + +VERSION=$1 +BRANCH=$2 +TYPE=$3 + +USAGE_MSG='Usage: generate_info.sh [VERSION] [BRANCH] [TYPE]' + +if [ -z "$VERSION" ]; then + (>&2 echo 'You should provide version') + echo $USAGE_MSG + exit 1 
+fi +if [ -z "$BRANCH" ]; then + (>&2 echo 'You should provide git branch') + echo $USAGE_MSG + exit 1 +fi +if [ -z "$TYPE" ]; then + (>&2 echo 'You should provide type: normal or sync') + echo $USAGE_MSG + exit 1 +fi + +DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )" +PARENT_DIR="$(dirname "$DIR")" +DIST_INFO_FILEPATH=$PARENT_DIR/node_cli/cli/info.py + +LATEST_COMMIT=$(git rev-parse HEAD) +CURRENT_DATETIME="$(date "+%Y-%m-%d %H:%M:%S")" +OS="$(uname -s)-$(uname -m)" + +rm -f "$DIST_INFO_FILEPATH" +touch "$DIST_INFO_FILEPATH" + +echo "BUILD_DATETIME = '$CURRENT_DATETIME'" >> "$DIST_INFO_FILEPATH" +echo "COMMIT = '$LATEST_COMMIT'" >> "$DIST_INFO_FILEPATH" +echo "BRANCH = '$BRANCH'" >> "$DIST_INFO_FILEPATH" +echo "OS = '$OS'" >> "$DIST_INFO_FILEPATH" +echo "VERSION = '$VERSION'" >> "$DIST_INFO_FILEPATH" +echo "TYPE = '$TYPE'" >> "$DIST_INFO_FILEPATH" From d07e4cae5eb79d0222032e0ddce2e86396c5624e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20Gon=C3=A7alves?= Date: Fri, 7 Mar 2025 17:29:46 +0000 Subject: [PATCH 011/332] Add contract address validation utility function in helper.py and update contract address validation logic to use this utility function. 
--- node_cli/configs/env.py | 6 ++---- node_cli/utils/helper.py | 5 +++++ 2 files changed, 7 insertions(+), 4 deletions(-) diff --git a/node_cli/configs/env.py b/node_cli/configs/env.py index 6ee99022..b8987311 100644 --- a/node_cli/configs/env.py +++ b/node_cli/configs/env.py @@ -11,7 +11,7 @@ from enum import Enum from node_cli.configs import SKALE_DIR, CONTAINER_CONFIG_PATH -from node_cli.utils.helper import error_exit +from node_cli.utils.helper import error_exit, is_contract_address from node_cli.utils.exit_codes import CLIExitCodes SKALE_DIR_ENV_FILEPATH = os.path.join(SKALE_DIR, '.env') @@ -144,9 +144,7 @@ def validate_env_alias_or_address( else 'MANAGER_CONTRACTS_ALIAS_OR_ADDRESS' ) error_exit(f'{param_name} is not set', CLIExitCodes.FAILURE) - # If alias_or_address is 42 characters and starts with '0x', treat it as a contract address. - # TODO: Add a more robust check for contract address and see if doesn't conflict with alias. - if len(alias_or_address) == 42 and alias_or_address.startswith('0x'): + if is_contract_address(alias_or_address): validate_contract_address(alias_or_address, endpoint) else: validate_contract_alias(alias_or_address, contract_type, endpoint) diff --git a/node_cli/utils/helper.py b/node_cli/utils/helper.py index 261e61b7..dbff2315 100644 --- a/node_cli/utils/helper.py +++ b/node_cli/utils/helper.py @@ -445,3 +445,8 @@ def get_ssh_port(ssh_service_name='ssh'): except OSError: logger.exception('Cannot get ssh service port') return DEFAULT_SSH_PORT + + +# TODO: Add a more robust check for contract address and see if doesn't conflict with alias. 
+def is_contract_address(value: str) -> bool: + return len(value) == 42 and value.startswith('0x') From b8ecb371c392bccc3288ca13084ff0b929dd86da Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20Gon=C3=A7alves?= Date: Fri, 7 Mar 2025 17:34:31 +0000 Subject: [PATCH 012/332] Renamed 'MANAGER_CONTRACTS_ALIAS_OR_ADDRESS' and 'IMA_CONTRACTS_ALIAS_OR_ADDRESS' to 'MANAGER_CONTRACTS' and 'IMA_CONTRACTS', respectively. --- README.md | 12 ++++++------ node_cli/configs/env.py | 22 +++++++--------------- tests/configs/configs_env_validate_test.py | 2 +- tests/conftest.py | 4 ++-- tests/test-env | 4 ++-- 5 files changed, 18 insertions(+), 26 deletions(-) diff --git a/README.md b/README.md index f4e68fa2..39af2b64 100644 --- a/README.md +++ b/README.md @@ -115,12 +115,12 @@ You should specify the following environment variables: - `DOCKER_LVMPY_STREAM` - stream of `docker-lvmpy` to use - `CONTAINER_CONFIGS_STREAM` - stream of `skale-node` to use - `ENDPOINT` - RPC endpoint of the node in the network where SKALE Manager is deployed -- `MANAGER_CONTRACTS_ALIAS_OR_ADDRESS` - SKALE Manager main contract alias or address -- `IMA_CONTRACTS_ALIAS_OR_ADDRESS` - IMA main contract alias or address +- `MANAGER_CONTRACTS` - SKALE Manager main contract alias or address +- `IMA_CONTRACTS` - IMA main contract alias or address - `FILEBEAT_URL` - URL to the Filebeat log server - `ENV_TYPE` - environment type (e.g., 'mainnet', 'testnet', 'qanet', 'devnet') -> In `MANAGER_CONTRACTS_ALIAS_OR_ADDRESS` and `IMA_CONTRACTS_ALIAS_OR_ADDRESS` fields, if you are using a recognized network (e.g., 'Mainnet', 'Holesky', 'local'), you can use a recognized alias (e.g., 'production', 'grants'). You can check the list of recognized networks and aliases in [contract deployments](https://github.com/skalenetwork/skale-contracts/tree/deployments). 
+> In `MANAGER_CONTRACTS` and `IMA_CONTRACTS` fields, if you are using a recognized network (e.g., 'Mainnet', 'Holesky', 'local'), you can use a recognized alias (e.g., 'production', 'grants'). You can check the list of recognized networks and aliases in [contract deployments](https://github.com/skalenetwork/skale-contracts/tree/deployments). > :warning: If you are using a custom network or a contract which isn't recognized by underlying skale library, you **MUST** provide a direct contract address. Optional variables: @@ -536,12 +536,12 @@ You should specify the following environment variables: - `DOCKER_LVMPY_STREAM` - stream of `docker-lvmpy` to use - `CONTAINER_CONFIGS_STREAM` - stream of `skale-node` to use - `ENDPOINT` - RPC endpoint of the node in the network where SKALE Manager is deployed -- `MANAGER_CONTRACTS_ALIAS_OR_ADDRESS` - SKALE Manager main contract alias or address -- `IMA_CONTRACTS_ALIAS_OR_ADDRESS` - IMA main contract alias or address +- `MANAGER_CONTRACTS` - SKALE Manager main contract alias or address +- `IMA_CONTRACTS` - IMA main contract alias or address - `SCHAIN_NAME` - name of the SKALE chain to sync - `ENV_TYPE` - environment type (e.g., 'mainnet', 'testnet', 'qanet', 'devnet') -> In `MANAGER_CONTRACTS_ALIAS_OR_ADDRESS` and `IMA_CONTRACTS_ALIAS_OR_ADDRESS` fields, if you are using a recognized network (e.g., 'Mainnet', 'Holesky', 'local'), you can use a recognized alias (e.g., 'production', 'grants'). You can check the list of recognized networks and aliases in [contract deployments](https://github.com/skalenetwork/skale-contracts/tree/deployments). +> In `MANAGER_CONTRACTS` and `IMA_CONTRACTS` fields, if you are using a recognized network (e.g., 'Mainnet', 'Holesky', 'local'), you can use a recognized alias (e.g., 'production', 'grants'). You can check the list of recognized networks and aliases in [contract deployments](https://github.com/skalenetwork/skale-contracts/tree/deployments). 
> :warning: If you are using a custom network or a contract which isn't recognized by underlying skale library, you **MUST** provide a direct contract address. Options: diff --git a/node_cli/configs/env.py b/node_cli/configs/env.py index b8987311..3b905885 100644 --- a/node_cli/configs/env.py +++ b/node_cli/configs/env.py @@ -30,8 +30,8 @@ class ContractType(Enum): REQUIRED_PARAMS: Dict[str, str] = { 'CONTAINER_CONFIGS_STREAM': '', 'ENDPOINT': '', - 'MANAGER_CONTRACTS_ALIAS_OR_ADDRESS': '', - 'IMA_CONTRACTS_ALIAS_OR_ADDRESS': '', + 'MANAGER_CONTRACTS': '', + 'IMA_CONTRACTS': '', 'FILEBEAT_HOST': '', 'DISK_MOUNTPOINT': '', 'SGX_SERVER_URL': '', @@ -43,8 +43,8 @@ class ContractType(Enum): 'SCHAIN_NAME': '', 'CONTAINER_CONFIGS_STREAM': '', 'ENDPOINT': '', - 'MANAGER_CONTRACTS_ALIAS_OR_ADDRESS': '', - 'IMA_CONTRACTS_ALIAS_OR_ADDRESS': '', + 'MANAGER_CONTRACTS': '', + 'IMA_CONTRACTS': '', 'DISK_MOUNTPOINT': '', 'DOCKER_LVMPY_STREAM': '', 'ENV_TYPE': '', @@ -116,12 +116,8 @@ def validate_params(params: Dict[str, str]) -> None: validate_env_type(params['ENV_TYPE']) # Get the endpoint explicitly from the params. 
endpoint = params['ENDPOINT'] - validate_env_alias_or_address( - params['IMA_CONTRACTS_ALIAS_OR_ADDRESS'], ContractType.IMA, endpoint - ) - validate_env_alias_or_address( - params['MANAGER_CONTRACTS_ALIAS_OR_ADDRESS'], ContractType.MANAGER, endpoint - ) + validate_env_alias_or_address(params['IMA_CONTRACTS'], ContractType.IMA, endpoint) + validate_env_alias_or_address(params['MANAGER_CONTRACTS'], ContractType.MANAGER, endpoint) def validate_env_type(env_type: str) -> None: @@ -138,11 +134,7 @@ def validate_env_alias_or_address( ) -> None: """Validate contract alias or address.""" if not alias_or_address: - param_name = ( - 'IMA_CONTRACTS_ALIAS_OR_ADDRESS' - if contract_type == ContractType.IMA - else 'MANAGER_CONTRACTS_ALIAS_OR_ADDRESS' - ) + param_name = 'IMA_CONTRACTS' if contract_type == ContractType.IMA else 'MANAGER_CONTRACTS' error_exit(f'{param_name} is not set', CLIExitCodes.FAILURE) if is_contract_address(alias_or_address): validate_contract_address(alias_or_address, endpoint) diff --git a/tests/configs/configs_env_validate_test.py b/tests/configs/configs_env_validate_test.py index 56bb303e..5e0bf3e9 100644 --- a/tests/configs/configs_env_validate_test.py +++ b/tests/configs/configs_env_validate_test.py @@ -243,7 +243,7 @@ def test_validate_params_missing_key(self): populated_params = { 'CONTAINER_CONFIGS_STREAM': 'value', 'ENDPOINT': 'http://localhost:8545', - 'MANAGER_CONTRACTS_ALIAS_OR_ADDRESS': '', + 'MANAGER_CONTRACTS': '', 'FILEBEAT_HOST': '127.0.0.1:3010', 'DISK_MOUNTPOINT': '/dev/sss', 'SGX_SERVER_URL': 'http://127.0.0.1', diff --git a/tests/conftest.py b/tests/conftest.py index 69563310..68ffb959 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -312,8 +312,8 @@ def valid_env_params(): 'ENV_TYPE': 'devnet', 'SCHAIN_NAME': 'test', 'ENFORCE_BTRFS': 'False', - 'MANAGER_CONTRACTS_ALIAS_OR_ADDRESS': 'test-manager', - 'IMA_CONTRACTS_ALIAS_OR_ADDRESS': 'test-ima', + 'MANAGER_CONTRACTS': 'test-manager', + 'IMA_CONTRACTS': 'test-ima', } diff --git 
a/tests/test-env b/tests/test-env index 7a5a1931..7698a8b8 100644 --- a/tests/test-env +++ b/tests/test-env @@ -11,5 +11,5 @@ DOCKER_LVMPY_STREAM='master' ENV_TYPE='devnet' SCHAIN_NAME='test' ENFORCE_BTRFS=False -MANAGER_CONTRACTS_ALIAS_OR_ADDRESS='test-manager' -IMA_CONTRACTS_ALIAS_OR_ADDRESS='test-ima' \ No newline at end of file +MANAGER_CONTRACTS='test-manager' +IMA_CONTRACTS='test-ima' \ No newline at end of file From 36844c629974ec606850277fc5cb6c1f91cc8aee Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20Gon=C3=A7alves?= Date: Fri, 7 Mar 2025 18:27:02 +0000 Subject: [PATCH 013/332] Add ruff check to pre-commit hook indication in README --- README.md | 1 + 1 file changed, 1 insertion(+) diff --git a/README.md b/README.md index 39af2b64..e9bbc11f 100644 --- a/README.md +++ b/README.md @@ -606,6 +606,7 @@ In file `.git/hooks/pre-commit` add: ```shell #!/bin/sh flake8 . +ruff check ``` ### Debugging From fdc6f6215f76e079871efb6986fe8a887f504ec5 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20Gon=C3=A7alves?= Date: Tue, 8 Apr 2025 12:08:26 +0100 Subject: [PATCH 014/332] Updated helper-scripts version --- helper-scripts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/helper-scripts b/helper-scripts index 2541831d..84b57271 160000 --- a/helper-scripts +++ b/helper-scripts @@ -1 +1 @@ -Subproject commit 2541831d3a8bf6691d994f37f379ac36d760c0a4 +Subproject commit 84b572717eef72feb3c901ea8817f9dcddbca8bb From 7b9e2e850b1a6afe94e8a7ccd5ae5f61ad9ad31d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20Gon=C3=A7alves?= Date: Thu, 10 Apr 2025 23:13:35 +0100 Subject: [PATCH 015/332] Address review comments for PR #839 --- .flake8 | 3 - .github/workflows/test.yml | 4 - README.md | 9 +- node_cli/cli/__init__.py | 2 +- node_cli/cli/ssl.py | 69 +-- node_cli/cli/sync_node.py | 5 +- node_cli/configs/env.py | 94 ++-- node_cli/core/host.py | 32 +- node_cli/core/node.py | 8 +- node_cli/main.py | 2 +- node_cli/operations/skale_node.py | 12 +- 
node_cli/utils/git_utils.py | 53 +- node_cli/utils/helper.py | 24 +- node_cli/utils/print_formatters.py | 4 - ruff.toml | 4 + setup.py | 1 - tests/configs/configs_env_validate_test.py | 532 ++++++++++----------- 17 files changed, 339 insertions(+), 519 deletions(-) delete mode 100644 .flake8 diff --git a/.flake8 b/.flake8 deleted file mode 100644 index a34d601c..00000000 --- a/.flake8 +++ /dev/null @@ -1,3 +0,0 @@ -[flake8] -max-line-length = 100 -exclude = .git,__pycache__,docs/source/conf.py,old,build,dist,venv,helper-scripts,.venv diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index bc19da5f..a8b2eb80 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -33,10 +33,6 @@ jobs: - name: Generate info run: ./scripts/generate_info.sh 1.0.0 my-branch normal - - name: Lint with flake8 - run: | - flake8 . - - name: Check with ruff run: | ruff check diff --git a/README.md b/README.md index 7a3467aa..b2b2a95e 100644 --- a/README.md +++ b/README.md @@ -115,8 +115,8 @@ You should specify the following environment variables: - `DOCKER_LVMPY_STREAM` - stream of `docker-lvmpy` to use - `CONTAINER_CONFIGS_STREAM` - stream of `skale-node` to use - `ENDPOINT` - RPC endpoint of the node in the network where SKALE Manager is deployed -- `MANAGER_CONTRACTS` - SKALE Manager main contract alias or address -- `IMA_CONTRACTS` - IMA main contract alias or address +- `MANAGER_CONTRACTS` - SKALE Manager `skale_manager` contract alias or address +- `IMA_CONTRACTS` - IMA `message_proxy_mainnet` contract alias or address - `FILEBEAT_URL` - URL to the Filebeat log server - `ENV_TYPE` - environment type (e.g., 'mainnet', 'testnet', 'qanet', 'devnet') @@ -129,8 +129,6 @@ Optional variables: - `TG_CHAT_ID` - Telegram chat ID - `MONITORING_CONTAINERS` - will enable monitoring containers (`cadvisor`, `node-exporter`). -> Filebeat is always enabled and requires `FILEBEAT_URL`, it is **not optional**. 
- #### Node initialization from backup Restore SKALE node on another machine @@ -616,13 +614,12 @@ pip install -e .[dev] ./scripts/generate_info.sh 1.0.0 my-branch normal ``` -##### Add flake8 git hook +##### Add linting git hook In file `.git/hooks/pre-commit` add: ```shell #!/bin/sh -flake8 . ruff check ``` diff --git a/node_cli/cli/__init__.py b/node_cli/cli/__init__.py index e1e6add7..b0105764 100644 --- a/node_cli/cli/__init__.py +++ b/node_cli/cli/__init__.py @@ -1,4 +1,4 @@ -__version__ = '2.6.2' +__version__ = '3.0.0' if __name__ == '__main__': print(__version__) diff --git a/node_cli/cli/ssl.py b/node_cli/cli/ssl.py index 89b81b51..e0b63f7c 100644 --- a/node_cli/cli/ssl.py +++ b/node_cli/cli/ssl.py @@ -35,12 +35,12 @@ def ssl_cli(): pass -@ssl_cli.group('ssl', help="sChains SSL commands") +@ssl_cli.group('ssl', help='sChains SSL commands') def ssl(): pass -@ssl.command(help="Status of the SSL certificates on the node") +@ssl.command(help='Status of the SSL certificates on the node') def status(): status, payload = cert_status() if status == 'ok': @@ -49,7 +49,7 @@ def status(): else: table_data = [ ['Issued to', payload['issued_to']], - ['Expiration date', payload['expiration_date']] + ['Expiration date', payload['expiration_date']], ] table = SingleTable(table_data) print('SSL certificates status:') @@ -58,69 +58,46 @@ def status(): error_exit(payload, exit_code=CLIExitCodes.BAD_API_RESPONSE) -@ssl.command(help="Upload new SSL certificates") +@ssl.command(help='Upload new SSL certificates') +@click.option('--key-path', '-k', prompt='Enter path to the key file', help='Path to the key file') @click.option( - '--key-path', '-k', - prompt="Enter path to the key file", - help='Path to the key file' -) -@click.option( - '--cert-path', '-c', - prompt="Enter path to the certificate file", - help='Path to the certificate file' + '--cert-path', + '-c', + prompt='Enter path to the certificate file', + help='Path to the certificate file', ) -@click.option('--force', 
'-f', is_flag=True, - help='Overwrite existing certificates') +@click.option('--force', '-f', is_flag=True, help='Overwrite existing certificates') def upload(key_path, cert_path, force): status, payload = upload_cert(cert_path, key_path, force) if status == 'ok': print(TEXTS['ssl']['uploaded']) else: - error_exit(payload, exit_code=CLIExitCodes.FAILURE) + error_exit(payload) -@ssl.command(help="Check certificates") -@click.option( - '--key-path', '-k', - help='Path to the key file', - default=SSL_KEY_FILEPATH -) -@click.option( - '--cert-path', '-c', - help='Path to the certificate file', - default=SSL_CERT_FILEPATH -) +@ssl.command(help='Check certificates') +@click.option('--key-path', '-k', help='Path to the key file', default=SSL_KEY_FILEPATH) +@click.option('--cert-path', '-c', help='Path to the certificate file', default=SSL_CERT_FILEPATH) @click.option( - '--port', '-p', + '--port', + '-p', help='Port to start ssl healtcheck server', type=int, - default=DEFAULT_SSL_CHECK_PORT + default=DEFAULT_SSL_CHECK_PORT, ) @click.option( - '--type', '-t', + '--type', + '-t', 'type_', help='Check type', type=click.Choice(['all', 'openssl', 'skaled']), - default='all' -) -@click.option( - '--no-client', - is_flag=True, - help='Skip client connection for openssl check' -) -@click.option( - '--no-wss', - is_flag=True, - help='Skip wss server starting for skaled check' + default='all', ) +@click.option('--no-client', is_flag=True, help='Skip client connection for openssl check') +@click.option('--no-wss', is_flag=True, help='Skip wss server starting for skaled check') def check(key_path, cert_path, port, no_client, type_, no_wss): status, payload = check_cert( - cert_path, - key_path, - port=port, - check_type=type_, - no_client=no_client, - no_wss=no_wss + cert_path, key_path, port=port, check_type=type_, no_client=no_client, no_wss=no_wss ) if status == 'ok': print(TEXTS['ssl']['check_passed']) diff --git a/node_cli/cli/sync_node.py b/node_cli/cli/sync_node.py index 
9dc4333a..8e0f41a5 100644 --- a/node_cli/cli/sync_node.py +++ b/node_cli/cli/sync_node.py @@ -59,10 +59,7 @@ def _init_sync( env_file, indexer: bool, archive: bool, snapshot: bool, snapshot_from: Optional[str] ) -> None: if indexer and archive: - error_exit( - 'Cannot use both --indexer and --archive options', - exit_code=CLIExitCodes.FAILURE, - ) + error_exit('Cannot use both --indexer and --archive options') init_sync(env_file, indexer, archive, snapshot, snapshot_from) diff --git a/node_cli/configs/env.py b/node_cli/configs/env.py index 3b905885..4785dffb 100644 --- a/node_cli/configs/env.py +++ b/node_cli/configs/env.py @@ -1,8 +1,21 @@ -"""Environment configuration and validation module for SKALE node. - -This module handles environment variable loading, validation, and configuration -for SKALE node setup. It ensures all required parameters are present and valid. -""" +# -*- coding: utf-8 -*- +# +# This file is part of node-cli +# +# Copyright (C) 2019-Present SKALE Labs +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Affero General Public License for more details. +# +# You should have received a copy of the GNU Affero General Public License +# along with this program. If not, see . 
import os from typing import Dict, List, Optional @@ -66,6 +79,11 @@ class ContractType(Enum): 'SKIP_DOCKER_CLEANUP': '', } +METADATA_URL: str = ( + 'https://raw.githubusercontent.com/skalenetwork/skale-contracts/' + 'refs/heads/deployments/metadata.json' +) + def absent_params(params: Dict[str, str]) -> List[str]: """Return a list of required parameters that are missing or empty.""" @@ -85,12 +103,8 @@ def get_env_config( def load_env_file(env_filepath: str) -> None: """Check and load environment variables from the given file.""" - if not os.path.exists(env_filepath): - error_exit(f'Environment file not found: {env_filepath}', CLIExitCodes.FAILURE) - if not os.access(env_filepath, os.R_OK): - error_exit(f'Cannot read environment file: {env_filepath}', CLIExitCodes.FAILURE) if not load_dotenv(dotenv_path=env_filepath): - error_exit(f'Failed to load environment from {env_filepath}', CLIExitCodes.FAILURE) + error_exit(f'Failed to load environment from {env_filepath}') def build_params(sync_node: bool = False) -> Dict[str, str]: @@ -112,9 +126,8 @@ def validate_params(params: Dict[str, str]) -> None: """Validate environment parameters.""" missing = absent_params(params) if missing: - error_exit(f'Missing required parameters: {missing}', CLIExitCodes.FAILURE) + error_exit(f'Missing required parameters: {missing}') validate_env_type(params['ENV_TYPE']) - # Get the endpoint explicitly from the params. endpoint = params['ENDPOINT'] validate_env_alias_or_address(params['IMA_CONTRACTS'], ContractType.IMA, endpoint) validate_env_alias_or_address(params['MANAGER_CONTRACTS'], ContractType.MANAGER, endpoint) @@ -123,19 +136,13 @@ def validate_params(params: Dict[str, str]) -> None: def validate_env_type(env_type: str) -> None: """Validate the environment type.""" if env_type not in ALLOWED_ENV_TYPES: - error_exit( - f'Allowed ENV_TYPE values are {ALLOWED_ENV_TYPES}. 
Actual: "{env_type}"', - CLIExitCodes.FAILURE, - ) + error_exit(f'Allowed ENV_TYPE values are {ALLOWED_ENV_TYPES}. Actual: "{env_type}"') def validate_env_alias_or_address( alias_or_address: str, contract_type: ContractType, endpoint: str ) -> None: """Validate contract alias or address.""" - if not alias_or_address: - param_name = 'IMA_CONTRACTS' if contract_type == ContractType.IMA else 'MANAGER_CONTRACTS' - error_exit(f'{param_name} is not set', CLIExitCodes.FAILURE) if is_contract_address(alias_or_address): validate_contract_address(alias_or_address, endpoint) else: @@ -155,16 +162,20 @@ def validate_contract_address(contract_address: str, endpoint: str) -> None: }, ) if response.status_code != 200: - error_exit( - f'Failed to verify contract at address {contract_address}', CLIExitCodes.FAILURE - ) + error_exit(f'Failed to verify contract at address {contract_address}') result = response.json().get('result') if not result or result in ['0x', '0x0']: - error_exit( - f'No contract code found at address {contract_address}', CLIExitCodes.FAILURE - ) + error_exit(f'No contract code found at address {contract_address}') except requests.RequestException as e: - error_exit(f'Failed to validate contract address: {str(e)}', CLIExitCodes.FAILURE) + error_exit(f'Failed to validate contract address: {str(e)}') + + +def get_deployment_url(alias: str, contract_type: ContractType, network_path: str) -> str: + """Construct the deployment URL for the given contract alias and type.""" + return ( + f'https://raw.githubusercontent.com/skalenetwork/skale-contracts/' + f'refs/heads/deployments/{network_path}/{contract_type.value}/{alias}.json' + ) def validate_contract_alias(alias: str, contract_type: ContractType, endpoint: str) -> None: @@ -179,20 +190,13 @@ def validate_contract_alias(alias: str, contract_type: ContractType, endpoint: s network_path = net.get('path') break if not network_path: - error_exit( - f'Network with chain ID {chain_id} not found in metadata', 
CLIExitCodes.FAILURE - ) - deployment_url = ( - f'https://raw.githubusercontent.com/skalenetwork/skale-contracts/' - f'refs/heads/deployments/{network_path}/{contract_type.value}/{alias}.json' - ) + error_exit(f'Network with chain ID {chain_id} not found in metadata') + assert isinstance(network_path, str) + deployment_url = get_deployment_url(alias, contract_type, network_path) if requests.get(deployment_url).status_code != 200: - error_exit( - f"Contract alias '{alias}' not found for {contract_type.value}", - CLIExitCodes.FAILURE, - ) + error_exit(f"Contract alias '{alias}' not found for {contract_type.value}") except requests.RequestException as e: - error_exit(f"Failed to validate contract alias '{alias}': {str(e)}", CLIExitCodes.FAILURE) + error_exit(f"Failed to validate contract alias '{alias}': {str(e)}") def get_chain_id(endpoint: str) -> int: @@ -203,27 +207,23 @@ def get_chain_id(endpoint: str) -> int: json={'jsonrpc': '2.0', 'method': 'eth_chainId', 'params': [], 'id': 1}, ) if response.status_code != 200: - error_exit('Failed to get chain ID from endpoint', CLIExitCodes.FAILURE) + error_exit('Failed to get chain ID from endpoint') return int(response.json()['result'], 16) except requests.RequestException as e: - error_exit(f'Failed to get chain ID: {str(e)}', CLIExitCodes.FAILURE) + error_exit(f'Failed to get chain ID: {str(e)}') # Will never reach this line, but needed for type checking. 
return 0 def get_network_metadata() -> Dict: """Fetch network metadata from GitHub.""" - metadata_url = ( - 'https://raw.githubusercontent.com/skalenetwork/skale-contracts/' - 'refs/heads/deployments/metadata.json' - ) try: - response = requests.get(metadata_url) + response = requests.get(METADATA_URL) if response.status_code != 200: - error_exit('Failed to fetch networks metadata', CLIExitCodes.FAILURE) + error_exit('Failed to fetch networks metadata') return response.json() except requests.RequestException as e: - error_exit(f'Failed to fetch networks metadata: {str(e)}', CLIExitCodes.FAILURE) + error_exit(f'Failed to fetch networks metadata: {str(e)}') # Will never reach this line, but needed for type checking. return {} diff --git a/node_cli/core/host.py b/node_cli/core/host.py index d66c1a54..b10c98d2 100644 --- a/node_cli/core/host.py +++ b/node_cli/core/host.py @@ -79,16 +79,14 @@ def get_flask_secret_key() -> str: secret_key_filepath = os.path.join(NODE_DATA_PATH, 'flask_db_key.txt') if not os.path.exists(secret_key_filepath): - error_exit( - f'Flask secret key file not found at {secret_key_filepath}', CLIExitCodes.FAILURE - ) + error_exit(f'Flask secret key file not found at {secret_key_filepath}') try: with open(secret_key_filepath, 'r') as key_file: secret_key = key_file.read().strip() return secret_key except (IOError, OSError) as e: - error_exit(f'Failed to read Flask secret key: {e}', CLIExitCodes.FAILURE) + error_exit(f'Failed to read Flask secret key: {e}') # Will never reach here, but needed for type checking. 
return '' @@ -96,7 +94,7 @@ def get_flask_secret_key() -> str: def prepare_host(env_filepath: str, env_type: str, allocation: bool = False) -> None: """Initialize SKALE node host environment.""" if not env_filepath or not env_type: - error_exit('Missing required parameters for host initialization', CLIExitCodes.FAILURE) + error_exit('Missing required parameters for host initialization') try: logger.info('Preparing host started') @@ -106,21 +104,11 @@ def prepare_host(env_filepath: str, env_type: str, allocation: bool = False) -> if allocation: update_resource_allocation(env_type) except Exception as e: - error_exit(f'Failed to prepare host: {str(e)}', CLIExitCodes.FAILURE) + error_exit(f'Failed to prepare host: {str(e)}') def is_node_inited() -> bool: - """Check if the SKALE node has been initialized. - - Determines initialization status by checking for existence of the - resource allocation file. - """ - try: - # Check if resource allocation file exists as initialization indicator - return os.path.isfile(RESOURCE_ALLOCATION_FILEPATH) - except OSError as e: - logger.error(f'Error checking node initialization status: {e}') - return False + return os.path.isfile(RESOURCE_ALLOCATION_FILEPATH) def make_dirs(): @@ -145,15 +133,7 @@ def make_dirs(): def save_env_params(env_filepath: str) -> None: - """Copy environment parameters file to SKALE directory.""" - if not os.path.isfile(env_filepath): - error_exit(f'Environment file not found: {env_filepath}', CLIExitCodes.FAILURE) - if not os.access(env_filepath, os.R_OK): - error_exit(f'Cannot read environment file: {env_filepath}', CLIExitCodes.FAILURE) - try: - copyfile(env_filepath, SKALE_DIR_ENV_FILEPATH) - except (IOError, OSError) as e: - error_exit(f'Failed to copy environment file: {e}', CLIExitCodes.FAILURE) + copyfile(env_filepath, SKALE_DIR_ENV_FILEPATH) def link_env_file(): diff --git a/node_cli/core/node.py b/node_cli/core/node.py index d6420e6a..e8b14f93 100644 --- a/node_cli/core/node.py +++ 
b/node_cli/core/node.py @@ -220,7 +220,7 @@ def cleanup_sync() -> None: def compose_node_env( - env_filepath: Optional[str], + env_filepath: str, inited_node: bool = False, sync_schains: Optional[bool] = None, pull_config_for_schain: Optional[str] = None, @@ -234,10 +234,8 @@ def compose_node_env( else: env_params = get_env_config(INIT_ENV_FILEPATH, sync_node=sync_node) - # Set mount directory based on node type mnt_dir = SCHAINS_MNT_DIR_SYNC if sync_node else SCHAINS_MNT_DIR_REGULAR - # Compose base environment dictionary env = { 'SKALE_DIR': SKALE_DIR, 'SCHAINS_MNT_DIR': mnt_dir, @@ -246,19 +244,15 @@ def compose_node_env( **env_params, } - # Add Flask secret key for initialized non-sync nodes if inited_node and not sync_node: env['FLASK_SECRET_KEY'] = get_flask_secret_key() - # Enable backup run for syncing schains if sync_schains and not sync_node: env['BACKUP_RUN'] = 'True' - # Add schain config pull parameter if specified if pull_config_for_schain: env['PULL_CONFIG_FOR_SCHAIN'] = pull_config_for_schain - # Remove empty values and return return {k: v for k, v in env.items() if v != ''} diff --git a/node_cli/main.py b/node_cli/main.py index d6331fd2..b2f072e6 100644 --- a/node_cli/main.py +++ b/node_cli/main.py @@ -123,5 +123,5 @@ def handle_exception(exc_type, exc_value, exc_traceback): except Exception as err: traceback.print_exc() logger.debug('Execution time: %d seconds', time.time() - start_time) - error_exit(err, CLIExitCodes.FAILURE) + error_exit(err) logger.debug('Execution time: %d seconds', time.time() - start_time) diff --git a/node_cli/operations/skale_node.py b/node_cli/operations/skale_node.py index fd3b6f8a..842a0461 100644 --- a/node_cli/operations/skale_node.py +++ b/node_cli/operations/skale_node.py @@ -43,7 +43,7 @@ def update_images(env: dict, sync_node: bool = False) -> None: def download_skale_node(stream: Optional[str] = None, src: Optional[str] = None) -> None: """Downloads SKALE node config from repo or local directory""" if not src and 
not stream: - error_exit('Either src path or stream must be provided', exit_code=CLIExitCodes.FAILURE) + error_exit('Either src path or stream must be provided') try: rm_dir(CONTAINER_CONFIG_TMP_PATH) @@ -52,17 +52,13 @@ def download_skale_node(stream: Optional[str] = None, src: Optional[str] = None) if src: if not os.path.isdir(src): - error_exit( - f'Source directory does not exist: {src}', exit_code=CLIExitCodes.FAILURE - ) + error_exit(f'Source directory does not exist: {src}') logger.info(f'Syncing config files from {src}') rsync_dirs(src, dest) - elif stream: + else: + assert stream logger.info(f'Cloning config files from {SKALE_NODE_REPO_URL} ({stream})') clone_repo(SKALE_NODE_REPO_URL, dest, stream) - else: - # Should never reach this point - error_exit('Either src path or stream must be provided', exit_code=CLIExitCodes.FAILURE) except (OSError, RuntimeError) as err: rm_dir(CONTAINER_CONFIG_TMP_PATH) diff --git a/node_cli/utils/git_utils.py b/node_cli/utils/git_utils.py index 4adfebab..96086843 100644 --- a/node_cli/utils/git_utils.py +++ b/node_cli/utils/git_utils.py @@ -29,56 +29,20 @@ def check_is_branch(repo: Repo, ref_name: str) -> bool: - """Check if the given reference name is a valid git branch.""" - if not repo or not isinstance(repo, Repo): - raise ValueError('Invalid repository object') - if not ref_name or not isinstance(ref_name, str): - raise ValueError('Invalid reference name') - - try: - # Verify if ref_name exists as a branch using git show-ref - repo.git.show_ref('--verify', f'refs/heads/{ref_name}') - logger.debug(f'{ref_name} is a branch') - return True - except GitCommandError: - # Expected error when reference is not found - logger.debug(f'{ref_name} is not a branch') - return False - except GitError as e: - # Git-specific errors (permissions, config, etc) - logger.error(f'Git error checking branch: {str(e)}') - raise RuntimeError(f'Git error checking branch: {str(e)}') from e - except Exception as e: - # Unexpected system errors - 
logger.error(f'Unexpected error checking branch: {str(e)}') - raise RuntimeError(f'Unexpected error checking branch: {str(e)}') from e + return ref_name in (branch.name for branch in repo.heads) def clone_repo(repo_url: str, repo_path: str, ref_name: str) -> None: - """Clone a git repository and checkout specified reference.""" - if not all([repo_url, repo_path, ref_name]): - error_exit('Empty repository URL, path or reference', CLIExitCodes.FAILURE) - if not all(isinstance(x, str) for x in [repo_url, repo_path, ref_name]): - error_exit('Invalid input types', CLIExitCodes.FAILURE) - try: logger.info(f'Cloning {repo_url} → {repo_path}') Repo.clone_from(repo_url, repo_path) fetch_pull_repo(repo_path, ref_name) - except GitError as e: - error_exit( - f'Git error cloning repository: {str(e)}', CLIExitCodes.OPERATION_EXECUTION_ERROR - ) except Exception as e: - error_exit(f'Unexpected error cloning repository: {str(e)}', CLIExitCodes.FAILURE) + error_exit(f'Unexpected error cloning repository: {str(e)}') def sync_repo(repo_url: str, repo_path: str, ref_name: str) -> None: """Sync Git repository by cloning if not exists or fetching latest changes.""" - if not all([repo_url, repo_path, ref_name]): - error_exit('Empty repository URL, path or reference', CLIExitCodes.FAILURE) - if not all(isinstance(x, str) for x in [repo_url, repo_path, ref_name]): - error_exit('Invalid input types', CLIExitCodes.FAILURE) logger.info(f'Sync repo {repo_url} → {repo_path}') if not os.path.isdir(os.path.join(repo_path, '.git')): @@ -89,31 +53,20 @@ def sync_repo(repo_url: str, repo_path: str, ref_name: str) -> None: def fetch_pull_repo(repo_path: str, ref_name: str) -> None: """Fetch latest changes and checkout/pull specific git reference.""" - # Validate inputs - if not repo_path or not isinstance(repo_path, str): - error_exit('Invalid repository path', CLIExitCodes.FAILURE) - if not ref_name or not isinstance(ref_name, str): - error_exit('Invalid reference name', CLIExitCodes.FAILURE) try: 
- # Initialize repo and get name for logging repo = Repo(repo_path) repo_name = os.path.basename(repo.working_dir) - # Fetch latest changes logger.info(f'Fetching latest changes for {repo_name}') repo.remotes.origin.fetch() - # Checkout specified reference logger.info(f'Checking out {ref_name} in {repo_name}') repo.git.checkout(ref_name) - # Pull latest changes if ref is a branch if check_is_branch(repo, ref_name): logger.info(f'Pulling latest changes for branch {ref_name}') repo.remotes.origin.pull() - except GitError as e: - error_exit(f'Git operation failed: {str(e)}', CLIExitCodes.OPERATION_EXECUTION_ERROR) except Exception as e: - error_exit(f'Repository operation failed: {str(e)}', CLIExitCodes.FAILURE) + error_exit(f'Repository operation failed: {str(e)}') diff --git a/node_cli/utils/helper.py b/node_cli/utils/helper.py index 0fcc7c9b..27c13b2d 100644 --- a/node_cli/utils/helper.py +++ b/node_cli/utils/helper.py @@ -339,10 +339,6 @@ def rm_dir(folder: str) -> None: def safe_mkdir(path: str, print_res: bool = False) -> None: - """Create a directory if it doesn't exist.""" - if not isinstance(path, str): - error_exit(f'path must be a string, got {type(path)}', exit_code=CLIExitCodes.FAILURE) - if os.path.exists(path): logger.debug(f'Directory {path} already exists') return @@ -352,31 +348,13 @@ def safe_mkdir(path: str, print_res: bool = False) -> None: if print_res: print(msg) - try: - os.makedirs(path, exist_ok=True) - except OSError as e: - logger.error(f'Failed to create directory {path}: {e}') - error_exit( - f'Failed to create directory {path}: {e}', - exit_code=CLIExitCodes.OPERATION_EXECUTION_ERROR, - ) + os.makedirs(path, exist_ok=True) def rsync_dirs(src: str, dest: str) -> None: - """Synchronize two directories using rsync.""" - if not isinstance(src, str) or not isinstance(dest, str): - error_exit('Source and destination paths must be strings', exit_code=CLIExitCodes.FAILURE) - - if not src.strip() or not dest.strip(): - error_exit('Source and 
destination paths cannot be empty', exit_code=CLIExitCodes.FAILURE) - - if not os.path.isdir(src): - error_exit(f'Source directory does not exist: {src}', exit_code=CLIExitCodes.FAILURE) - logger.info(f'Syncing directory {dest} with {src}') try: - # Sync all files including hidden ones run_cmd(['rsync', '-r', f'{src}/', dest]) run_cmd(['rsync', '-r', f'{src}/.git', dest]) except subprocess.CalledProcessError as e: diff --git a/node_cli/utils/print_formatters.py b/node_cli/utils/print_formatters.py index 844cc949..b5448d45 100644 --- a/node_cli/utils/print_formatters.py +++ b/node_cli/utils/print_formatters.py @@ -254,13 +254,10 @@ def print_err_response(error_payload: Any) -> None: """ try: if isinstance(error_payload, (list, tuple)): - # Join list items with newlines for multiple errors error_msg = '\n'.join(str(err) for err in error_payload) elif isinstance(error_payload, dict): - # Format dict as JSON string error_msg = json.dumps(error_payload, indent=2) else: - # Convert any other type to string error_msg = str(error_payload) print('Command failed with following errors:') @@ -270,7 +267,6 @@ def print_err_response(error_payload: Any) -> None: print(f'You can find more info in {DEBUG_LOG_FILEPATH}') except Exception as e: - # Fallback for unexpected errors while formatting print('Error occurred while processing error payload:') print(LONG_LINE) print(f'Original error payload: {error_payload}') diff --git a/ruff.toml b/ruff.toml index d823f4d6..9653b675 100644 --- a/ruff.toml +++ b/ruff.toml @@ -2,3 +2,7 @@ line-length = 100 [format] quote-style = "single" + +[lint] +# Add the `line-too-long` rule to the enforced rule set. 
+extend-select = ["E501"] \ No newline at end of file diff --git a/setup.py b/setup.py index 02ee5fb6..ebd27ab1 100644 --- a/setup.py +++ b/setup.py @@ -19,7 +19,6 @@ def find_version(*file_paths): extras_require = { 'linter': [ - 'flake8==7.1.1', 'isort>=4.2.15,<5.10.2', 'ruff==0.9.9', ], diff --git a/tests/configs/configs_env_validate_test.py b/tests/configs/configs_env_validate_test.py index 5e0bf3e9..06fe6d1c 100644 --- a/tests/configs/configs_env_validate_test.py +++ b/tests/configs/configs_env_validate_test.py @@ -22,9 +22,6 @@ from node_cli.utils.exit_codes import CLIExitCodes -# ============================================================================= -# Helper fake response for patching requests.get in network helpers -# ============================================================================= class FakeResponse: def __init__(self, status_code: int, json_data: Optional[dict] = None): self.status_code = status_code @@ -34,290 +31,249 @@ def json(self): return self._json_data -# ============================================================================= -# Tests for absent_params -# ============================================================================= -class TestAbsentParams: - def test_absent_params_returns_missing_keys(self): - params = { - 'A': '', # missing - 'B': 'value', - 'C': '', # missing - 'MONITORING_CONTAINERS': 'optional', - } - missing = absent_params(params) - # We expect keys A and C to be missing (assuming they are required) - assert 'A' in missing - assert 'C' in missing - # Optional keys should not be flagged - assert 'MONITORING_CONTAINERS' not in missing - - -# ============================================================================= -# Tests for file loading -# ============================================================================= -class TestLoadEnvFile: - def test_load_env_file_nonexistent(self): - with pytest.raises(SystemExit) as excinfo: - load_env_file('nonexistent.env') - assert excinfo.value.code == 
CLIExitCodes.FAILURE.value - - def test_load_env_file_not_readable(self, tmp_path): - # Create a temporary file and remove read permissions - env_file = tmp_path / 'test.env' - env_file.write_text('KEY=value') - os.chmod(env_file, 0o000) - with pytest.raises(SystemExit) as excinfo: - load_env_file(str(env_file)) - assert excinfo.value.code == CLIExitCodes.FAILURE.value - os.chmod(env_file, 0o644) # reset permissions - - -# ============================================================================= -# Tests for building and populating parameters -# ============================================================================= -class TestBuildAndPopulate: - def test_build_params_sync(self): - params = build_params(sync_node=True) - # Should contain SCHAIN_NAME among required keys. - assert 'SCHAIN_NAME' in params - - def test_build_params_non_sync(self): - params = build_params(sync_node=False) - # Should not contain SCHAIN_NAME (only in sync dictionary) - assert 'SCHAIN_NAME' not in params - - def test_populate_params_updates_from_environ(self, monkeypatch): - # Start with a base dictionary. 
- params = {'FOO': ''} - monkeypatch.setenv('FOO', 'bar') - populate_params(params) - assert params['FOO'] == 'bar' - - -# ============================================================================= -# Tests for validate_env_type -# ============================================================================= -class TestEnvType: - @pytest.mark.parametrize('env_type', ['mainnet', 'testnet', 'qanet', 'devnet']) - def test_valid_env_types(self, env_type): - # Should pass without exiting - validate_env_type(env_type) - - def test_invalid_env_type(self): - with pytest.raises(SystemExit) as excinfo: - validate_env_type('invalid') - assert excinfo.value.code == CLIExitCodes.FAILURE.value - - -# ============================================================================= -# Tests for network helper functions -# ============================================================================= -class TestNetworkHelpers: - def test_get_chain_id_success(self, monkeypatch): - fake_response = FakeResponse(200, {'result': '0x1'}) - - def fake_post(url, json): - return fake_response - - monkeypatch.setattr(requests, 'post', fake_post) - chain_id = get_chain_id('http://localhost:8545') - assert chain_id == 1 - - def test_get_chain_id_failure(self, monkeypatch): - fake_response = FakeResponse(404) - - def fake_post(url, json): - return fake_response - - monkeypatch.setattr(requests, 'post', fake_post) - with pytest.raises(SystemExit) as excinfo: - get_chain_id('http://localhost:8545') - assert excinfo.value.code == CLIExitCodes.FAILURE.value - - def test_get_network_metadata_success(self, requests_mock): - metadata = {'networks': [{'chainId': 1, 'path': 'mainnet'}]} - metadata_url = ( - 'https://raw.githubusercontent.com/skalenetwork/skale-contracts/' - 'refs/heads/deployments/metadata.json' - ) - requests_mock.get(metadata_url, json=metadata, status_code=200) - result = get_network_metadata() - assert result == metadata - - def test_get_network_metadata_failure(self, requests_mock): 
- metadata_url = ( - 'https://raw.githubusercontent.com/skalenetwork/skale-contracts/' - 'refs/heads/deployments/metadata.json' - ) - requests_mock.get(metadata_url, status_code=404) - with pytest.raises(SystemExit) as excinfo: - get_network_metadata() - assert excinfo.value.code == CLIExitCodes.FAILURE.value - - -# ============================================================================= -# Tests for contract validations -# ============================================================================= -class TestContractValidation: - def test_validate_contract_address_success(self, requests_mock): - # Simulate a valid contract code response. - endpoint = 'http://localhost:8545' - requests_mock.post(endpoint, json={'result': '0x123'}) - # This call should not exit. - validate_contract_address('0x' + 'a' * 40, endpoint) +def test_absent_params_returns_missing_keys(self): + params = { + 'A': '', + 'B': 'value', + 'C': '', + 'MONITORING_CONTAINERS': 'optional', + } + missing = absent_params(params) + assert 'A' in missing + assert 'C' in missing + assert 'MONITORING_CONTAINERS' not in missing + + +def test_load_env_file_nonexistent(self): + with pytest.raises(SystemExit) as excinfo: + load_env_file('nonexistent.env') + assert excinfo.value.code == CLIExitCodes.FAILURE.value + + +def test_load_env_file_not_readable(self, tmp_path): + # Create a temporary file and remove read permissions + env_file = tmp_path / 'test.env' + env_file.write_text('KEY=value') + os.chmod(env_file, 0o000) + with pytest.raises(SystemExit) as excinfo: + load_env_file(str(env_file)) + assert excinfo.value.code == CLIExitCodes.FAILURE.value + os.chmod(env_file, 0o644) # reset permissions + + +def test_build_params_sync(self): + params = build_params(sync_node=True) + assert 'SCHAIN_NAME' in params + + +def test_build_params_non_sync(self): + params = build_params(sync_node=False) + assert 'SCHAIN_NAME' not in params + + +def test_populate_params_updates_from_environ(self, monkeypatch): + 
params = {'FOO': ''} + monkeypatch.setenv('FOO', 'bar') + populate_params(params) + assert params['FOO'] == 'bar' + + +@pytest.mark.parametrize('env_type', ['mainnet', 'testnet', 'qanet', 'devnet']) +def test_valid_env_types(self, env_type): + validate_env_type(env_type) + + +def test_invalid_env_type(self): + with pytest.raises(SystemExit) as excinfo: + validate_env_type('invalid') + assert excinfo.value.code == CLIExitCodes.FAILURE.value - def test_validate_contract_address_no_code(self, requests_mock): - endpoint = 'http://localhost:8545' - requests_mock.post(endpoint, json={'result': '0x'}) - with pytest.raises(SystemExit) as excinfo: - validate_contract_address('0x' + 'a' * 40, endpoint) - assert excinfo.value.code == CLIExitCodes.FAILURE.value - - def test_validate_contract_alias_success(self, requests_mock): - endpoint = 'http://localhost:8545' - # Fake chain ID response. - requests_mock.post(endpoint, json={'result': '0x1'}) - # Fake metadata response. - metadata_url = ( - 'https://raw.githubusercontent.com/skalenetwork/skale-contracts/' - 'refs/heads/deployments/metadata.json' - ) - metadata = {'networks': [{'chainId': 1, 'path': 'mainnet'}]} - requests_mock.get(metadata_url, json=metadata, status_code=200) - # Fake deployment URL response. - alias_url = ( - 'https://raw.githubusercontent.com/skalenetwork/skale-contracts/' - 'refs/heads/deployments/mainnet/skale-manager/test-alias.json' - ) - requests_mock.get(alias_url, status_code=200) - validate_contract_alias('test-alias', ContractType.MANAGER, endpoint) - def test_validate_contract_alias_network_missing(self, requests_mock): - endpoint = 'http://localhost:8545' - requests_mock.post(endpoint, json={'result': '0x1'}) - metadata_url = ( - 'https://raw.githubusercontent.com/skalenetwork/skale-contracts/' - 'refs/heads/deployments/metadata.json' - ) - # Return empty networks list. 
- requests_mock.get(metadata_url, json={'networks': []}, status_code=200) - with pytest.raises(SystemExit) as excinfo: - validate_contract_alias('test-alias', ContractType.MANAGER, endpoint) - assert excinfo.value.code == CLIExitCodes.FAILURE.value - - -# ============================================================================= -# Tests for validate_env_alias_or_address and validate_params -# ============================================================================= -class TestEnvAliasAndParams: - def test_validate_env_alias_or_address_with_address(self, requests_mock): - endpoint = 'http://localhost:8545' - # Provide a fake contract address: 42 characters starting with '0x' - addr = '0x' + 'b' * 40 - # Patch validate_contract_address to succeed - requests_mock.post(endpoint, json={'result': '0x1'}) - validate_env_alias_or_address(addr, ContractType.IMA, endpoint) - - def test_validate_env_alias_or_address_with_alias(self, requests_mock): - endpoint = 'http://localhost:8545' - # For alias, we simulate a valid contract alias check. - # Fake chain ID response: - requests_mock.post(endpoint, json={'result': '0x1'}) - metadata_url = ( - 'https://raw.githubusercontent.com/skalenetwork/skale-contracts/' - 'refs/heads/deployments/metadata.json' - ) - metadata = {'networks': [{'chainId': 1, 'path': 'mainnet'}]} - requests_mock.get(metadata_url, json=metadata, status_code=200) - # Fake deployment response. - alias_url = ( - 'https://raw.githubusercontent.com/skalenetwork/skale-contracts/' - 'refs/heads/deployments/mainnet/mainnet-ima/test-alias.json' - ) - requests_mock.get(alias_url, status_code=200) - validate_env_alias_or_address('test-alias', ContractType.IMA, endpoint) - - def test_validate_params_missing_key(self): - # Create a dictionary missing one required key. 
- populated_params = { - 'CONTAINER_CONFIGS_STREAM': 'value', - 'ENDPOINT': 'http://localhost:8545', - 'MANAGER_CONTRACTS': '', - 'FILEBEAT_HOST': '127.0.0.1:3010', - 'DISK_MOUNTPOINT': '/dev/sss', - 'SGX_SERVER_URL': 'http://127.0.0.1', - 'DOCKER_LVMPY_STREAM': 'value', - 'ENV_TYPE': 'mainnet', - } - with pytest.raises(SystemExit) as excinfo: - validate_params(populated_params) - assert excinfo.value.code == CLIExitCodes.FAILURE.value - - def test_validate_params_success(self, valid_env_params, requests_mock): - endpoint = valid_env_params['ENDPOINT'] - # Fake chain ID response. - requests_mock.post(endpoint, json={'result': '0x1'}) - # Fake metadata response. - metadata_url = ( - 'https://raw.githubusercontent.com/skalenetwork/skale-contracts/' - 'refs/heads/deployments/metadata.json' - ) - metadata = {'networks': [{'chainId': 1, 'path': 'mainnet'}]} - requests_mock.get(metadata_url, json=metadata, status_code=200) - ima_alias_url = ( - 'https://raw.githubusercontent.com/skalenetwork/skale-contracts/' - 'refs/heads/deployments/mainnet/mainnet-ima/test-ima.json' - ) - requests_mock.get(ima_alias_url, status_code=200) - manager_alias_url = ( - 'https://raw.githubusercontent.com/skalenetwork/skale-contracts/' - 'refs/heads/deployments/mainnet/skale-manager/test-manager.json' - ) - requests_mock.get(manager_alias_url, status_code=200) - # Should not exit. 
- validate_params(valid_env_params) - - -# ============================================================================= -# Tests for get_env_config -# ============================================================================= -class TestGetEnvConfig: - def test_get_env_config_success( - self, valid_env_file, mock_chain_response, mock_networks_metadata, requests_mock - ): - endpoint = 'http://localhost:8545' - # Patch network calls used in validation - requests_mock.post(endpoint, json=mock_chain_response) - metadata_url = ( - 'https://raw.githubusercontent.com/skalenetwork/skale-contracts/' - 'refs/heads/deployments/metadata.json' - ) - requests_mock.get(metadata_url, json=mock_networks_metadata, status_code=200) - ima_alias_url = ( - 'https://raw.githubusercontent.com/skalenetwork/skale-contracts/' - 'refs/heads/deployments/mainnet/mainnet-ima/test-ima.json' - ) - requests_mock.get(ima_alias_url, status_code=200) - manager_alias_url = ( - 'https://raw.githubusercontent.com/skalenetwork/skale-contracts/' - 'refs/heads/deployments/mainnet/skale-manager/test-manager.json' - ) - requests_mock.get(manager_alias_url, status_code=200) - config = get_env_config(valid_env_file) - # Assert that keys from the env file are present (using string values) - assert config['ENDPOINT'] == 'http://localhost:8545' - # Also check that ENV_TYPE is one of the allowed ones - assert config['ENV_TYPE'] in ALLOWED_ENV_TYPES - - def test_get_env_config_missing_file(self): - with pytest.raises(SystemExit) as excinfo: - get_env_config('nonexistent.env') - assert excinfo.value.code == CLIExitCodes.FAILURE.value - - def test_get_env_config_unreadable_file(self, valid_env_file): - os.chmod(valid_env_file, 0o000) - with pytest.raises(SystemExit) as excinfo: - get_env_config(valid_env_file) - assert excinfo.value.code == CLIExitCodes.FAILURE.value - os.chmod(valid_env_file, 0o644) +def test_get_chain_id_success(self, monkeypatch): + fake_response = FakeResponse(200, {'result': '0x1'}) + + def 
fake_post(url, json): + return fake_response + + monkeypatch.setattr(requests, 'post', fake_post) + chain_id = get_chain_id('http://localhost:8545') + assert chain_id == 1 + + +def test_get_chain_id_failure(self, monkeypatch): + fake_response = FakeResponse(404) + + def fake_post(url, json): + return fake_response + + monkeypatch.setattr(requests, 'post', fake_post) + with pytest.raises(SystemExit) as excinfo: + get_chain_id('http://localhost:8545') + assert excinfo.value.code == CLIExitCodes.FAILURE.value + + +def test_get_network_metadata_success(self, requests_mock): + metadata = {'networks': [{'chainId': 1, 'path': 'mainnet'}]} + metadata_url = ( + 'https://raw.githubusercontent.com/skalenetwork/skale-contracts/' + 'refs/heads/deployments/metadata.json' + ) + requests_mock.get(metadata_url, json=metadata, status_code=200) + result = get_network_metadata() + assert result == metadata + + +def test_get_network_metadata_failure(self, requests_mock): + metadata_url = ( + 'https://raw.githubusercontent.com/skalenetwork/skale-contracts/' + 'refs/heads/deployments/metadata.json' + ) + requests_mock.get(metadata_url, status_code=404) + with pytest.raises(SystemExit) as excinfo: + get_network_metadata() + assert excinfo.value.code == CLIExitCodes.FAILURE.value + + +def test_validate_contract_address_success(self, requests_mock): + endpoint = 'http://localhost:8545' + requests_mock.post(endpoint, json={'result': '0x123'}) + validate_contract_address('0x' + 'a' * 40, endpoint) + + +def test_validate_contract_address_no_code(self, requests_mock): + endpoint = 'http://localhost:8545' + requests_mock.post(endpoint, json={'result': '0x'}) + with pytest.raises(SystemExit) as excinfo: + validate_contract_address('0x' + 'a' * 40, endpoint) + assert excinfo.value.code == CLIExitCodes.FAILURE.value + + +def test_validate_contract_alias_success(self, requests_mock): + endpoint = 'http://localhost:8545' + requests_mock.post(endpoint, json={'result': '0x1'}) + metadata_url = ( + 
'https://raw.githubusercontent.com/skalenetwork/skale-contracts/' + 'refs/heads/deployments/metadata.json' + ) + metadata = {'networks': [{'chainId': 1, 'path': 'mainnet'}]} + requests_mock.get(metadata_url, json=metadata, status_code=200) + alias_url = ( + 'https://raw.githubusercontent.com/skalenetwork/skale-contracts/' + 'refs/heads/deployments/mainnet/skale-manager/test-alias.json' + ) + requests_mock.get(alias_url, status_code=200) + validate_contract_alias('test-alias', ContractType.MANAGER, endpoint) + + +def test_validate_contract_alias_network_missing(self, requests_mock): + endpoint = 'http://localhost:8545' + requests_mock.post(endpoint, json={'result': '0x1'}) + metadata_url = ( + 'https://raw.githubusercontent.com/skalenetwork/skale-contracts/' + 'refs/heads/deployments/metadata.json' + ) + requests_mock.get(metadata_url, json={'networks': []}, status_code=200) + with pytest.raises(SystemExit) as excinfo: + validate_contract_alias('test-alias', ContractType.MANAGER, endpoint) + assert excinfo.value.code == CLIExitCodes.FAILURE.value + + +def test_validate_env_alias_or_address_with_address(self, requests_mock): + endpoint = 'http://localhost:8545' + addr = '0x' + 'b' * 40 + requests_mock.post(endpoint, json={'result': '0x1'}) + validate_env_alias_or_address(addr, ContractType.IMA, endpoint) + + +def test_validate_env_alias_or_address_with_alias(self, requests_mock): + endpoint = 'http://localhost:8545' + requests_mock.post(endpoint, json={'result': '0x1'}) + metadata_url = ( + 'https://raw.githubusercontent.com/skalenetwork/skale-contracts/' + 'refs/heads/deployments/metadata.json' + ) + metadata = {'networks': [{'chainId': 1, 'path': 'mainnet'}]} + requests_mock.get(metadata_url, json=metadata, status_code=200) + alias_url = ( + 'https://raw.githubusercontent.com/skalenetwork/skale-contracts/' + 'refs/heads/deployments/mainnet/mainnet-ima/test-alias.json' + ) + requests_mock.get(alias_url, status_code=200) + validate_env_alias_or_address('test-alias', 
ContractType.IMA, endpoint) + + +def test_validate_params_missing_key(self): + populated_params = { + 'CONTAINER_CONFIGS_STREAM': 'value', + 'ENDPOINT': 'http://localhost:8545', + 'MANAGER_CONTRACTS': '', + 'FILEBEAT_HOST': '127.0.0.1:3010', + 'DISK_MOUNTPOINT': '/dev/sss', + 'SGX_SERVER_URL': 'http://127.0.0.1', + 'DOCKER_LVMPY_STREAM': 'value', + 'ENV_TYPE': 'mainnet', + } + with pytest.raises(SystemExit) as excinfo: + validate_params(populated_params) + assert excinfo.value.code == CLIExitCodes.FAILURE.value + + +def test_validate_params_success(self, valid_env_params, requests_mock): + endpoint = valid_env_params['ENDPOINT'] + requests_mock.post(endpoint, json={'result': '0x1'}) + metadata_url = ( + 'https://raw.githubusercontent.com/skalenetwork/skale-contracts/' + 'refs/heads/deployments/metadata.json' + ) + metadata = {'networks': [{'chainId': 1, 'path': 'mainnet'}]} + requests_mock.get(metadata_url, json=metadata, status_code=200) + ima_alias_url = ( + 'https://raw.githubusercontent.com/skalenetwork/skale-contracts/' + 'refs/heads/deployments/mainnet/mainnet-ima/test-ima.json' + ) + requests_mock.get(ima_alias_url, status_code=200) + manager_alias_url = ( + 'https://raw.githubusercontent.com/skalenetwork/skale-contracts/' + 'refs/heads/deployments/mainnet/skale-manager/test-manager.json' + ) + requests_mock.get(manager_alias_url, status_code=200) + validate_params(valid_env_params) + + +def test_get_env_config_success( + self, valid_env_file, mock_chain_response, mock_networks_metadata, requests_mock +): + endpoint = 'http://localhost:8545' + requests_mock.post(endpoint, json=mock_chain_response) + metadata_url = ( + 'https://raw.githubusercontent.com/skalenetwork/skale-contracts/' + 'refs/heads/deployments/metadata.json' + ) + requests_mock.get(metadata_url, json=mock_networks_metadata, status_code=200) + ima_alias_url = ( + 'https://raw.githubusercontent.com/skalenetwork/skale-contracts/' + 'refs/heads/deployments/mainnet/mainnet-ima/test-ima.json' + ) + 
requests_mock.get(ima_alias_url, status_code=200) + manager_alias_url = ( + 'https://raw.githubusercontent.com/skalenetwork/skale-contracts/' + 'refs/heads/deployments/mainnet/skale-manager/test-manager.json' + ) + requests_mock.get(manager_alias_url, status_code=200) + config = get_env_config(valid_env_file) + assert config['ENDPOINT'] == 'http://localhost:8545' + assert config['ENV_TYPE'] in ALLOWED_ENV_TYPES + + +def test_get_env_config_missing_file(self): + with pytest.raises(SystemExit) as excinfo: + get_env_config('nonexistent.env') + assert excinfo.value.code == CLIExitCodes.FAILURE.value + + +def test_get_env_config_unreadable_file(self, valid_env_file): + os.chmod(valid_env_file, 0o000) + with pytest.raises(SystemExit) as excinfo: + get_env_config(valid_env_file) + assert excinfo.value.code == CLIExitCodes.FAILURE.value + os.chmod(valid_env_file, 0o644) From 5d78875ea52600c9dc801f3f6ef931cd6b897c5d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20Gon=C3=A7alves?= Date: Thu, 10 Apr 2025 23:17:38 +0100 Subject: [PATCH 016/332] Removed unused imports --- node_cli/cli/sync_node.py | 1 - node_cli/configs/env.py | 1 - node_cli/core/host.py | 1 - node_cli/main.py | 1 - node_cli/utils/git_utils.py | 2 -- 5 files changed, 6 deletions(-) diff --git a/node_cli/cli/sync_node.py b/node_cli/cli/sync_node.py index 8e0f41a5..ea4ee280 100644 --- a/node_cli/cli/sync_node.py +++ b/node_cli/cli/sync_node.py @@ -29,7 +29,6 @@ streamed_cmd, URL_TYPE, ) -from node_cli.utils.exit_codes import CLIExitCodes G_TEXTS = safe_load_texts() diff --git a/node_cli/configs/env.py b/node_cli/configs/env.py index 4785dffb..2af3fec0 100644 --- a/node_cli/configs/env.py +++ b/node_cli/configs/env.py @@ -25,7 +25,6 @@ from node_cli.configs import SKALE_DIR, CONTAINER_CONFIG_PATH from node_cli.utils.helper import error_exit, is_contract_address -from node_cli.utils.exit_codes import CLIExitCodes SKALE_DIR_ENV_FILEPATH = os.path.join(SKALE_DIR, '.env') CONFIGS_ENV_FILEPATH = 
os.path.join(CONTAINER_CONFIG_PATH, '.env') diff --git a/node_cli/core/host.py b/node_cli/core/host.py index b10c98d2..7f484fb2 100644 --- a/node_cli/core/host.py +++ b/node_cli/core/host.py @@ -24,7 +24,6 @@ from node_cli.core.resources import update_resource_allocation from node_cli.utils.helper import error_exit -from node_cli.utils.exit_codes import CLIExitCodes from node_cli.configs import ( ADMIN_PORT, diff --git a/node_cli/main.py b/node_cli/main.py index b2f072e6..8320331f 100644 --- a/node_cli/main.py +++ b/node_cli/main.py @@ -39,7 +39,6 @@ from node_cli.cli.resources_allocation import resources_allocation_cli from node_cli.cli.sync_node import sync_node_cli -from node_cli.utils.exit_codes import CLIExitCodes from node_cli.utils.helper import safe_load_texts, init_default_logger from node_cli.configs import LONG_LINE from node_cli.core.host import init_logs_dir diff --git a/node_cli/utils/git_utils.py b/node_cli/utils/git_utils.py index 96086843..76e541a1 100644 --- a/node_cli/utils/git_utils.py +++ b/node_cli/utils/git_utils.py @@ -21,8 +21,6 @@ import logging from git.repo.base import Repo -from git.exc import GitCommandError, GitError -from node_cli.utils.exit_codes import CLIExitCodes from node_cli.utils.helper import error_exit logger = logging.getLogger(__name__) From 22faaaad36701aff2187db31194cf2371ed0863e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20Gon=C3=A7alves?= Date: Fri, 11 Apr 2025 13:44:19 +0100 Subject: [PATCH 017/332] Removes unnecessary 'self' parameter from test functions --- tests/configs/configs_env_validate_test.py | 46 +++++++++++----------- 1 file changed, 23 insertions(+), 23 deletions(-) diff --git a/tests/configs/configs_env_validate_test.py b/tests/configs/configs_env_validate_test.py index 06fe6d1c..c79f6f07 100644 --- a/tests/configs/configs_env_validate_test.py +++ b/tests/configs/configs_env_validate_test.py @@ -31,7 +31,7 @@ def json(self): return self._json_data -def 
test_absent_params_returns_missing_keys(self): +def test_absent_params_returns_missing_keys(): params = { 'A': '', 'B': 'value', @@ -44,13 +44,13 @@ def test_absent_params_returns_missing_keys(self): assert 'MONITORING_CONTAINERS' not in missing -def test_load_env_file_nonexistent(self): +def test_load_env_file_nonexistent(): with pytest.raises(SystemExit) as excinfo: load_env_file('nonexistent.env') assert excinfo.value.code == CLIExitCodes.FAILURE.value -def test_load_env_file_not_readable(self, tmp_path): +def test_load_env_file_not_readable(tmp_path): # Create a temporary file and remove read permissions env_file = tmp_path / 'test.env' env_file.write_text('KEY=value') @@ -61,17 +61,17 @@ def test_load_env_file_not_readable(self, tmp_path): os.chmod(env_file, 0o644) # reset permissions -def test_build_params_sync(self): +def test_build_params_sync(): params = build_params(sync_node=True) assert 'SCHAIN_NAME' in params -def test_build_params_non_sync(self): +def test_build_params_non_sync(): params = build_params(sync_node=False) assert 'SCHAIN_NAME' not in params -def test_populate_params_updates_from_environ(self, monkeypatch): +def test_populate_params_updates_from_environ(monkeypatch): params = {'FOO': ''} monkeypatch.setenv('FOO', 'bar') populate_params(params) @@ -79,17 +79,17 @@ def test_populate_params_updates_from_environ(self, monkeypatch): @pytest.mark.parametrize('env_type', ['mainnet', 'testnet', 'qanet', 'devnet']) -def test_valid_env_types(self, env_type): +def test_valid_env_types(env_type): validate_env_type(env_type) -def test_invalid_env_type(self): +def test_invalid_env_type(): with pytest.raises(SystemExit) as excinfo: validate_env_type('invalid') assert excinfo.value.code == CLIExitCodes.FAILURE.value -def test_get_chain_id_success(self, monkeypatch): +def test_get_chain_id_success(monkeypatch): fake_response = FakeResponse(200, {'result': '0x1'}) def fake_post(url, json): @@ -100,7 +100,7 @@ def fake_post(url, json): assert chain_id == 1 
-def test_get_chain_id_failure(self, monkeypatch): +def test_get_chain_id_failure(monkeypatch): fake_response = FakeResponse(404) def fake_post(url, json): @@ -112,7 +112,7 @@ def fake_post(url, json): assert excinfo.value.code == CLIExitCodes.FAILURE.value -def test_get_network_metadata_success(self, requests_mock): +def test_get_network_metadata_success(requests_mock): metadata = {'networks': [{'chainId': 1, 'path': 'mainnet'}]} metadata_url = ( 'https://raw.githubusercontent.com/skalenetwork/skale-contracts/' @@ -123,7 +123,7 @@ def test_get_network_metadata_success(self, requests_mock): assert result == metadata -def test_get_network_metadata_failure(self, requests_mock): +def test_get_network_metadata_failure(requests_mock): metadata_url = ( 'https://raw.githubusercontent.com/skalenetwork/skale-contracts/' 'refs/heads/deployments/metadata.json' @@ -134,13 +134,13 @@ def test_get_network_metadata_failure(self, requests_mock): assert excinfo.value.code == CLIExitCodes.FAILURE.value -def test_validate_contract_address_success(self, requests_mock): +def test_validate_contract_address_success(requests_mock): endpoint = 'http://localhost:8545' requests_mock.post(endpoint, json={'result': '0x123'}) validate_contract_address('0x' + 'a' * 40, endpoint) -def test_validate_contract_address_no_code(self, requests_mock): +def test_validate_contract_address_no_code(requests_mock): endpoint = 'http://localhost:8545' requests_mock.post(endpoint, json={'result': '0x'}) with pytest.raises(SystemExit) as excinfo: @@ -148,7 +148,7 @@ def test_validate_contract_address_no_code(self, requests_mock): assert excinfo.value.code == CLIExitCodes.FAILURE.value -def test_validate_contract_alias_success(self, requests_mock): +def test_validate_contract_alias_success(requests_mock): endpoint = 'http://localhost:8545' requests_mock.post(endpoint, json={'result': '0x1'}) metadata_url = ( @@ -165,7 +165,7 @@ def test_validate_contract_alias_success(self, requests_mock): 
validate_contract_alias('test-alias', ContractType.MANAGER, endpoint) -def test_validate_contract_alias_network_missing(self, requests_mock): +def test_validate_contract_alias_network_missing(requests_mock): endpoint = 'http://localhost:8545' requests_mock.post(endpoint, json={'result': '0x1'}) metadata_url = ( @@ -178,14 +178,14 @@ def test_validate_contract_alias_network_missing(self, requests_mock): assert excinfo.value.code == CLIExitCodes.FAILURE.value -def test_validate_env_alias_or_address_with_address(self, requests_mock): +def test_validate_env_alias_or_address_with_address(requests_mock): endpoint = 'http://localhost:8545' addr = '0x' + 'b' * 40 requests_mock.post(endpoint, json={'result': '0x1'}) validate_env_alias_or_address(addr, ContractType.IMA, endpoint) -def test_validate_env_alias_or_address_with_alias(self, requests_mock): +def test_validate_env_alias_or_address_with_alias(requests_mock): endpoint = 'http://localhost:8545' requests_mock.post(endpoint, json={'result': '0x1'}) metadata_url = ( @@ -202,7 +202,7 @@ def test_validate_env_alias_or_address_with_alias(self, requests_mock): validate_env_alias_or_address('test-alias', ContractType.IMA, endpoint) -def test_validate_params_missing_key(self): +def test_validate_params_missing_key(): populated_params = { 'CONTAINER_CONFIGS_STREAM': 'value', 'ENDPOINT': 'http://localhost:8545', @@ -218,7 +218,7 @@ def test_validate_params_missing_key(self): assert excinfo.value.code == CLIExitCodes.FAILURE.value -def test_validate_params_success(self, valid_env_params, requests_mock): +def test_validate_params_success(valid_env_params, requests_mock): endpoint = valid_env_params['ENDPOINT'] requests_mock.post(endpoint, json={'result': '0x1'}) metadata_url = ( @@ -241,7 +241,7 @@ def test_validate_params_success(self, valid_env_params, requests_mock): def test_get_env_config_success( - self, valid_env_file, mock_chain_response, mock_networks_metadata, requests_mock + valid_env_file, mock_chain_response, 
mock_networks_metadata, requests_mock ): endpoint = 'http://localhost:8545' requests_mock.post(endpoint, json=mock_chain_response) @@ -265,13 +265,13 @@ def test_get_env_config_success( assert config['ENV_TYPE'] in ALLOWED_ENV_TYPES -def test_get_env_config_missing_file(self): +def test_get_env_config_missing_file(): with pytest.raises(SystemExit) as excinfo: get_env_config('nonexistent.env') assert excinfo.value.code == CLIExitCodes.FAILURE.value -def test_get_env_config_unreadable_file(self, valid_env_file): +def test_get_env_config_unreadable_file(valid_env_file): os.chmod(valid_env_file, 0o000) with pytest.raises(SystemExit) as excinfo: get_env_config(valid_env_file) From 88631a8e37d063d9d436a14a3ae47fe5d6fd9d4d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20Gon=C3=A7alves?= Date: Fri, 11 Apr 2025 14:04:08 +0100 Subject: [PATCH 018/332] Added exception handling during environment file loading. --- node_cli/configs/env.py | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/node_cli/configs/env.py b/node_cli/configs/env.py index 2af3fec0..9bc33747 100644 --- a/node_cli/configs/env.py +++ b/node_cli/configs/env.py @@ -102,8 +102,11 @@ def get_env_config( def load_env_file(env_filepath: str) -> None: """Check and load environment variables from the given file.""" - if not load_dotenv(dotenv_path=env_filepath): - error_exit(f'Failed to load environment from {env_filepath}') + try: + if not load_dotenv(dotenv_path=env_filepath): + error_exit(f'Failed to load environment from {env_filepath}') + except Exception as e: + error_exit(f'An unexpected error occurred loading environment file {env_filepath}: {e}') def build_params(sync_node: bool = False) -> Dict[str, str]: From c04b56cd1e1b1c3d18a7004bd873f9772ab149e0 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20Gon=C3=A7alves?= Date: Mon, 14 Apr 2025 15:07:39 +0100 Subject: [PATCH 019/332] Addresses comments from PR#839 - Refactored environment validation into two files; - Updated 
environment validation function names for improved clarity; - Fixed minor docstring inconsistencies and formatting issues across multiple modules. --- node_cli/configs/alias_address_validation.py | 118 +++++++++++++++ node_cli/configs/env.py | 147 ++----------------- node_cli/configs/routes.py | 2 +- node_cli/core/host.py | 4 - node_cli/core/nftables.py | 47 +++--- node_cli/core/node.py | 10 +- node_cli/core/resources.py | 4 +- node_cli/core/schains.py | 4 +- node_cli/operations/skale_node.py | 2 +- node_cli/utils/exit_codes.py | 3 +- node_cli/utils/git_utils.py | 4 +- node_cli/utils/global_config.py | 9 +- node_cli/utils/helper.py | 9 +- tests/cli/node_test.py | 8 +- tests/cli/resources_allocation_test.py | 6 +- tests/cli/sync_node_test.py | 6 +- tests/configs/configs_env_validate_test.py | 89 ++++++----- tests/conftest.py | 31 ++-- tests/core/core_node_test.py | 4 +- tests/core/nftables_test.py | 22 +-- 20 files changed, 250 insertions(+), 279 deletions(-) create mode 100644 node_cli/configs/alias_address_validation.py diff --git a/node_cli/configs/alias_address_validation.py b/node_cli/configs/alias_address_validation.py new file mode 100644 index 00000000..ea74b81d --- /dev/null +++ b/node_cli/configs/alias_address_validation.py @@ -0,0 +1,118 @@ +# -*- coding: utf-8 -*- +# +# This file is part of node-cli +# +# Copyright (C) 2019-Present SKALE Labs +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Affero General Public License for more details. +# +# You should have received a copy of the GNU Affero General Public License +# along with this program. 
If not, see <https://www.gnu.org/licenses/>. + +from enum import Enum +from typing import Dict, Optional + +import requests + +from node_cli.utils.helper import error_exit, is_contract_address + + +METADATA_URL: str = ( + 'https://raw.githubusercontent.com/skalenetwork/skale-contracts/' + 'refs/heads/deployments/metadata.json' +) + + +class ContractType(Enum): + """Contract types supported by the system using skale-contracts library.""" + + IMA = 'mainnet-ima' + MANAGER = 'skale-manager' + + +def validate_env_alias_or_address( + alias_or_address: str, contract_type: ContractType, endpoint: str +) -> None: + if is_contract_address(alias_or_address): + validate_contract_address(alias_or_address, endpoint) + else: + validate_contract_alias(alias_or_address, contract_type, endpoint) + + +def validate_contract_address(contract_address: str, endpoint: str) -> None: + try: + response = requests.post( + endpoint, + json={ + 'jsonrpc': '2.0', + 'method': 'eth_getCode', + 'params': [contract_address, 'latest'], + 'id': 1, + }, + ) + if response.status_code != 200: + error_exit(f'Failed to verify contract at address {contract_address}') + result = response.json().get('result') + if not result or result in ['0x', '0x0']: + error_exit(f'No contract code found at address {contract_address}') + except requests.RequestException as e: + error_exit(f'Failed to validate contract address: {str(e)}') + + +def get_deployment_url(alias: str, contract_type: ContractType, network_path: str) -> str: + return ( + f'https://raw.githubusercontent.com/skalenetwork/skale-contracts/' + f'refs/heads/deployments/{network_path}/{contract_type.value}/{alias}.json' + ) + + +def validate_contract_alias(alias: str, contract_type: ContractType, endpoint: str) -> None: + try: + chain_id = get_chain_id(endpoint) + metadata = get_network_metadata() + networks = metadata.get('networks', []) + network_path: Optional[str] = None + for net in networks: + if net.get('chainId') == chain_id: + network_path = net.get('path') + break + if not
network_path: + error_exit(f'Network with chain ID {chain_id} not found in metadata') + if not isinstance(network_path, str): + error_exit(f'Invalid network path type: {network_path}') + deployment_url = get_deployment_url(alias, contract_type, network_path) + if requests.get(deployment_url).status_code != 200: + error_exit(f"Contract alias '{alias}' not found for {contract_type.value}") + except requests.RequestException as e: + error_exit(f"Failed to validate contract alias '{alias}': {str(e)}") + + +def get_chain_id(endpoint: str) -> int: + try: + response = requests.post( + endpoint, + json={'jsonrpc': '2.0', 'method': 'eth_chainId', 'params': [], 'id': 1}, + ) + if response.status_code != 200: + error_exit('Failed to get chain ID from endpoint') + return int(response.json()['result'], 16) + except requests.RequestException as e: + error_exit(f'Failed to get chain ID: {str(e)}') + + +def get_network_metadata() -> Dict: + try: + response = requests.get(METADATA_URL) + if response.status_code != 200: + error_exit('Failed to fetch networks metadata') + return response.json() + except requests.RequestException as e: + error_exit(f'Failed to fetch networks metadata: {str(e)}') diff --git a/node_cli/configs/env.py b/node_cli/configs/env.py index 9bc33747..c6528421 100644 --- a/node_cli/configs/env.py +++ b/node_cli/configs/env.py @@ -18,27 +18,19 @@ # along with this program. If not, see . 
import os -from typing import Dict, List, Optional +from typing import Dict, List + from dotenv import load_dotenv -import requests -from enum import Enum from node_cli.configs import SKALE_DIR, CONTAINER_CONFIG_PATH -from node_cli.utils.helper import error_exit, is_contract_address +from node_cli.configs.alias_address_validation import validate_env_alias_or_address, ContractType +from node_cli.utils.helper import error_exit SKALE_DIR_ENV_FILEPATH = os.path.join(SKALE_DIR, '.env') CONFIGS_ENV_FILEPATH = os.path.join(CONTAINER_CONFIG_PATH, '.env') ALLOWED_ENV_TYPES = ['mainnet', 'testnet', 'qanet', 'devnet'] - -class ContractType(Enum): - """Contract types supported by the system with skale-contracts integration.""" - - IMA = 'mainnet-ima' - MANAGER = 'skale-manager' - - REQUIRED_PARAMS: Dict[str, str] = { 'CONTAINER_CONFIGS_STREAM': '', 'ENDPOINT': '', @@ -78,55 +70,42 @@ class ContractType(Enum): 'SKIP_DOCKER_CLEANUP': '', } -METADATA_URL: str = ( - 'https://raw.githubusercontent.com/skalenetwork/skale-contracts/' - 'refs/heads/deployments/metadata.json' -) - -def absent_params(params: Dict[str, str]) -> List[str]: - """Return a list of required parameters that are missing or empty.""" +def absent_required_params(params: Dict[str, str]) -> List[str]: return [key for key in params if key not in OPTIONAL_PARAMS and not params[key]] -def get_env_config( +def get_validated_env_config( env_filepath: str = SKALE_DIR_ENV_FILEPATH, sync_node: bool = False ) -> Dict[str, str]: - """Load and validate environment configuration from a file.""" load_env_file(env_filepath) - params = build_params(sync_node) - populate_params(params) - validate_params(params) + params = build_env_params(sync_node) + populate_env_params(params) + validate_env_params(params) return params def load_env_file(env_filepath: str) -> None: - """Check and load environment variables from the given file.""" - try: - if not load_dotenv(dotenv_path=env_filepath): - error_exit(f'Failed to load environment 
from {env_filepath}') - except Exception as e: - error_exit(f'An unexpected error occurred loading environment file {env_filepath}: {e}') + if not load_dotenv(dotenv_path=env_filepath): + error_exit(f'Failed to load environment from {env_filepath}') -def build_params(sync_node: bool = False) -> Dict[str, str]: - """Return a dictionary of parameters based on node type.""" +def build_env_params(sync_node: bool = False) -> Dict[str, str]: + """Return environment variables dictionary with keys based on node type.""" params = REQUIRED_PARAMS_SYNC.copy() if sync_node else REQUIRED_PARAMS.copy() params.update(OPTIONAL_PARAMS) return params -def populate_params(params: Dict[str, str]) -> None: - """Populate params dictionary with environment variable values.""" +def populate_env_params(params: Dict[str, str]) -> None: for key in params: env_value = os.getenv(key) if env_value is not None: params[key] = str(env_value) -def validate_params(params: Dict[str, str]) -> None: - """Validate environment parameters.""" - missing = absent_params(params) +def validate_env_params(params: Dict[str, str]) -> None: + missing = absent_required_params(params) if missing: error_exit(f'Missing required parameters: {missing}') validate_env_type(params['ENV_TYPE']) @@ -136,99 +115,5 @@ def validate_params(params: Dict[str, str]) -> None: def validate_env_type(env_type: str) -> None: - """Validate the environment type.""" if env_type not in ALLOWED_ENV_TYPES: error_exit(f'Allowed ENV_TYPE values are {ALLOWED_ENV_TYPES}. 
Actual: "{env_type}"') - - -def validate_env_alias_or_address( - alias_or_address: str, contract_type: ContractType, endpoint: str -) -> None: - """Validate contract alias or address.""" - if is_contract_address(alias_or_address): - validate_contract_address(alias_or_address, endpoint) - else: - validate_contract_alias(alias_or_address, contract_type, endpoint) - - -def validate_contract_address(contract_address: str, endpoint: str) -> None: - """Validate if the given contract address has deployed code.""" - try: - response = requests.post( - endpoint, - json={ - 'jsonrpc': '2.0', - 'method': 'eth_getCode', - 'params': [contract_address, 'latest'], - 'id': 1, - }, - ) - if response.status_code != 200: - error_exit(f'Failed to verify contract at address {contract_address}') - result = response.json().get('result') - if not result or result in ['0x', '0x0']: - error_exit(f'No contract code found at address {contract_address}') - except requests.RequestException as e: - error_exit(f'Failed to validate contract address: {str(e)}') - - -def get_deployment_url(alias: str, contract_type: ContractType, network_path: str) -> str: - """Construct the deployment URL for the given contract alias and type.""" - return ( - f'https://raw.githubusercontent.com/skalenetwork/skale-contracts/' - f'refs/heads/deployments/{network_path}/{contract_type.value}/{alias}.json' - ) - - -def validate_contract_alias(alias: str, contract_type: ContractType, endpoint: str) -> None: - """Validate if the given contract alias exists in deployments for the current network.""" - try: - chain_id = get_chain_id(endpoint) - metadata = get_network_metadata() - networks = metadata.get('networks', []) - network_path: Optional[str] = None - for net in networks: - if net.get('chainId') == chain_id: - network_path = net.get('path') - break - if not network_path: - error_exit(f'Network with chain ID {chain_id} not found in metadata') - assert isinstance(network_path, str) - deployment_url = 
get_deployment_url(alias, contract_type, network_path) - if requests.get(deployment_url).status_code != 200: - error_exit(f"Contract alias '{alias}' not found for {contract_type.value}") - except requests.RequestException as e: - error_exit(f"Failed to validate contract alias '{alias}': {str(e)}") - - -def get_chain_id(endpoint: str) -> int: - """Fetch chain ID from the JSON-RPC endpoint.""" - try: - response = requests.post( - endpoint, - json={'jsonrpc': '2.0', 'method': 'eth_chainId', 'params': [], 'id': 1}, - ) - if response.status_code != 200: - error_exit('Failed to get chain ID from endpoint') - return int(response.json()['result'], 16) - except requests.RequestException as e: - error_exit(f'Failed to get chain ID: {str(e)}') - # Will never reach this line, but needed for type checking. - return 0 - - -def get_network_metadata() -> Dict: - """Fetch network metadata from GitHub.""" - try: - response = requests.get(METADATA_URL) - if response.status_code != 200: - error_exit('Failed to fetch networks metadata') - return response.json() - except requests.RequestException as e: - error_exit(f'Failed to fetch networks metadata: {str(e)}') - # Will never reach this line, but needed for type checking. 
- return {} - - -class NotValidEnvParamsError(Exception): - """Raised when environment parameters are invalid or missing.""" diff --git a/node_cli/configs/routes.py b/node_cli/configs/routes.py index 285e56bd..967881c3 100644 --- a/node_cli/configs/routes.py +++ b/node_cli/configs/routes.py @@ -46,7 +46,7 @@ class RouteNotFoundException(Exception): - """Raised when requested route is not found in provided API version""" + """Raised when requested route is not found in provided API version.""" def route_exists(blueprint, method, api_version): diff --git a/node_cli/core/host.py b/node_cli/core/host.py index 7f484fb2..f4d4e790 100644 --- a/node_cli/core/host.py +++ b/node_cli/core/host.py @@ -74,7 +74,6 @@ def fix_url(url): def get_flask_secret_key() -> str: - """Retrieve Flask secret key from filesystem.""" secret_key_filepath = os.path.join(NODE_DATA_PATH, 'flask_db_key.txt') if not os.path.exists(secret_key_filepath): @@ -86,12 +85,9 @@ def get_flask_secret_key() -> str: return secret_key except (IOError, OSError) as e: error_exit(f'Failed to read Flask secret key: {e}') - # Will never reach here, but needed for type checking. 
- return '' def prepare_host(env_filepath: str, env_type: str, allocation: bool = False) -> None: - """Initialize SKALE node host environment.""" if not env_filepath or not env_type: error_exit('Missing required parameters for host initialization') diff --git a/node_cli/core/nftables.py b/node_cli/core/nftables.py index f46568f3..50afff82 100644 --- a/node_cli/core/nftables.py +++ b/node_cli/core/nftables.py @@ -32,7 +32,7 @@ NFTABLES_CHAIN_FOLDER_PATH, NFTABLES_MAIN_CONFIG_PATH, NFTABLES_SKALE_BASE_CONFIG_PATH, - NFTABLES_USER_CONFIG_PATH + NFTABLES_USER_CONFIG_PATH, ) from node_cli.utils.helper import get_ssh_port, run_cmd @@ -95,13 +95,7 @@ def __post_init__(self): if self.first_port is not None and self.last_port is None: self.last_port = self.first_port if all( - val is None - for val in ( - self.first_port, - self.last_port, - self.protocol, - self.icmp_type - ) + val is None for val in (self.first_port, self.last_port, self.protocol, self.icmp_type) ): raise NFTablesError('Rule has no meaningful fields') @@ -179,9 +173,9 @@ def update_chain_policy( chain: str, policy: str = POLICY, family: Optional[str] = None, - table: Optional[str] = None + table: Optional[str] = None, ) -> None: - """Update specified chain if it exists. Otherwise do nothing""" + """Update specified chain if it exists. 
Otherwise do nothing.""" family = family or self.family table = table or self.table if self.chain_exists(chain, family=family): @@ -211,7 +205,7 @@ def table_exists(self) -> bool: return False def create_table_if_not_exists(self) -> None: - """Create table only if it doesn't exist""" + """Create table only if it doesn't exist.""" if not self.table_exists(): cmd = {'nftables': [{'add': {'table': {'family': self.family, 'name': self.table}}}]} self.execute_cmd(cmd) @@ -220,7 +214,7 @@ def create_table_if_not_exists(self) -> None: logger.info('Table already exists: %s', self.table) def get_rules(self, chain: str) -> list[dict]: - """Get existing rules for a chain""" + """Get existing rules for a chain.""" try: cmd = f'list chain {self.family} {self.table} {chain}' rc, output, error = self.nft.cmd(cmd) @@ -249,7 +243,6 @@ def rule_exists(self, chain: str, new_rule_expr: list[dict]) -> bool: return False def add_drop_rule(self, rule: Rule) -> None: - expr = [] if rule.first_port: @@ -395,7 +388,7 @@ def add_rule(self, rule: Rule) -> None: rule.chain, rule.protocol, rule.first_port, - rule.last_port + rule.last_port, ) else: logger.info( @@ -403,7 +396,7 @@ def add_rule(self, rule: Rule) -> None: rule.chain, rule.protocol, rule.first_port, - rule.last_port + rule.last_port, ) def remove_rule(self, rule: Rule) -> None: @@ -464,7 +457,7 @@ def remove_rule(self, rule: Rule) -> None: rule.chain, rule.protocol, rule.first_port, - rule.last_port + rule.last_port, ) else: logger.info( @@ -472,7 +465,7 @@ def remove_rule(self, rule: Rule) -> None: rule.chain, rule.protocol, rule.first_port, - rule.last_port + rule.last_port, ) def add_connection_tracking_rule(self, chain: str) -> None: @@ -548,7 +541,7 @@ def get_base_ruleset(self) -> str: return output def setup_firewall(self, enable_monitoring: bool = False) -> None: - """Setup firewall rules""" + """Setup firewall rules.""" logger.info('Configuring firewall rules') try: @@ -568,7 +561,7 @@ def setup_firewall(self, 
enable_monitoring: bool = False) -> None: ServicePort.DNS, ServicePort.HTTPS, ServicePort.HTTP, - ServicePort.WATCHDOG + ServicePort.WATCHDOG, ] if enable_monitoring: tcp_ports.extend([ServicePort.EXPORTER, ServicePort.CADVISOR]) @@ -587,17 +580,14 @@ def setup_firewall(self, enable_monitoring: bool = False) -> None: chain=self.chain, first_port=SGXPort.HTTPS, last_port=SGXPort.ZMQ, - protocol='tcp' + protocol='tcp', ) ) self.add_drop_rule(Rule(chain=self.chain, protocol='udp')) logger.info('Making sure legacy chain has default policy %s', POLICY) self.update_chain_policy( - chain=LEGACY_CHAIN, - policy=POLICY, - family=LEGACY_FAMILY, - table=LEGACY_TABLE + chain=LEGACY_CHAIN, policy=POLICY, family=LEGACY_FAMILY, table=LEGACY_TABLE ) except Exception as e: @@ -606,7 +596,7 @@ def setup_firewall(self, enable_monitoring: bool = False) -> None: logger.info('Firewall rules are configured') def cleanup_legacy_rules(self, ssh: bool = False, dns: bool = False) -> None: - """Cleanups all node-cli generated rules""" + """Cleans up all node-cli generated rules.""" self.remove_drop_rule('tcp') self.remove_drop_rule('udp') tcp_ports = [ @@ -624,7 +614,7 @@ def cleanup_legacy_rules(self, ssh: bool = False, dns: bool = False) -> None: self.remove_rule(Rule(chain=self.chain, protocol='udp', first_port=ServicePort.DNS)) def flush_chain(self, chain: str) -> None: - """Remove all rules from a specific chain""" + """Remove all rules from a specific chain.""" json_cmd = { 'nftables': [ {'flush': {'chain': {'family': self.family, 'table': self.table, 'name': chain}}} @@ -678,10 +668,7 @@ def create_user_config_path() -> None: def update_main_nftables_config() -> None: logger.info('Updating main nftables rules') - content = ( - f'#!/usr/sbin/nft -f\nflush ruleset\n' - f'include "{NFTABLES_SKALE_BASE_CONFIG_PATH}";' - ) + content = f'#!/usr/sbin/nft -f\nflush ruleset\ninclude "{NFTABLES_SKALE_BASE_CONFIG_PATH}";' with open(NFTABLES_MAIN_CONFIG_PATH, 'w') as f: f.write(content) diff --git 
a/node_cli/core/node.py b/node_cli/core/node.py index e8b14f93..ff5da975 100644 --- a/node_cli/core/node.py +++ b/node_cli/core/node.py @@ -42,7 +42,7 @@ TM_INIT_TIMEOUT, ) from node_cli.cli import __version__ -from node_cli.configs.env import get_env_config, SKALE_DIR_ENV_FILEPATH +from node_cli.configs.env import get_validated_env_config, SKALE_DIR_ENV_FILEPATH from node_cli.configs.cli_logger import LOG_DATA_PATH as CLI_LOG_DATA_PATH from node_cli.core.host import is_node_inited, save_env_params, get_flask_secret_key @@ -86,7 +86,7 @@ class NodeStatuses(Enum): - """This class contains possible node statuses""" + """This class contains possible node statuses.""" ACTIVE = 0 LEAVING = 1 @@ -228,11 +228,11 @@ def compose_node_env( save: bool = True, ) -> dict: if env_filepath is not None: - env_params = get_env_config(env_filepath, sync_node=sync_node) + env_params = get_validated_env_config(env_filepath, sync_node=sync_node) if save: save_env_params(env_filepath) else: - env_params = get_env_config(INIT_ENV_FILEPATH, sync_node=sync_node) + env_params = get_validated_env_config(INIT_ENV_FILEPATH, sync_node=sync_node) mnt_dir = SCHAINS_MNT_DIR_SYNC if sync_node else SCHAINS_MNT_DIR_REGULAR @@ -459,7 +459,7 @@ def run_checks( return if disk is None: - env = get_env_config() + env = get_validated_env_config() disk = env['DISK_MOUNTPOINT'] failed_checks = run_host_checks(disk, network, container_config_path) if not failed_checks: diff --git a/node_cli/core/resources.py b/node_cli/core/resources.py index 64467c23..94f3081d 100644 --- a/node_cli/core/resources.py +++ b/node_cli/core/resources.py @@ -24,7 +24,7 @@ import psutil -from node_cli.configs.env import get_env_config +from node_cli.configs.env import get_validated_env_config from node_cli.utils.docker_utils import ensure_volume from node_cli.utils.schain_types import SchainTypes from node_cli.utils.helper import write_json, read_json, run_cmd, safe_load_yml @@ -97,7 +97,7 @@ def 
generate_resource_allocation_config(env_file, force=False) -> None: logger.debug(msg) print(msg) return - env_params = get_env_config(env_file) + env_params = get_validated_env_config(env_file) if env_params is None: return logger.info('Generating resource allocation file ...') diff --git a/node_cli/core/schains.py b/node_cli/core/schains.py index 9b6c625c..44c6d83a 100644 --- a/node_cli/core/schains.py +++ b/node_cli/core/schains.py @@ -15,7 +15,7 @@ SCHAIN_NODE_DATA_PATH, SCHAINS_MNT_DIR_SYNC, ) -from node_cli.configs.env import get_env_config +from node_cli.configs.env import get_validated_env_config from node_cli.utils.helper import get_request, error_exit, safe_load_yml from node_cli.utils.exit_codes import CLIExitCodes @@ -185,7 +185,7 @@ def restore_schain_from_snapshot( schain: str, snapshot_path: str, env_type: Optional[str] = None, schain_type: str = 'medium' ) -> None: if env_type is None: - env_config = get_env_config() + env_config = get_validated_env_config() env_type = env_config['ENV_TYPE'] ensure_schain_volume(schain, schain_type, env_type) block_number = get_block_number_from_path(snapshot_path) diff --git a/node_cli/operations/skale_node.py b/node_cli/operations/skale_node.py index 842a0461..39e5b4c7 100644 --- a/node_cli/operations/skale_node.py +++ b/node_cli/operations/skale_node.py @@ -41,7 +41,7 @@ def update_images(env: dict, sync_node: bool = False) -> None: def download_skale_node(stream: Optional[str] = None, src: Optional[str] = None) -> None: - """Downloads SKALE node config from repo or local directory""" + """Copies SKALE node config from local directory if present. 
If not, downloads it from repo.""" if not src and not stream: error_exit('Either src path or stream must be provided') diff --git a/node_cli/utils/exit_codes.py b/node_cli/utils/exit_codes.py index 85656fb1..cbe0c696 100644 --- a/node_cli/utils/exit_codes.py +++ b/node_cli/utils/exit_codes.py @@ -21,7 +21,8 @@ class CLIExitCodes(IntEnum): - """This class contains exit codes for SKALE CLI tools""" + """This class contains exit codes for SKALE CLI tools.""" + SUCCESS = 0 FAILURE = 1 BAD_API_RESPONSE = 3 diff --git a/node_cli/utils/git_utils.py b/node_cli/utils/git_utils.py index 76e541a1..429c310f 100644 --- a/node_cli/utils/git_utils.py +++ b/node_cli/utils/git_utils.py @@ -40,7 +40,9 @@ def clone_repo(repo_url: str, repo_path: str, ref_name: str) -> None: def sync_repo(repo_url: str, repo_path: str, ref_name: str) -> None: - """Sync Git repository by cloning if not exists or fetching latest changes.""" + """ + Sync Git repository by cloning if it doesn't exist locally. If it exists, fetch latest changes. 
+ """ logger.info(f'Sync repo {repo_url} → {repo_path}') if not os.path.isdir(os.path.join(repo_path, '.git')): diff --git a/node_cli/utils/global_config.py b/node_cli/utils/global_config.py index 4a1c4eaa..4347c6b0 100644 --- a/node_cli/utils/global_config.py +++ b/node_cli/utils/global_config.py @@ -34,7 +34,7 @@ def get_home_dir() -> str: def read_g_config(g_skale_dir: str, g_skale_conf_filepath: str) -> dict: - """Read global SKALE config file, init if not exists""" + """Read global SKALE config file, init if it doesn't exist.""" if not os.path.isfile(g_skale_conf_filepath): return generate_g_config_file(g_skale_dir, g_skale_conf_filepath) with open(g_skale_conf_filepath, encoding='utf-8') as data_file: @@ -42,13 +42,10 @@ def read_g_config(g_skale_dir: str, g_skale_conf_filepath: str) -> dict: def generate_g_config_file(g_skale_dir: str, g_skale_conf_filepath: str) -> dict: - """Init global SKALE config file""" + """Init global SKALE config file.""" print('Generating global SKALE config file...') os.makedirs(g_skale_dir, exist_ok=True) - g_config = { - 'user': get_system_user(), - 'home_dir': get_home_dir() - } + g_config = {'user': get_system_user(), 'home_dir': get_home_dir()} print(f'{g_skale_conf_filepath} content: {g_config}') try: with open(g_skale_conf_filepath, 'w') as outfile: diff --git a/node_cli/utils/helper.py b/node_cli/utils/helper.py index 27c13b2d..07dbef19 100644 --- a/node_cli/utils/helper.py +++ b/node_cli/utils/helper.py @@ -25,7 +25,7 @@ import sys import uuid from urllib.parse import urlparse -from typing import Any, Optional +from typing import Any, Optional, NoReturn import yaml import shutil @@ -157,7 +157,7 @@ def str_to_bool(val): return bool(distutils.util.strtobool(val)) -def error_exit(error_payload: Any, exit_code: CLIExitCodes = CLIExitCodes.FAILURE) -> None: +def error_exit(error_payload: Any, exit_code: CLIExitCodes = CLIExitCodes.FAILURE) -> NoReturn: """Print error message and exit the program with specified exit code. 
Args: @@ -299,7 +299,7 @@ def to_camel_case(snake_str): def streamed_cmd(func): - """Decorator that allow function to print logs into stderr""" + """Decorator that allows function to print logs into stderr.""" @wraps(func) def wrapper(*args, **kwargs): @@ -414,6 +414,5 @@ def get_ssh_port(ssh_service_name='ssh'): return DEFAULT_SSH_PORT -# TODO: Add a more robust check for contract address and see if doesn't conflict with alias. def is_contract_address(value: str) -> bool: - return len(value) == 42 and value.startswith('0x') + return bool(re.fullmatch(r'0x[a-fA-F0-9]{40}', value)) diff --git a/tests/cli/node_test.py b/tests/cli/node_test.py index 9caf17d1..a94e4157 100644 --- a/tests/cli/node_test.py +++ b/tests/cli/node_test.py @@ -338,7 +338,7 @@ def test_restore(mocked_g_config): return_value=CliMeta(version='2.4.0', config_stream='3.0.2'), ), patch('node_cli.operations.base.configure_nftables'), - patch('node_cli.configs.env.validate_params', lambda params: None), + patch('node_cli.configs.env.validate_env_params', lambda params: None), ): result = run_command(restore_node, [backup_path, './tests/test-env']) assert result.exit_code == 0 @@ -364,7 +364,7 @@ def test_restore_no_snapshot(mocked_g_config): return_value=CliMeta(version='2.4.0', config_stream='3.0.2'), ), patch('node_cli.operations.base.configure_nftables'), - patch('node_cli.configs.env.validate_params', lambda params: None), + patch('node_cli.configs.env.validate_env_params', lambda params: None), ): result = run_command(restore_node, [backup_path, './tests/test-env', '--no-snapshot']) assert result.exit_code == 0 @@ -403,7 +403,7 @@ def test_turn_off_maintenance_on(mocked_g_config): mock.patch('subprocess.run', new=subprocess_run_mock), mock.patch('node_cli.core.node.turn_off_op'), mock.patch('node_cli.utils.decorators.is_node_inited', return_value=True), - patch('node_cli.configs.env.validate_params', lambda params: None), + patch('node_cli.configs.env.validate_env_params', lambda params: None), 
): result = run_command_mock( 'node_cli.utils.helper.requests.post', @@ -435,7 +435,7 @@ def test_turn_on_maintenance_off(mocked_g_config): mock.patch('node_cli.core.node.turn_on_op'), mock.patch('node_cli.core.node.is_base_containers_alive'), mock.patch('node_cli.utils.decorators.is_node_inited', return_value=True), - patch('node_cli.configs.env.validate_params', lambda params: None), + patch('node_cli.configs.env.validate_env_params', lambda params: None), ): result = run_command_mock( 'node_cli.utils.helper.requests.post', diff --git a/tests/cli/resources_allocation_test.py b/tests/cli/resources_allocation_test.py index 8836435f..a169d7ac 100644 --- a/tests/cli/resources_allocation_test.py +++ b/tests/cli/resources_allocation_test.py @@ -56,7 +56,7 @@ def test_generate(): resp_mock = response_mock(requests.codes.created) with ( mock.patch('node_cli.core.resources.get_disk_size', return_value=BIG_DISK_SIZE), - mock.patch('node_cli.configs.env.validate_params', lambda params: None), + mock.patch('node_cli.configs.env.validate_env_params', lambda params: None), ): result = run_command_mock( 'node_cli.utils.helper.post_request', resp_mock, generate, ['./tests/test-env', '--yes'] @@ -71,12 +71,12 @@ def test_generate_already_exists(resource_alloc_config): resp_mock = response_mock(requests.codes.created) with ( mock.patch('node_cli.core.resources.get_disk_size', return_value=BIG_DISK_SIZE), - mock.patch('node_cli.configs.env.validate_params', lambda params: None), + mock.patch('node_cli.configs.env.validate_env_params', lambda params: None), ): result = run_command_mock( 'node_cli.utils.helper.post_request', resp_mock, generate, ['./tests/test-env', '--yes'] ) - assert result.output == 'Resource allocation file is already exists\n' + assert result.output == 'Resource allocation file already exists\n' assert result.exit_code == 0 result = run_command_mock( diff --git a/tests/cli/sync_node_test.py b/tests/cli/sync_node_test.py index 4cc4b375..db24799f 100644 --- 
a/tests/cli/sync_node_test.py +++ b/tests/cli/sync_node_test.py @@ -45,7 +45,7 @@ def test_init_sync(mocked_g_config, clean_node_options): mock.patch('node_cli.core.resources.get_disk_size', return_value=BIG_DISK_SIZE), mock.patch('node_cli.operations.base.configure_nftables'), mock.patch('node_cli.utils.decorators.is_node_inited', return_value=False), - mock.patch('node_cli.configs.env.validate_params', lambda params: None), + mock.patch('node_cli.configs.env.validate_env_params', lambda params: None), ): result = run_command(_init_sync, ['./tests/test-env']) @@ -78,7 +78,7 @@ def test_init_sync_archive(mocked_g_config, clean_node_options): mock.patch('node_cli.core.resources.get_disk_size', return_value=BIG_DISK_SIZE), mock.patch('node_cli.operations.base.configure_nftables'), mock.patch('node_cli.utils.decorators.is_node_inited', return_value=False), - mock.patch('node_cli.configs.env.validate_params', lambda params: None), + mock.patch('node_cli.configs.env.validate_env_params', lambda params: None), ): result = run_command(_init_sync, ['./tests/test-env', '--archive']) node_options = NodeOptions() @@ -121,7 +121,7 @@ def test_update_sync(mocked_g_config): 'node_cli.core.node.get_meta_info', return_value=CliMeta(version='2.6.0', config_stream='3.0.2'), ), - mock.patch('node_cli.configs.env.validate_params', lambda params: None), + mock.patch('node_cli.configs.env.validate_env_params', lambda params: None), ): result = run_command(_update_sync, ['./tests/test-env', '--yes']) assert result.exit_code == 0 diff --git a/tests/configs/configs_env_validate_test.py b/tests/configs/configs_env_validate_test.py index c79f6f07..240fadf4 100644 --- a/tests/configs/configs_env_validate_test.py +++ b/tests/configs/configs_env_validate_test.py @@ -4,23 +4,27 @@ import requests from node_cli.configs.env import ( - absent_params, + absent_required_params, load_env_file, - build_params, - populate_params, - get_env_config, - validate_params, + build_env_params, + 
populate_env_params, + get_validated_env_config, + validate_env_params, validate_env_type, + ALLOWED_ENV_TYPES, +) +from node_cli.configs.alias_address_validation import ( validate_env_alias_or_address, validate_contract_address, validate_contract_alias, get_chain_id, get_network_metadata, ContractType, - ALLOWED_ENV_TYPES, ) from node_cli.utils.exit_codes import CLIExitCodes +ENDPOINT = 'http://localhost:8545' + class FakeResponse: def __init__(self, status_code: int, json_data: Optional[dict] = None): @@ -31,14 +35,14 @@ def json(self): return self._json_data -def test_absent_params_returns_missing_keys(): +def test_absent_required_params_returns_missing_keys(): params = { 'A': '', 'B': 'value', 'C': '', 'MONITORING_CONTAINERS': 'optional', } - missing = absent_params(params) + missing = absent_required_params(params) assert 'A' in missing assert 'C' in missing assert 'MONITORING_CONTAINERS' not in missing @@ -61,20 +65,16 @@ def test_load_env_file_not_readable(tmp_path): os.chmod(env_file, 0o644) # reset permissions -def test_build_params_sync(): - params = build_params(sync_node=True) - assert 'SCHAIN_NAME' in params - +@pytest.mark.parametrize('sync_node,has_schain_name', [(True, True), (False, False)]) +def test_build_env_params_sync_and_non_sync(sync_node, has_schain_name): + params = build_env_params(sync_node=sync_node) + assert ('SCHAIN_NAME' in params) == has_schain_name -def test_build_params_non_sync(): - params = build_params(sync_node=False) - assert 'SCHAIN_NAME' not in params - -def test_populate_params_updates_from_environ(monkeypatch): +def test_populate_env_params_updates_from_environ(monkeypatch): params = {'FOO': ''} monkeypatch.setenv('FOO', 'bar') - populate_params(params) + populate_env_params(params) assert params['FOO'] == 'bar' @@ -135,22 +135,19 @@ def test_get_network_metadata_failure(requests_mock): def test_validate_contract_address_success(requests_mock): - endpoint = 'http://localhost:8545' - requests_mock.post(endpoint, 
json={'result': '0x123'}) - validate_contract_address('0x' + 'a' * 40, endpoint) + requests_mock.post(ENDPOINT, json={'result': '0x123'}) + validate_contract_address('0x' + 'a' * 40, ENDPOINT) def test_validate_contract_address_no_code(requests_mock): - endpoint = 'http://localhost:8545' - requests_mock.post(endpoint, json={'result': '0x'}) + requests_mock.post(ENDPOINT, json={'result': '0x'}) with pytest.raises(SystemExit) as excinfo: - validate_contract_address('0x' + 'a' * 40, endpoint) + validate_contract_address('0x' + 'a' * 40, ENDPOINT) assert excinfo.value.code == CLIExitCodes.FAILURE.value def test_validate_contract_alias_success(requests_mock): - endpoint = 'http://localhost:8545' - requests_mock.post(endpoint, json={'result': '0x1'}) + requests_mock.post(ENDPOINT, json={'result': '0x1'}) metadata_url = ( 'https://raw.githubusercontent.com/skalenetwork/skale-contracts/' 'refs/heads/deployments/metadata.json' @@ -162,32 +159,29 @@ def test_validate_contract_alias_success(requests_mock): 'refs/heads/deployments/mainnet/skale-manager/test-alias.json' ) requests_mock.get(alias_url, status_code=200) - validate_contract_alias('test-alias', ContractType.MANAGER, endpoint) + validate_contract_alias('test-alias', ContractType.MANAGER, ENDPOINT) def test_validate_contract_alias_network_missing(requests_mock): - endpoint = 'http://localhost:8545' - requests_mock.post(endpoint, json={'result': '0x1'}) + requests_mock.post(ENDPOINT, json={'result': '0x1'}) metadata_url = ( 'https://raw.githubusercontent.com/skalenetwork/skale-contracts/' 'refs/heads/deployments/metadata.json' ) requests_mock.get(metadata_url, json={'networks': []}, status_code=200) with pytest.raises(SystemExit) as excinfo: - validate_contract_alias('test-alias', ContractType.MANAGER, endpoint) + validate_contract_alias('test-alias', ContractType.MANAGER, ENDPOINT) assert excinfo.value.code == CLIExitCodes.FAILURE.value def test_validate_env_alias_or_address_with_address(requests_mock): - endpoint = 
'http://localhost:8545' addr = '0x' + 'b' * 40 - requests_mock.post(endpoint, json={'result': '0x1'}) - validate_env_alias_or_address(addr, ContractType.IMA, endpoint) + requests_mock.post(ENDPOINT, json={'result': '0x1'}) + validate_env_alias_or_address(addr, ContractType.IMA, ENDPOINT) def test_validate_env_alias_or_address_with_alias(requests_mock): - endpoint = 'http://localhost:8545' - requests_mock.post(endpoint, json={'result': '0x1'}) + requests_mock.post(ENDPOINT, json={'result': '0x1'}) metadata_url = ( 'https://raw.githubusercontent.com/skalenetwork/skale-contracts/' 'refs/heads/deployments/metadata.json' @@ -199,10 +193,10 @@ def test_validate_env_alias_or_address_with_alias(requests_mock): 'refs/heads/deployments/mainnet/mainnet-ima/test-alias.json' ) requests_mock.get(alias_url, status_code=200) - validate_env_alias_or_address('test-alias', ContractType.IMA, endpoint) + validate_env_alias_or_address('test-alias', ContractType.IMA, ENDPOINT) -def test_validate_params_missing_key(): +def test_validate_env_params_missing_key(): populated_params = { 'CONTAINER_CONFIGS_STREAM': 'value', 'ENDPOINT': 'http://localhost:8545', @@ -214,11 +208,11 @@ def test_validate_params_missing_key(): 'ENV_TYPE': 'mainnet', } with pytest.raises(SystemExit) as excinfo: - validate_params(populated_params) + validate_env_params(populated_params) assert excinfo.value.code == CLIExitCodes.FAILURE.value -def test_validate_params_success(valid_env_params, requests_mock): +def test_validate_env_params_success(valid_env_params, requests_mock): endpoint = valid_env_params['ENDPOINT'] requests_mock.post(endpoint, json={'result': '0x1'}) metadata_url = ( @@ -237,14 +231,13 @@ def test_validate_params_success(valid_env_params, requests_mock): 'refs/heads/deployments/mainnet/skale-manager/test-manager.json' ) requests_mock.get(manager_alias_url, status_code=200) - validate_params(valid_env_params) + validate_env_params(valid_env_params) -def test_get_env_config_success( +def 
test_get_validated_env_config_success( valid_env_file, mock_chain_response, mock_networks_metadata, requests_mock ): - endpoint = 'http://localhost:8545' - requests_mock.post(endpoint, json=mock_chain_response) + requests_mock.post(ENDPOINT, json=mock_chain_response) metadata_url = ( 'https://raw.githubusercontent.com/skalenetwork/skale-contracts/' 'refs/heads/deployments/metadata.json' @@ -260,20 +253,20 @@ def test_get_env_config_success( 'refs/heads/deployments/mainnet/skale-manager/test-manager.json' ) requests_mock.get(manager_alias_url, status_code=200) - config = get_env_config(valid_env_file) + config = get_validated_env_config(valid_env_file) assert config['ENDPOINT'] == 'http://localhost:8545' assert config['ENV_TYPE'] in ALLOWED_ENV_TYPES -def test_get_env_config_missing_file(): +def test_get_validated_env_config_missing_file(): with pytest.raises(SystemExit) as excinfo: - get_env_config('nonexistent.env') + get_validated_env_config('nonexistent.env') assert excinfo.value.code == CLIExitCodes.FAILURE.value -def test_get_env_config_unreadable_file(valid_env_file): +def test_get_validated_env_config_unreadable_file(valid_env_file): os.chmod(valid_env_file, 0o000) with pytest.raises(SystemExit) as excinfo: - get_env_config(valid_env_file) + get_validated_env_config(valid_env_file) assert excinfo.value.code == CLIExitCodes.FAILURE.value os.chmod(valid_env_file, 0o644) diff --git a/tests/conftest.py b/tests/conftest.py index bea8c81f..a7c2c0cd 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -16,7 +16,6 @@ # # You should have received a copy of the GNU Lesser General Public License # along with this program. If not, see . -"""SKALE config test""" import json import os @@ -296,9 +295,6 @@ def tmp_sync_datadir(): @pytest.fixture def valid_env_params(): - """ - Return a dictionary of environment parameters that mimics the contents of test-env. 
- """ return { 'ENDPOINT': 'http://localhost:8545', 'IMA_ENDPOINT': 'http://127.0.01', @@ -320,33 +316,30 @@ def valid_env_params(): @pytest.fixture def valid_env_file(valid_env_params): - """ - Create a temporary .env file whose contents mimic test-env. - - This file is created using the key/value pairs from valid_env_params, - one per line in the form KEY=VALUE. - """ - with tempfile.NamedTemporaryFile(mode='w', delete=False) as f: - for key, value in valid_env_params.items(): - f.write(f'{key}={value}\n') - file_name = f.name - yield file_name - os.unlink(file_name) + """Create a temporary .env file whose contents mimic test-env.""" + file_name = None + try: + with tempfile.NamedTemporaryFile(mode='w', delete=False) as f: + for key, value in valid_env_params.items(): + f.write(f'{key}={value}\n') + file_name = f.name + yield file_name + finally: + if file_name: + os.unlink(file_name) @pytest.fixture def mock_chain_response(): - """Return a fake RPC response for chain ID 1.""" return { 'jsonrpc': '2.0', 'id': 1, - 'result': '0x1', # Represents chain ID 1 + 'result': '0x1', } @pytest.fixture def mock_networks_metadata(): - """Return fake network metadata that includes chain ID 1.""" return { 'networks': [ {'chainId': 1, 'name': 'Mainnet', 'path': 'mainnet'}, diff --git a/tests/core/core_node_test.py b/tests/core/core_node_test.py index 6347b157..f45a9be3 100644 --- a/tests/core/core_node_test.py +++ b/tests/core/core_node_test.py @@ -151,7 +151,7 @@ def test_init_node(no_resource_file): # todo: write new init node test mock.patch('node_cli.core.node.init_op'), mock.patch('node_cli.core.node.is_base_containers_alive', return_value=True), mock.patch('node_cli.utils.helper.post_request', resp_mock), - mock.patch('node_cli.configs.env.validate_params', lambda params: None), + mock.patch('node_cli.configs.env.validate_env_params', lambda params: None), ): init(env_filepath) assert os.path.isfile(RESOURCE_ALLOCATION_FILEPATH) @@ -176,7 +176,7 @@ def 
test_update_node(mocked_g_config, resource_file): 'node_cli.core.node.get_meta_info', return_value=CliMeta(version='2.6.0', config_stream='3.0.2'), ), - mock.patch('node_cli.configs.env.validate_params', lambda params: None), + mock.patch('node_cli.configs.env.validate_env_params', lambda params: None), ): with mock.patch( 'node_cli.utils.helper.requests.get', return_value=safe_update_api_response() diff --git a/tests/core/nftables_test.py b/tests/core/nftables_test.py index 74852585..b6490b08 100644 --- a/tests/core/nftables_test.py +++ b/tests/core/nftables_test.py @@ -9,7 +9,7 @@ @pytest.fixture(scope='module') def nft_manager(): - """Returns a NFTablesManager instance""" + """Returns a NFTablesManager instance.""" manager = NFTablesManager(family='inet', table='filter') try: yield manager @@ -19,7 +19,7 @@ def nft_manager(): @pytest.fixture def mock_nft_output(): - """Fixture for mock nftables output""" + """Fixture for mocking nftables output.""" return { 'nftables': [ {'chain': {'family': 'inet', 'table': 'filter', 'name': 'INPUT', 'handle': 1}}, @@ -46,7 +46,7 @@ def mock_nft_output(): def test_init(nft_manager): - """Test initialization""" + """Test initialization.""" assert nft_manager.family == 'inet' assert nft_manager.table == 'filter' assert isinstance(nft_manager.nft, nftables.Nftables) @@ -54,7 +54,7 @@ def test_init(nft_manager): @patch('nftables.Nftables.json_cmd') def test_execute_cmd_success(mock_json_cmd, nft_manager): - """Test successful command execution""" + """Test successful command execution.""" mock_json_cmd.return_value = (0, '', '') cmd = {'nftables': [{'add': {'table': {'family': 'inet', 'name': 'filter'}}}]} @@ -64,7 +64,7 @@ def test_execute_cmd_success(mock_json_cmd, nft_manager): @patch('nftables.Nftables.json_cmd') def test_execute_cmd_failure(mock_json_cmd, nft_manager): - """Test command execution failure""" + """Test command execution failure.""" mock_json_cmd.return_value = (1, '', 'Error message') cmd = {'nftables': [{'add': 
{'table': {'family': 'inet', 'name': 'filter'}}}]} @@ -75,7 +75,7 @@ def test_execute_cmd_failure(mock_json_cmd, nft_manager): @patch('nftables.Nftables.cmd') def test_get_chains(mock_cmd, nft_manager, mock_nft_output): - """Test getting chains""" + """Test getting chains.""" mock_cmd.return_value = (0, json.dumps(mock_nft_output), '') chains = nft_manager.get_chains() @@ -85,7 +85,7 @@ def test_get_chains(mock_cmd, nft_manager, mock_nft_output): @patch('nftables.Nftables.cmd') def test_chain_exists(mock_cmd, nft_manager, mock_nft_output): - """Test chain existence check""" + """Test chain existence check.""" mock_cmd.return_value = (0, json.dumps(mock_nft_output), '') assert nft_manager.chain_exists('INPUT') @@ -95,7 +95,7 @@ def test_chain_exists(mock_cmd, nft_manager, mock_nft_output): @patch.object(NFTablesManager, 'execute_cmd') @patch.object(NFTablesManager, 'chain_exists') def test_create_chain_if_not_exists(mock_exists, mock_execute, nft_manager): - """Test chain creation""" + """Test chain creation.""" mock_exists.return_value = False nft_manager.create_chain_if_not_exists('INPUT', 'input') @@ -113,7 +113,7 @@ def test_create_chain_if_not_exists(mock_exists, mock_execute, nft_manager): @patch.object(NFTablesManager, 'execute_cmd') @patch.object(NFTablesManager, 'rule_exists') def test_add_rule(mock_exists, mock_execute, nft_manager, rule_data): - """Test rule addition with different types""" + """Test rule addition with different types.""" mock_exists.return_value = False rule = Rule(**rule_data) @@ -123,7 +123,7 @@ def test_add_rule(mock_exists, mock_execute, nft_manager, rule_data): @patch.object(NFTablesManager, 'execute_cmd') def test_setup_firewall(mock_execute, nft_manager): - """Test complete firewall setup""" + """Test complete firewall setup.""" with patch.multiple( NFTablesManager, table_exists=Mock(return_value=False), @@ -135,7 +135,7 @@ def test_setup_firewall(mock_execute, nft_manager): def test_invalid_protocol(nft_manager): - """Test adding 
rule with invalid protocol""" + """Test adding rule with invalid protocol.""" rule = Rule(chain='INPUT', protocol='invalid', first_port=80) with pytest.raises(Exception): nft_manager.add_rule(rule) From bf876d96745867e6a3bda664b585d0613604f08b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20Gon=C3=A7alves?= Date: Mon, 14 Apr 2025 15:22:41 +0100 Subject: [PATCH 020/332] Fixes typo in resource allocation file debug message --- node_cli/core/resources.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/node_cli/core/resources.py b/node_cli/core/resources.py index 94f3081d..5762ebad 100644 --- a/node_cli/core/resources.py +++ b/node_cli/core/resources.py @@ -93,7 +93,7 @@ def compose_resource_allocation_config(env_type: str, params_by_env_type: Dict = def generate_resource_allocation_config(env_file, force=False) -> None: if not force and os.path.isfile(RESOURCE_ALLOCATION_FILEPATH): - msg = 'Resource allocation file is already exists' + msg = 'Resource allocation file already exists' logger.debug(msg) print(msg) return From 22bb7a8e066c8e4fcf4d749497a311a564953de1 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20Gon=C3=A7alves?= Date: Mon, 14 Apr 2025 15:45:19 +0100 Subject: [PATCH 021/332] Adjusted tests to expect PermissionError for unreadable files & Updated test pipeline to only run tests on push --- .github/workflows/test.yml | 2 +- tests/configs/configs_env_validate_test.py | 6 ++---- 2 files changed, 3 insertions(+), 5 deletions(-) diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index a8b2eb80..7ca8a514 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -1,5 +1,5 @@ name: Test -on: [push, pull_request] +on: [push] jobs: test: diff --git a/tests/configs/configs_env_validate_test.py b/tests/configs/configs_env_validate_test.py index 240fadf4..ed778d40 100644 --- a/tests/configs/configs_env_validate_test.py +++ b/tests/configs/configs_env_validate_test.py @@ -59,9 +59,8 @@ def 
test_load_env_file_not_readable(tmp_path): env_file = tmp_path / 'test.env' env_file.write_text('KEY=value') os.chmod(env_file, 0o000) - with pytest.raises(SystemExit) as excinfo: + with pytest.raises(PermissionError): load_env_file(str(env_file)) - assert excinfo.value.code == CLIExitCodes.FAILURE.value os.chmod(env_file, 0o644) # reset permissions @@ -266,7 +265,6 @@ def test_get_validated_env_config_missing_file(): def test_get_validated_env_config_unreadable_file(valid_env_file): os.chmod(valid_env_file, 0o000) - with pytest.raises(SystemExit) as excinfo: + with pytest.raises(PermissionError): get_validated_env_config(valid_env_file) - assert excinfo.value.code == CLIExitCodes.FAILURE.value os.chmod(valid_env_file, 0o644) From 5d15db7ffdc587d63c3b9b8073354aa8188a68bf Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20Gon=C3=A7alves?= Date: Mon, 14 Apr 2025 18:15:36 +0100 Subject: [PATCH 022/332] Addresses PR #839 comments. --- node_cli/configs/alias_address_validation.py | 2 +- node_cli/core/docker_config.py | 53 ++++++-------------- node_cli/operations/skale_node.py | 3 +- node_cli/utils/git_utils.py | 31 +++++------- 4 files changed, 31 insertions(+), 58 deletions(-) diff --git a/node_cli/configs/alias_address_validation.py b/node_cli/configs/alias_address_validation.py index ea74b81d..e0dcaa5e 100644 --- a/node_cli/configs/alias_address_validation.py +++ b/node_cli/configs/alias_address_validation.py @@ -2,7 +2,7 @@ # # This file is part of node-cli # -# Copyright (C) 2019-Present SKALE Labs +# Copyright (C) 2025-Present SKALE Labs # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as published by diff --git a/node_cli/core/docker_config.py b/node_cli/core/docker_config.py index d5241be3..d0f07051 100644 --- a/node_cli/core/docker_config.py +++ b/node_cli/core/docker_config.py @@ -73,7 +73,7 @@ class DockerConfigResult(enum.IntEnum): def ensure_docker_service_config_dir( - 
docker_service_dir: Path = DOCKER_SERVICE_CONFIG_DIR + docker_service_dir: Path = DOCKER_SERVICE_CONFIG_DIR, ) -> DockerConfigResult: logger.info('Ensuring docker service dir') if not os.path.isdir(docker_service_dir): @@ -84,8 +84,7 @@ def ensure_docker_service_config_dir( def ensure_service_overriden_config( - config_filepath: - Optional[Path] = DOCKER_SERVICE_CONFIG_PATH + config_filepath: Optional[Path] = DOCKER_SERVICE_CONFIG_PATH, ) -> DockerConfigResult: logger.info('Ensuring docker service override config') config = get_content(config_filepath) @@ -95,8 +94,8 @@ def ensure_service_overriden_config( '[Service]', 'ExecStart=', 'ExecStart=/usr/bin/dockerd', - f'ExecStartPre=/bin/mkdir -p {socket_dir}' - ] + f'ExecStartPre=/bin/mkdir -p {socket_dir}', + ] ) if not os.path.isfile(config_filepath): @@ -105,37 +104,28 @@ def ensure_service_overriden_config( config_file.write(expected_config) return DockerConfigResult.CHANGED elif config != expected_config: - raise OverridenConfigExsitsError( - f'{config_filepath} already exists' - ) + raise OverridenConfigExsitsError(f'{config_filepath} already exists') return DockerConfigResult.UNCHANGED def ensure_docker_daemon_config( - daemon_config_path: Path = DOCKER_DEAMON_CONFIG_PATH, - daemon_hosts: Path = DOCKER_DAEMON_HOSTS + daemon_config_path: Path = DOCKER_DEAMON_CONFIG_PATH, daemon_hosts: Path = DOCKER_DAEMON_HOSTS ) -> None: logger.info('Ensuring docker daemon config') config = {} if os.path.isfile(daemon_config_path): with open(daemon_config_path, 'r') as daemon_config: config = json.load(daemon_config) - if config.get('live-restore') is True and \ - config.get('hosts') == daemon_hosts: + if config.get('live-restore') is True and config.get('hosts') == daemon_hosts: return DockerConfigResult.UNCHANGED - config.update({ - 'live-restore': True, - 'hosts': daemon_hosts - }) + config.update({'live-restore': True, 'hosts': daemon_hosts}) logger.info('Updating docker daemon config') with open(daemon_config_path, 'w') as 
daemon_config: json.dump(config, daemon_config) return DockerConfigResult.CHANGED -def restart_docker_service( - docker_service_name: str = 'docker' -) -> DockerConfigResult: +def restart_docker_service(docker_service_name: str = 'docker') -> DockerConfigResult: logger.info('Executing daemon-reload') run_cmd(['systemctl', 'daemon-reload']) @@ -149,18 +139,14 @@ def is_socket_existed(socket_path: Path = DOCKER_SOCKET_PATH) -> bool: def wait_for_socket_initialization( - socket_path: Path = DOCKER_SOCKET_PATH, - allowed_time: int = 300 + socket_path: Path = DOCKER_SOCKET_PATH, allowed_time: int = 300 ) -> None: logger.info('Waiting for docker inititalization') start_ts = time.time() - while int(time.time() - start_ts) < allowed_time and \ - not is_socket_existed(socket_path): + while int(time.time() - start_ts) < allowed_time and not is_socket_existed(socket_path): time.sleep(2) if not is_socket_existed(socket_path): - raise SocketInitTimeoutError( - f'Socket was not able to init in {allowed_time}' - ) + raise SocketInitTimeoutError(f'Socket was not able to init in {allowed_time}') logger.info('Socket initialized successfully') @@ -172,16 +158,10 @@ def ensure_run_dir(run_dir: Path = SKALE_RUN_DIR) -> DockerConfigResult: def assert_no_containers(ignore: Tuple[str] = ()): - containers = [ - c.name - for c in get_containers() - if c.name not in ignore - ] + containers = [c.name for c in get_containers() if c.name not in ignore] if len(containers) > 0: logger.fatal('%s containers exist', ' '.join(containers)) - raise ContainersExistError( - f'Existed containers amount {len(containers)}' - ) + raise ContainersExistError(f'Existed containers amount {len(containers)}') def configure_docker() -> None: @@ -190,13 +170,12 @@ def configure_docker() -> None: ensure_run_dir, ensure_docker_service_config_dir, ensure_service_overriden_config, - ensure_docker_daemon_config + ensure_docker_daemon_config, ) results = (task() for task in pre_restart_tasks) results = list(results) 
logger.info('Docker config changes %s', results) - if not is_socket_existed() or \ - any(r == DockerConfigResult.CHANGED for r in results): + if not is_socket_existed() or any(r == DockerConfigResult.CHANGED for r in results): restart_docker_service() wait_for_socket_initialization() diff --git a/node_cli/operations/skale_node.py b/node_cli/operations/skale_node.py index 39e5b4c7..5ccb1a85 100644 --- a/node_cli/operations/skale_node.py +++ b/node_cli/operations/skale_node.py @@ -56,7 +56,8 @@ def download_skale_node(stream: Optional[str] = None, src: Optional[str] = None) logger.info(f'Syncing config files from {src}') rsync_dirs(src, dest) else: - assert stream + if not stream: + error_exit('Stream must be provided if src is not specified in download_skale_node') logger.info(f'Cloning config files from {SKALE_NODE_REPO_URL} ({stream})') clone_repo(SKALE_NODE_REPO_URL, dest, stream) diff --git a/node_cli/utils/git_utils.py b/node_cli/utils/git_utils.py index 429c310f..4c31c219 100644 --- a/node_cli/utils/git_utils.py +++ b/node_cli/utils/git_utils.py @@ -31,12 +31,9 @@ def check_is_branch(repo: Repo, ref_name: str) -> bool: def clone_repo(repo_url: str, repo_path: str, ref_name: str) -> None: - try: - logger.info(f'Cloning {repo_url} → {repo_path}') - Repo.clone_from(repo_url, repo_path) - fetch_pull_repo(repo_path, ref_name) - except Exception as e: - error_exit(f'Unexpected error cloning repository: {str(e)}') + logger.info(f'Cloning {repo_url} → {repo_path}') + Repo.clone_from(repo_url, repo_path) + fetch_pull_repo(repo_path, ref_name) def sync_repo(repo_url: str, repo_path: str, ref_name: str) -> None: @@ -54,19 +51,15 @@ def sync_repo(repo_url: str, repo_path: str, ref_name: str) -> None: def fetch_pull_repo(repo_path: str, ref_name: str) -> None: """Fetch latest changes and checkout/pull specific git reference.""" - try: - repo = Repo(repo_path) - repo_name = os.path.basename(repo.working_dir) - - logger.info(f'Fetching latest changes for {repo_name}') - 
repo.remotes.origin.fetch() + repo = Repo(repo_path) + repo_name = os.path.basename(repo.working_dir) - logger.info(f'Checking out {ref_name} in {repo_name}') - repo.git.checkout(ref_name) + logger.info(f'Fetching latest changes for {repo_name}') + repo.remotes.origin.fetch() - if check_is_branch(repo, ref_name): - logger.info(f'Pulling latest changes for branch {ref_name}') - repo.remotes.origin.pull() + logger.info(f'Checking out {ref_name} in {repo_name}') + repo.git.checkout(ref_name) - except Exception as e: - error_exit(f'Repository operation failed: {str(e)}') + if check_is_branch(repo, ref_name): + logger.info(f'Pulling latest changes for branch {ref_name}') + repo.remotes.origin.pull() From 6807baba2693e8c7741ad4cde8813fe584988aa9 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20Gon=C3=A7alves?= Date: Mon, 14 Apr 2025 18:22:57 +0100 Subject: [PATCH 023/332] Removes unused helper import. --- node_cli/utils/git_utils.py | 1 - 1 file changed, 1 deletion(-) diff --git a/node_cli/utils/git_utils.py b/node_cli/utils/git_utils.py index 4c31c219..38e5a11c 100644 --- a/node_cli/utils/git_utils.py +++ b/node_cli/utils/git_utils.py @@ -21,7 +21,6 @@ import logging from git.repo.base import Repo -from node_cli.utils.helper import error_exit logger = logging.getLogger(__name__) From a40c30f7709ab9ae04cb7681ef2eae37a2cdceda Mon Sep 17 00:00:00 2001 From: Dmytro Date: Mon, 14 Apr 2025 18:44:00 +0100 Subject: [PATCH 024/332] Apply ruff fomatter --- node_cli/cli/exit.py | 28 ++-- node_cli/cli/health.py | 7 +- node_cli/cli/logs.py | 13 +- node_cli/cli/lvmpy.py | 4 +- node_cli/cli/resources_allocation.py | 18 +-- node_cli/cli/schains.py | 46 +++--- node_cli/cli/wallet.py | 16 ++- node_cli/core/checks.py | 163 +++++++--------------- node_cli/core/health.py | 21 +-- node_cli/core/logs.py | 6 +- node_cli/core/node_options.py | 5 +- node_cli/core/ssl/check.py | 109 +++++++-------- node_cli/core/ssl/status.py | 19 +-- node_cli/core/ssl/utils.py | 6 +- node_cli/core/wallet.py 
| 5 +- node_cli/migrations/focal_to_jammy.py | 4 +- node_cli/operations/volume.py | 18 +-- tests/cli/exit_test.py | 19 ++- tests/cli/health_test.py | 135 +++++++++--------- tests/cli/logs_test.py | 4 +- tests/cli/schains_test.py | 193 +++++++++++++++----------- tests/cli/wallet_test.py | 38 ++--- tests/core/core_checks_test.py | 49 +++---- tests/core/core_logs_test.py | 9 +- tests/core/host/docker_config_test.py | 14 +- tests/core/host/kernel_config_test.py | 5 +- tests/core/migration_test.py | 5 +- tests/docker_utils_test.py | 15 +- tests/helper.py | 27 +--- tests/resources_test.py | 53 +++++-- tests/routes_test.py | 14 +- tests/simple_container/main.py | 2 +- tests/tools_meta_test.py | 39 ++++-- tests/utils/decorators_test.py | 3 + tests/utils/global_config_test.py | 1 - 35 files changed, 516 insertions(+), 597 deletions(-) diff --git a/node_cli/cli/exit.py b/node_cli/cli/exit.py index 1ef0223a..92322bd9 100644 --- a/node_cli/cli/exit.py +++ b/node_cli/cli/exit.py @@ -36,20 +36,21 @@ def exit_cli(): pass -@exit_cli.group('exit', help="Exit commands") +@exit_cli.group('exit', help='Exit commands') def node_exit(): pass -@node_exit.command('start', help="Start exiting process") -@click.option('--yes', is_flag=True, callback=abort_if_false, - expose_value=False, - prompt='Are you sure you want to destroy your SKALE node?') +@node_exit.command('start', help='Start exiting process') +@click.option( + '--yes', + is_flag=True, + callback=abort_if_false, + expose_value=False, + prompt='Are you sure you want to destroy your SKALE node?', +) def start(): - status, payload = post_request( - blueprint=BLUEPRINT_NAME, - method='exit/start' - ) + status, payload = post_request(blueprint=BLUEPRINT_NAME, method='exit/start') if status == 'ok': msg = TEXTS['exit']['start'] logger.info(msg) @@ -58,13 +59,10 @@ def start(): error_exit(payload, exit_code=CLIExitCodes.BAD_API_RESPONSE) -@node_exit.command('status', help="Get exit process status") +@node_exit.command('status', 
help='Get exit process status') @click.option('--format', '-f', type=click.Choice(['json', 'text'])) def status(format): - status, payload = get_request( - blueprint=BLUEPRINT_NAME, - method='exit/status' - ) + status, payload = get_request(blueprint=BLUEPRINT_NAME, method='exit/status') if status == 'ok': exit_status = payload if format == 'json': @@ -75,6 +73,6 @@ def status(format): error_exit(payload, exit_code=CLIExitCodes.BAD_API_RESPONSE) -@node_exit.command('finalize', help="Finalize exit process") +@node_exit.command('finalize', help='Finalize exit process') def finalize(): pass diff --git a/node_cli/cli/health.py b/node_cli/cli/health.py index fef51a3d..de54d186 100644 --- a/node_cli/cli/health.py +++ b/node_cli/cli/health.py @@ -44,12 +44,7 @@ def containers(all): @health.command(help=TEXTS['schains_checks']['help']) -@click.option( - '--json', - 'json_format', - help=G_TEXTS['common']['json'], - is_flag=True -) +@click.option('--json', 'json_format', help=G_TEXTS['common']['json'], is_flag=True) def schains(json_format: bool) -> None: get_schains_checks(json_format) diff --git a/node_cli/cli/logs.py b/node_cli/cli/logs.py index 7472bcbf..de11e889 100644 --- a/node_cli/cli/logs.py +++ b/node_cli/cli/logs.py @@ -30,12 +30,12 @@ def logs_cli(): pass -@logs_cli.group(help="Logs commands") +@logs_cli.group(help='Logs commands') def logs(): pass -@logs.command(help="Fetch the logs of the node-cli") +@logs.command(help='Fetch the logs of the node-cli') @click.option('--debug', is_flag=True) def cli(debug): filepath = DEBUG_LOG_FILEPATH if debug else LOG_FILEPATH @@ -43,13 +43,8 @@ def cli(debug): print(fin.read()) -@logs.command(help="Dump all logs from the connected node") -@click.option( - '--container', - '-c', - help='Dump logs only from specified container', - default=None -) +@logs.command(help='Dump all logs from the connected node') +@click.option('--container', '-c', help='Dump logs only from specified container', default=None) @click.argument('path') 
def dump(container, path): res = create_logs_dump(path, container) diff --git a/node_cli/cli/lvmpy.py b/node_cli/cli/lvmpy.py index 473defa8..d795a78e 100644 --- a/node_cli/cli/lvmpy.py +++ b/node_cli/cli/lvmpy.py @@ -44,7 +44,7 @@ def health(): is_flag=True, callback=abort_if_false, expose_value=False, - prompt=TEXTS['run']['prompt'] + prompt=TEXTS['run']['prompt'], ) def run(): run_lvmpy() @@ -56,7 +56,7 @@ def run(): is_flag=True, callback=abort_if_false, expose_value=False, - prompt=TEXTS['heal']['prompt'] + prompt=TEXTS['heal']['prompt'], ) def heal(): heal_service() diff --git a/node_cli/cli/resources_allocation.py b/node_cli/cli/resources_allocation.py index c8ed758c..768690ed 100644 --- a/node_cli/cli/resources_allocation.py +++ b/node_cli/cli/resources_allocation.py @@ -22,7 +22,7 @@ from node_cli.core.resources import ( get_resource_allocation_info, - generate_resource_allocation_config + generate_resource_allocation_config, ) from node_cli.utils.helper import abort_if_false, safe_load_texts @@ -34,12 +34,12 @@ def resources_allocation_cli(): pass -@resources_allocation_cli.group(help="Resources allocation commands") +@resources_allocation_cli.group(help='Resources allocation commands') def resources_allocation(): pass -@resources_allocation.command('show', help="Show resources allocation file") +@resources_allocation.command('show', help='Show resources allocation file') def show(): resource_allocation_info = get_resource_allocation_info() if resource_allocation_info: @@ -48,15 +48,15 @@ def show(): print('No resources allocation file on this machine') -@resources_allocation.command('generate', - help="Generate/update resources allocation file") +@resources_allocation.command('generate', help='Generate/update resources allocation file') @click.argument('env_file') @click.option( - '--yes', is_flag=True, callback=abort_if_false, + '--yes', + is_flag=True, + callback=abort_if_false, expose_value=False, - prompt='Are you sure you want to generate/update 
resource allocation file?' + prompt='Are you sure you want to generate/update resource allocation file?', ) -@click.option('--force', '-f', is_flag=True, - help='Rewrite if already exists') +@click.option('--force', '-f', is_flag=True, help='Rewrite if already exists') def generate(env_file, force): generate_resource_allocation_config(env_file=env_file, force=force) diff --git a/node_cli/cli/schains.py b/node_cli/cli/schains.py index c6ef4486..188b39fe 100644 --- a/node_cli/cli/schains.py +++ b/node_cli/cli/schains.py @@ -30,7 +30,7 @@ show_config, show_dkg_info, show_schains, - toggle_schain_repair_mode + toggle_schain_repair_mode, ) @@ -39,17 +39,13 @@ def schains_cli() -> None: pass -@schains_cli.group('schains', help="Node sChains commands") +@schains_cli.group('schains', help='Node sChains commands') def schains() -> None: pass -@schains.command(help="List of sChains served by connected node") -@click.option( - '-n', '--names', - help='Shows only chain names', - is_flag=True -) +@schains.command(help='List of sChains served by connected node') +@click.option('-n', '--names', help='Shows only chain names', is_flag=True) def ls(names: bool) -> None: if names: schains: str = get_schains_by_artifacts() @@ -58,17 +54,13 @@ def ls(names: bool) -> None: show_schains() -@schains.command(help="DKG statuses for each sChain on the node") -@click.option( - '--all', '-a', 'all_', - help='Shows active and deleted sChains', - is_flag=True -) +@schains.command(help='DKG statuses for each sChain on the node') +@click.option('--all', '-a', 'all_', help='Shows active and deleted sChains', is_flag=True) def dkg(all_: bool) -> None: show_dkg_info(all_) -@schains.command('config', help="sChain config") +@schains.command('config', help='sChain config') @click.argument('schain_name') def get_schain_config(schain_name: str) -> None: show_config(schain_name) @@ -82,15 +74,19 @@ def show_rules(schain_name: str) -> None: @schains.command('repair', help='Toggle schain repair mode') 
@click.argument('schain_name') -@click.option('--yes', is_flag=True, callback=abort_if_false, - expose_value=False, - prompt='Are you sure? Repair mode may corrupt working SKALE chain data.') +@click.option( + '--yes', + is_flag=True, + callback=abort_if_false, + expose_value=False, + prompt='Are you sure? Repair mode may corrupt working SKALE chain data.', +) @click.option( '--snapshot-from', type=URL_TYPE, default=None, hidden=True, - help='Ip of the node from to download snapshot from' + help='Ip of the node from to download snapshot from', ) def repair(schain_name: str, snapshot_from: Optional[str] = None) -> None: toggle_schain_repair_mode(schain_name, snapshot_from=snapshot_from) @@ -98,12 +94,7 @@ def repair(schain_name: str, snapshot_from: Optional[str] = None) -> None: @schains.command('info', help='Show info about schain') @click.argument('schain_name') -@click.option( - '--json', - 'json_format', - help='Show info in JSON format', - is_flag=True -) +@click.option('--json', 'json_format', help='Show info in JSON format', is_flag=True) def info_(schain_name: str, json_format: bool) -> None: describe(schain_name, raw=json_format) @@ -114,9 +105,6 @@ def info_(schain_name: str, json_format: bool) -> None: @click.option('--schain-type', default='medium') @click.option('--env-type', default=None) def restore( - schain_name: str, - snapshot_path: str, - schain_type: str, - env_type: Optional[str] + schain_name: str, snapshot_path: str, schain_type: str, env_type: Optional[str] ) -> None: restore_schain_from_snapshot(schain_name, snapshot_path) diff --git a/node_cli/cli/wallet.py b/node_cli/cli/wallet.py index c794f969..b8f4f6b5 100644 --- a/node_cli/cli/wallet.py +++ b/node_cli/cli/wallet.py @@ -32,22 +32,26 @@ def wallet_cli(): pass -@wallet_cli.group('wallet', help="Node wallet commands") +@wallet_cli.group('wallet', help='Node wallet commands') def wallet(): pass -@wallet.command('info', help="Get info about SKALE node wallet") +@wallet.command('info', 
help='Get info about SKALE node wallet') @click.option('--format', '-f', type=click.Choice(['json', 'text'])) def wallet_info(format): get_wallet_info(format) -@wallet.command('send', help="Send ETH from SKALE node wallet to address") +@wallet.command('send', help='Send ETH from SKALE node wallet to address') @click.argument('address') @click.argument('amount', type=float) -@click.option('--yes', is_flag=True, callback=abort_if_false, - expose_value=False, - prompt='Are you sure you want to send ETH tokens?') +@click.option( + '--yes', + is_flag=True, + callback=abort_if_false, + expose_value=False, + prompt='Are you sure you want to send ETH tokens?', +) def send(address, amount): send_eth(address, amount) diff --git a/node_cli/core/checks.py b/node_cli/core/checks.py index cb4bae86..78a66fa7 100644 --- a/node_cli/core/checks.py +++ b/node_cli/core/checks.py @@ -30,10 +30,18 @@ from collections import namedtuple from functools import wraps from typing import ( - Any, Callable, cast, - Dict, Iterable, Iterator, - List, Optional, - Tuple, TypeVar, Union, ) + Any, + Callable, + cast, + Dict, + Iterable, + Iterator, + List, + Optional, + Tuple, + TypeVar, + Union, +) import docker # type: ignore import psutil # type: ignore @@ -47,7 +55,7 @@ DOCKER_CONFIG_FILEPATH, DOCKER_DAEMON_HOSTS, REPORTS_PATH, - STATIC_PARAMS_FILEPATH + STATIC_PARAMS_FILEPATH, ) from node_cli.core.host import is_ufw_ipv6_chain_exists, is_ufw_ipv6_option_enabled from node_cli.core.resources import get_disk_size @@ -68,10 +76,7 @@ FuncList = List[Func] -def get_static_params( - env_type: str = 'mainnet', - config_path: str = CONTAINER_CONFIG_PATH -) -> Dict: +def get_static_params(env_type: str = 'mainnet', config_path: str = CONTAINER_CONFIG_PATH) -> Dict: status_params_filename = os.path.basename(STATIC_PARAMS_FILEPATH) static_params_filepath = os.path.join(config_path, status_params_filename) with open(static_params_filepath) as requirements_file: @@ -84,11 +89,7 @@ def check_quietly(check: 
Func, *args, **kwargs) -> CheckResult: return check(*args, **kwargs) except Exception as err: logger.exception('%s check errored') - return CheckResult( - name=check.__name__, - status='error', - info=repr(err) - ) + return CheckResult(name=check.__name__, status='error', info=repr(err)) class CheckType(enum.Enum): @@ -117,13 +118,8 @@ def wrapper(*args, **kwargs) -> CheckResult: return cast(Func, wrapper) -def generate_report_from_result( - check_result: List[CheckResult] -) -> List[Dict]: - report = [ - {'name': cr.name, 'status': cr.status} - for cr in check_result - ] +def generate_report_from_result(check_result: List[CheckResult]) -> List[Dict]: + report = [{'name': cr.name, 'status': cr.status} for cr in check_result] return report @@ -144,10 +140,7 @@ def get_report(report_path: str = CHECK_REPORT_PATH) -> List[Dict]: return saved_report -def save_report( - new_report: List[Dict], - report_path: str = CHECK_REPORT_PATH -) -> None: +def save_report(new_report: List[Dict], report_path: str = CHECK_REPORT_PATH) -> None: safe_mkdir(REPORTS_PATH) with open(report_path, 'w') as report_file: json.dump(new_report, report_file, indent=4) @@ -157,28 +150,14 @@ def merge_reports( old_report: List[Dict], new_report: List[Dict], ) -> List[Dict]: - return list(dedup( - itertools.chain( - new_report, - old_report - ), - key=lambda r: r['name'] - )) + return list(dedup(itertools.chain(new_report, old_report), key=lambda r: r['name'])) class BaseChecker: - def _ok( - self, - name: str, - info: Optional[Union[str, Dict]] = None - ) -> CheckResult: + def _ok(self, name: str, info: Optional[Union[str, Dict]] = None) -> CheckResult: return CheckResult(name=name, status='ok', info=info) - def _failed( - self, - name: str, - info: Optional[Union[str, Dict]] = None - ) -> CheckResult: + def _failed(self, name: str, info: Optional[Union[str, Dict]] = None) -> CheckResult: return CheckResult(name=name, status='failed', info=info) def get_checks(self, check_type: CheckType = 
CheckType.ALL) -> FuncList: @@ -189,8 +168,8 @@ def get_checks(self, check_type: CheckType = CheckType.ALL) -> FuncList: methods = inspect.getmembers( type(self), - predicate=lambda m: inspect.isfunction(m) and - getattr(m, '_check_type', None) in allowed_types + predicate=lambda m: inspect.isfunction(m) + and getattr(m, '_check_type', None) in allowed_types, ) return [functools.partial(m[1], self) for m in methods] @@ -209,10 +188,8 @@ def check(self) -> ResultList: class MachineChecker(BaseChecker): def __init__( - self, - requirements: Dict, - disk_device: str, - network_timeout: Optional[int] = None) -> None: + self, requirements: Dict, disk_device: str, network_timeout: Optional[int] = None + ) -> None: self.requirements = requirements self.disk_device = disk_device self.network_timeout = network_timeout or NETWORK_CHECK_TIMEOUT @@ -242,11 +219,11 @@ def cpu_physical(self) -> CheckResult: @preinstall def memory(self) -> CheckResult: name = 'memory' - mem_info = psutil.virtual_memory().total, + mem_info = (psutil.virtual_memory().total,) actual = mem_info[0] expected = self.requirements['memory'] - actual_gb = round(actual / 1024 ** 3, 2) - expected_gb = round(expected / 1024 ** 3, 2) + actual_gb = round(actual / 1024**3, 2) + expected_gb = round(expected / 1024**3, 2) info = f'Expected RAM {expected_gb} GB, actual {actual_gb} GB' if actual < expected: return self._failed(name=name, info=info) @@ -258,8 +235,8 @@ def swap(self) -> CheckResult: name = 'swap' actual = psutil.swap_memory().total expected = self.requirements['swap'] - actual_gb = round(actual / 1024 ** 3, 2) - expected_gb = round(expected / 1024 ** 3, 2) + actual_gb = round(actual / 1024**3, 2) + expected_gb = round(expected / 1024**3, 2) info = f'Expected swap memory {expected_gb} GB, actual {actual_gb} GB' if actual < expected: return self._failed(name=name, info=info) @@ -274,8 +251,8 @@ def disk(self) -> CheckResult: name = 'disk' actual = self._get_disk_size() expected = 
self.requirements['disk'] - actual_gb = round(actual / 1024 ** 3, 2) - expected_gb = round(expected / 1024 ** 3, 2) + actual_gb = round(actual / 1024**3, 2) + expected_gb = round(expected / 1024**3, 2) info = f'Expected disk size {expected_gb} GB, actual {actual_gb} GB' if actual < expected: return self._failed(name=name, info=info) @@ -288,7 +265,8 @@ def network(self) -> CheckResult: try: socket.setdefaulttimeout(self.network_timeout) socket.socket(socket.AF_INET, socket.SOCK_STREAM).connect( - (CLOUDFLARE_DNS_HOST, CLOUDFLARE_DNS_HOST_PORT)) + (CLOUDFLARE_DNS_HOST, CLOUDFLARE_DNS_HOST_PORT) + ) return self._ok(name=name) except socket.error as err: info = f'Network checking returned error: {err}' @@ -299,24 +277,17 @@ class PackageChecker(BaseChecker): def __init__(self, requirements: Dict) -> None: self.requirements = requirements - def _check_apt_package(self, package_name: str, - version: str = None) -> CheckResult: + def _check_apt_package(self, package_name: str, version: str = None) -> CheckResult: # TODO: check versions - dpkg_cmd_result = run_cmd( - ['dpkg', '-s', package_name], check_code=False) + dpkg_cmd_result = run_cmd(['dpkg', '-s', package_name], check_code=False) output = dpkg_cmd_result.stdout.decode('utf-8').strip() if dpkg_cmd_result.returncode != 0: return self._failed(name=package_name, info=output) actual_version = self._version_from_dpkg_output(output) expected_version = self.requirements[package_name] - info = { - 'expected_version': expected_version, - 'actual_version': actual_version - } - compare_result = debian_support.version_compare( - actual_version, expected_version - ) + info = {'expected_version': expected_version, 'actual_version': actual_version} + compare_result = debian_support.version_compare(actual_version, expected_version) if compare_result == -1: return self._failed(name=package_name, info=info) else: @@ -342,24 +313,15 @@ def psmisc(self) -> CheckResult: def ufw_ipv6_disabled(self) -> CheckResult: name = 'ufw-ipv6' if 
is_ufw_ipv6_option_enabled(): - return self._failed( - name=name, - info='ufw ipv6 configuration should be disabled' - ) + return self._failed(name=name, info='ufw ipv6 configuration should be disabled') elif is_ufw_ipv6_chain_exists(): - return self._failed( - name=name, - info='ufw should be reloaded to switch off ipv6' - ) + return self._failed(name=name, info='ufw should be reloaded to switch off ipv6') else: return self._ok(name=name) def _version_from_dpkg_output(self, output: str) -> str: info_lines = map(lambda s: s.strip(), output.split('\n')) - v_line = next(filter( - lambda s: s.startswith('Version'), - info_lines - )) + v_line = next(filter(lambda s: s.startswith('Version'), info_lines)) return v_line.split()[1] @@ -386,17 +348,11 @@ def docker_engine(self) -> CheckResult: version_info = self._get_docker_version_info() if not version_info: - return self._failed( - name=name, - info='Docker api request failed. Is docker installed?' - ) + return self._failed(name=name, info='Docker api request failed. Is docker installed?') logger.debug('Docker version info %s', version_info) actual_version = self.docker_client.version()['Version'] expected_version = self.requirements['docker-engine'] - info = { - 'expected_version': expected_version, - 'actual_version': actual_version - } + info = {'expected_version': expected_version, 'actual_version': actual_version} if version_parse(actual_version) < version_parse(expected_version): return self._failed(name=name, info=info) else: @@ -410,17 +366,11 @@ def docker_api(self) -> CheckResult: version_info = self._get_docker_version_info() if not version_info: - return self._failed( - name=name, - info='Docker api request failed. Is docker installed?' - ) + return self._failed(name=name, info='Docker api request failed. 
Is docker installed?') logger.debug('Docker version info %s', version_info) actual_version = version_info['ApiVersion'] expected_version = self.requirements['docker-api'] - info = { - 'expected_version': expected_version, - 'actual_version': actual_version - } + info = {'expected_version': expected_version, 'actual_version': actual_version} if version_parse(actual_version) < version_parse(expected_version): return self._failed(name=name, info=info) else: @@ -435,9 +385,7 @@ def docker_compose(self) -> CheckResult: return self._failed(name=name, info=info) v_cmd_result = run_cmd( - ['docker', 'compose', 'version'], - check_code=False, - separate_stderr=True + ['docker', 'compose', 'version'], check_code=False, separate_stderr=True ) output = v_cmd_result.stdout.decode('utf-8').rstrip() if v_cmd_result.returncode != 0: @@ -468,10 +416,7 @@ def _get_docker_config(self) -> Dict: def _check_docker_alive_option(self, config: Dict) -> Tuple: actual_value = config.get('live-restore', None) if actual_value is not True: - info = ( - 'Docker daemon live-restore option ' - 'should be set as "true"' - ) + info = 'Docker daemon live-restore option should be set as "true"' return False, info else: info = 'Docker daemon live-restore option is set as "true"' @@ -509,16 +454,10 @@ def hosts_config(self) -> CheckResult: return self._failed(name=name, info=info) -def get_checks( - checkers: List[BaseChecker], - check_type: CheckType = CheckType.ALL -) -> FuncList: +def get_checks(checkers: List[BaseChecker], check_type: CheckType = CheckType.ALL) -> FuncList: return list( itertools.chain.from_iterable( - ( - checker.get_checks(check_type=check_type) - for checker in checkers - ) + (checker.get_checks(check_type=check_type) for checker in checkers) ) ) @@ -527,7 +466,7 @@ def get_all_checkers(disk: str, requirements: Dict) -> List[BaseChecker]: return [ MachineChecker(requirements['server'], disk), PackageChecker(requirements['package']), - DockerChecker(requirements['docker']) + 
DockerChecker(requirements['docker']), ] @@ -535,7 +474,7 @@ def run_checks( disk: str, env_type: str = 'mainnet', config_path: str = CONTAINER_CONFIG_PATH, - check_type: CheckType = CheckType.ALL + check_type: CheckType = CheckType.ALL, ) -> ResultList: logger.info('Executing checks. Type: %s', check_type) requirements = get_static_params(env_type, config_path) diff --git a/node_cli/core/health.py b/node_cli/core/health.py index 247831db..72eb23ed 100644 --- a/node_cli/core/health.py +++ b/node_cli/core/health.py @@ -20,10 +20,7 @@ import json from terminaltables import SingleTable -from node_cli.utils.print_formatters import ( - print_containers, - print_schains_healthchecks -) +from node_cli.utils.print_formatters import print_containers, print_schains_healthchecks from node_cli.utils.helper import error_exit, get_request from node_cli.utils.exit_codes import CLIExitCodes @@ -33,9 +30,7 @@ def get_containers(_all): status, payload = get_request( - blueprint=BLUEPRINT_NAME, - method='containers', - params={'all': _all} + blueprint=BLUEPRINT_NAME, method='containers', params={'all': _all} ) if status == 'ok': print_containers(payload) @@ -44,10 +39,7 @@ def get_containers(_all): def get_schains_checks(json_format: bool = False) -> None: - status, payload = get_request( - blueprint=BLUEPRINT_NAME, - method='schains' - ) + status, payload = get_request(blueprint=BLUEPRINT_NAME, method='schains') if status == 'ok': if not payload: print('No sChains found') @@ -61,10 +53,7 @@ def get_schains_checks(json_format: bool = False) -> None: def get_sgx_info(): - status, payload = get_request( - blueprint=BLUEPRINT_NAME, - method='sgx' - ) + status, payload = get_request(blueprint=BLUEPRINT_NAME, method='sgx') if status == 'ok': data = payload table_data = [ @@ -73,7 +62,7 @@ def get_sgx_info(): ['SGXWallet Version', data['sgx_wallet_version']], ['Node SGX keyname', data['sgx_keyname']], ['Status HTTPS', data['status_https']], - ['Status ZMQ', data['status_zmq']] + ['Status 
ZMQ', data['status_zmq']], ] table = SingleTable(table_data) print(table.table) diff --git a/node_cli/core/logs.py b/node_cli/core/logs.py index d92cb37c..3060b874 100644 --- a/node_cli/core/logs.py +++ b/node_cli/core/logs.py @@ -23,9 +23,7 @@ import datetime from node_cli.utils.helper import run_cmd, safe_mkdir -from node_cli.utils.docker_utils import ( - save_container_logs, get_containers -) +from node_cli.utils.docker_utils import save_container_logs, get_containers from node_cli.configs import REMOVED_CONTAINERS_FOLDER_PATH, SKALE_TMP_DIR from node_cli.configs.cli_logger import LOG_DATA_PATH @@ -60,7 +58,7 @@ def create_logs_dump(path, filter_container=None): def create_dump_dir(): - time = datetime.datetime.utcnow().strftime("%Y-%m-%d--%H-%M-%S") + time = datetime.datetime.utcnow().strftime('%Y-%m-%d--%H-%M-%S') folder_name = f'skale-logs-dump-{time}' folder_path = os.path.join(SKALE_TMP_DIR, folder_name) containers_path = os.path.join(folder_path, 'containers') diff --git a/node_cli/core/node_options.py b/node_cli/core/node_options.py index 70573a65..49a0ea05 100644 --- a/node_cli/core/node_options.py +++ b/node_cli/core/node_options.py @@ -26,10 +26,7 @@ class NodeOptions: - def __init__( - self, - filepath: str = NODE_OPTIONS_FILEPATH - ): + def __init__(self, filepath: str = NODE_OPTIONS_FILEPATH): self.filepath = filepath init_file(filepath, {}) diff --git a/node_cli/core/ssl/check.py b/node_cli/core/ssl/check.py index d512c777..289bc77e 100644 --- a/node_cli/core/ssl/check.py +++ b/node_cli/core/ssl/check.py @@ -28,7 +28,7 @@ DEFAULT_SSL_CHECK_PORT, SKALED_SSL_TEST_SCRIPT, SSL_CERT_FILEPATH, - SSL_KEY_FILEPATH + SSL_KEY_FILEPATH, ) @@ -41,13 +41,12 @@ def check_cert( port=DEFAULT_SSL_CHECK_PORT, check_type='all', no_client=False, - no_wss=False + no_wss=False, ): if check_type in ('all', 'openssl'): try: check_cert_openssl( - cert_path, key_path, - host='127.0.0.1', port=port, no_client=no_client + cert_path, key_path, host='127.0.0.1', port=port, 
no_client=no_client ) except Exception as err: logger.exception('Cerificate/key pair is incorrect') @@ -55,10 +54,7 @@ def check_cert( if check_type in ('skaled',): try: - check_cert_skaled( - cert_path, key_path, - host='127.0.0.1', port=port, no_wss=no_wss - ) + check_cert_skaled(cert_path, key_path, host='127.0.0.1', port=port, no_wss=no_wss) except Exception as err: logger.exception('Certificate/key pair is incorrect for skaled') return 'error', f'Skaled ssl check failed. {err}' @@ -72,12 +68,9 @@ def check_cert_openssl( host='127.0.0.1', port=DEFAULT_SSL_CHECK_PORT, no_client=False, - silent=False + silent=False, ): - with openssl_server( - host, port, cert_path, - key_path, silent=silent - ) as serv: + with openssl_server(host, port, cert_path, key_path, silent=silent) as serv: time.sleep(1) code = serv.poll() if code is not None: @@ -89,9 +82,7 @@ def check_cert_openssl( # Connect to ssl server if not no_client: if not check_endpoint(host, port): - raise SSLHealthcheckError( - f'Healthcheck port is closed on {host}:{port}' - ) + raise SSLHealthcheckError(f'Healthcheck port is closed on {host}:{port}') check_ssl_connection(host, port, silent=silent) logger.info('Healthcheck connection passed') @@ -99,28 +90,29 @@ def check_cert_openssl( @contextmanager def openssl_server(host, port, cert_path, key_path, silent=False): ssl_server_cmd = [ - 'openssl', 's_server', - '-cert', cert_path, - '-cert_chain', cert_path, - '-key', key_path, + 'openssl', + 's_server', + '-cert', + cert_path, + '-cert_chain', + cert_path, + '-key', + key_path, '-WWW', - '-accept', f'{host}:{port}', - '-verify_return_error', '-verify', '1' + '-accept', + f'{host}:{port}', + '-verify_return_error', + '-verify', + '1', ] logger.info(f'Staring healthcheck server on port {port} ...') expose_output = not silent - with detached_subprocess( - ssl_server_cmd, expose_output=expose_output - ) as dp: + with detached_subprocess(ssl_server_cmd, expose_output=expose_output) as dp: yield dp def 
check_cert_skaled( - cert_path, - key_path, - host='127.0.0.1', - port=DEFAULT_SSL_CHECK_PORT, - no_wss=False + cert_path, key_path, host='127.0.0.1', port=DEFAULT_SSL_CHECK_PORT, no_wss=False ): run_skaled_https_healthcheck(cert_path, key_path, host, port) if not no_wss: @@ -128,17 +120,18 @@ def check_cert_skaled( def run_skaled_https_healthcheck( - cert_path, - key_path, - host='127.0.0.1', - port=DEFAULT_SSL_CHECK_PORT + cert_path, key_path, host='127.0.0.1', port=DEFAULT_SSL_CHECK_PORT ): skaled_https_check_cmd = [ SKALED_SSL_TEST_SCRIPT, - '--ssl-cert', cert_path, - '--ssl-key', key_path, - '--bind', host, - '--port', str(port) + '--ssl-cert', + cert_path, + '--ssl-key', + key_path, + '--bind', + host, + '--port', + str(port), ] with detached_subprocess(skaled_https_check_cmd, expose_output=True) as dp: time.sleep(1) @@ -147,24 +140,23 @@ def run_skaled_https_healthcheck( logger.info('Skaled https check server successfully started') else: logger.error('Skaled https check server was failed to start') - raise SSLHealthcheckError( - 'Skaled https check was failed') + raise SSLHealthcheckError('Skaled https check was failed') -def run_skaled_wss_healthcheck( - cert_path, - key_path, - host='127.0.0.1', - port=DEFAULT_SSL_CHECK_PORT -): +def run_skaled_wss_healthcheck(cert_path, key_path, host='127.0.0.1', port=DEFAULT_SSL_CHECK_PORT): skaled_wss_check_cmd = [ SKALED_SSL_TEST_SCRIPT, - '--ssl-cert', cert_path, - '--ssl-key', key_path, - '--bind', host, - '--port', str(port), - '--proto', 'wss', - '--echo' + '--ssl-cert', + cert_path, + '--ssl-key', + key_path, + '--bind', + host, + '--port', + str(port), + '--proto', + 'wss', + '--echo', ] with detached_subprocess(skaled_wss_check_cmd, expose_output=True) as dp: @@ -172,8 +164,7 @@ def run_skaled_wss_healthcheck( code = dp.poll() if code is not None: logger.error('Skaled wss check server was failed to start') - raise SSLHealthcheckError( - 'Skaled wss check was failed') + raise SSLHealthcheckError('Skaled wss 
check was failed') else: logger.info('Skaled wss check server successfully started') @@ -195,9 +186,13 @@ def check_endpoint(host, port): def check_ssl_connection(host, port, silent=False): logger.info(f'Connecting to public ssl endpoint {host}:{port} ...') ssl_check_cmd = [ - 'openssl', 's_client', - '-connect', f'{host}:{port}', - '-verify_return_error', '-verify', '2' + 'openssl', + 's_client', + '-connect', + f'{host}:{port}', + '-verify_return_error', + '-verify', + '2', ] expose_output = not silent with detached_subprocess(ssl_check_cmd, expose_output=expose_output) as dp: diff --git a/node_cli/core/ssl/status.py b/node_cli/core/ssl/status.py index 31ab4b9a..9b035b1d 100644 --- a/node_cli/core/ssl/status.py +++ b/node_cli/core/ssl/status.py @@ -38,10 +38,9 @@ def cert_status(): if status == 'error': return err_result(CERTS_INVALID_FORMAT) else: - return ok_result(payload={ - 'issued_to': info['issued_to'], - 'expiration_date': info['expiration_date'] - }) + return ok_result( + payload={'issued_to': info['issued_to'], 'expiration_date': info['expiration_date']} + ) def get_cert_info(cert): @@ -50,14 +49,10 @@ def get_cert_info(cert): subject = crypto_cert.get_subject() issued_to = subject.CN expiration_date_raw = crypto_cert.get_notAfter() - expiration_date = parser.parse( - expiration_date_raw - ).strftime('%Y-%m-%dT%H:%M:%S') + expiration_date = parser.parse(expiration_date_raw).strftime('%Y-%m-%dT%H:%M:%S') except Exception as err: logger.exception('Error during parsing certs') return err_result(str(err)) - return ok_result({ - 'subject': subject, - 'issued_to': issued_to, - 'expiration_date': expiration_date - }) + return ok_result( + {'subject': subject, 'issued_to': issued_to, 'expiration_date': expiration_date} + ) diff --git a/node_cli/core/ssl/utils.py b/node_cli/core/ssl/utils.py index a2e71e1f..a5a329dd 100644 --- a/node_cli/core/ssl/utils.py +++ b/node_cli/core/ssl/utils.py @@ -48,11 +48,7 @@ def is_ssl_folder_empty(ssl_path=SSL_FOLDER_PATH): 
@contextmanager def detached_subprocess(cmd, expose_output=False): logger.debug(f'Starting detached subprocess: {cmd}') - p = subprocess.Popen( - cmd, - stdout=subprocess.PIPE, stderr=subprocess.STDOUT, - encoding='utf-8' - ) + p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, encoding='utf-8') try: yield p finally: diff --git a/node_cli/core/wallet.py b/node_cli/core/wallet.py index d7d129ec..13f980e0 100644 --- a/node_cli/core/wallet.py +++ b/node_cli/core/wallet.py @@ -39,10 +39,7 @@ def get_wallet_info(_format): def send_eth(address: str, amount: float): - json_data = { - 'address': address, - 'amount': amount - } + json_data = {'address': address, 'amount': amount} status, payload = post_request(BLUEPRINT_NAME, 'send-eth', json=json_data) if status == 'ok': msg = TEXTS['wallet']['successful_transfer'] diff --git a/node_cli/migrations/focal_to_jammy.py b/node_cli/migrations/focal_to_jammy.py index dae43820..030312c1 100644 --- a/node_cli/migrations/focal_to_jammy.py +++ b/node_cli/migrations/focal_to_jammy.py @@ -25,7 +25,7 @@ LEGACY_TABLE, POLICY, NFTablesManager, - remove_legacy_saved_rules + remove_legacy_saved_rules, ) from node_cli.utils.helper import run_cmd @@ -39,7 +39,7 @@ '443', # https '53', # dns '3009', # watchdog http - '9100' # node exporter + '9100', # node exporter ] ALLOWED_INCOMING_UDP_PORTS = [ diff --git a/node_cli/operations/volume.py b/node_cli/operations/volume.py index b6de918a..d6d6a966 100644 --- a/node_cli/operations/volume.py +++ b/node_cli/operations/volume.py @@ -31,7 +31,7 @@ FILESTORAGE_MAPPING, SCHAINS_MNT_DIR_REGULAR, SCHAINS_MNT_DIR_SYNC, - SKALE_STATE_DIR + SKALE_STATE_DIR, ) logger = logging.getLogger(__name__) @@ -58,11 +58,7 @@ def ensure_filestorage_mapping(mapping_dir=FILESTORAGE_MAPPING): def sync_docker_lvmpy_repo(env): if os.path.isdir(DOCKER_LVMPY_PATH): shutil.rmtree(DOCKER_LVMPY_PATH) - sync_repo( - DOCKER_LVMPY_REPO_URL, - DOCKER_LVMPY_PATH, - env["DOCKER_LVMPY_STREAM"] - ) + 
sync_repo(DOCKER_LVMPY_REPO_URL, DOCKER_LVMPY_PATH, env['DOCKER_LVMPY_STREAM']) def docker_lvmpy_update(env): @@ -70,10 +66,7 @@ def docker_lvmpy_update(env): ensure_filestorage_mapping() logger.info('Running docker-lvmpy update script') update_docker_lvmpy_env(env) - run_cmd( - cmd=f'sudo -H -E {DOCKER_LVMPY_PATH}/scripts/update.sh'.split(), - env=env - ) + run_cmd(cmd=f'sudo -H -E {DOCKER_LVMPY_PATH}/scripts/update.sh'.split(), env=env) logger.info('docker-lvmpy update done') @@ -81,10 +74,7 @@ def docker_lvmpy_install(env): sync_docker_lvmpy_repo(env) ensure_filestorage_mapping() update_docker_lvmpy_env(env) - run_cmd( - cmd=f'sudo -H -E {DOCKER_LVMPY_PATH}/scripts/install.sh'.split(), - env=env - ) + run_cmd(cmd=f'sudo -H -E {DOCKER_LVMPY_PATH}/scripts/install.sh'.split(), env=env) logger.info('docker-lvmpy installed') diff --git a/tests/cli/exit_test.py b/tests/cli/exit_test.py index ae6e9530..a3b26ed0 100644 --- a/tests/cli/exit_test.py +++ b/tests/cli/exit_test.py @@ -5,17 +5,14 @@ def test_exit_status(): - payload = { - 'status': 'ACTIVE', - 'data': [{'name': 'test', 'status': 'ACTIVE'}], - 'exit_time': 0 - } + payload = {'status': 'ACTIVE', 'data': [{'name': 'test', 'status': 'ACTIVE'}], 'exit_time': 0} - resp_mock = response_mock( - requests.codes.ok, - json_data={'payload': payload, 'status': 'ok'} - ) + resp_mock = response_mock(requests.codes.ok, json_data={'payload': payload, 'status': 'ok'}) result = run_command_mock( - 'node_cli.utils.helper.requests.get', resp_mock, status, ['--format', 'json']) + 'node_cli.utils.helper.requests.get', resp_mock, status, ['--format', 'json'] + ) assert result.exit_code == 0 - assert result.output == "{'status': 'ACTIVE', 'data': [{'name': 'test', 'status': 'ACTIVE'}], 'exit_time': 0}\n" # noqa + assert ( + result.output + == "{'status': 'ACTIVE', 'data': [{'name': 'test', 'status': 'ACTIVE'}], 'exit_time': 0}\n" + ) # noqa diff --git a/tests/cli/health_test.py b/tests/cli/health_test.py index b2f2fa3f..de14c7c2 
100644 --- a/tests/cli/health_test.py +++ b/tests/cli/health_test.py @@ -6,80 +6,88 @@ OK_LS_RESPONSE_DATA = { 'status': 'ok', - 'payload': - [ - { - 'image': 'skalenetwork/schain:1.46-develop.21', - 'name': 'skale_schain_shapely-alfecca-meridiana', - 'state': { - 'Status': 'running', 'Running': True, - 'Paused': False, 'Restarting': False, - 'OOMKilled': False, 'Dead': False, - 'Pid': 232, 'ExitCode': 0, - 'Error': '', - 'StartedAt': '2020-07-31T11:56:35.732888232Z', - 'FinishedAt': '0001-01-01T00:00:00Z' - } + 'payload': [ + { + 'image': 'skalenetwork/schain:1.46-develop.21', + 'name': 'skale_schain_shapely-alfecca-meridiana', + 'state': { + 'Status': 'running', + 'Running': True, + 'Paused': False, + 'Restarting': False, + 'OOMKilled': False, + 'Dead': False, + 'Pid': 232, + 'ExitCode': 0, + 'Error': '', + 'StartedAt': '2020-07-31T11:56:35.732888232Z', + 'FinishedAt': '0001-01-01T00:00:00Z', + }, + }, + { + 'image': 'skale-admin:latest', + 'name': 'skale_api', + 'state': { + 'Status': 'running', + 'Running': True, + 'Paused': False, + 'Restarting': False, + 'OOMKilled': False, + 'Dead': False, + 'Pid': 6710, + 'ExitCode': 0, + 'Error': '', + 'StartedAt': '2020-07-31T11:55:17.28700307Z', + 'FinishedAt': '0001-01-01T00:00:00Z', }, - { - 'image': 'skale-admin:latest', 'name': 'skale_api', - 'state': { - 'Status': 'running', - 'Running': True, 'Paused': False, - 'Restarting': False, 'OOMKilled': False, - 'Dead': False, 'Pid': 6710, 'ExitCode': 0, - 'Error': '', - 'StartedAt': '2020-07-31T11:55:17.28700307Z', - 'FinishedAt': '0001-01-01T00:00:00Z' - } - } - ] + }, + ], } def test_containers(): - resp_mock = response_mock( - requests.codes.ok, - json_data=OK_LS_RESPONSE_DATA - ) - result = run_command_mock('node_cli.utils.helper.requests.get', - resp_mock, containers) + resp_mock = response_mock(requests.codes.ok, json_data=OK_LS_RESPONSE_DATA) + result = run_command_mock('node_cli.utils.helper.requests.get', resp_mock, containers) assert result.exit_code == 0 - 
assert result.output == ' Name Status Started At Image \n-------------------------------------------------------------------------------------------------------------\nskale_schain_shapely-alfecca-meridiana Running Jul 31 2020 11:56:35 skalenetwork/schain:1.46-develop.21\nskale_api Running Jul 31 2020 11:55:17 skale-admin:latest \n' # noqa + assert ( + result.output + == ' Name Status Started At Image \n-------------------------------------------------------------------------------------------------------------\nskale_schain_shapely-alfecca-meridiana Running Jul 31 2020 11:56:35 skalenetwork/schain:1.46-develop.21\nskale_api Running Jul 31 2020 11:55:17 skale-admin:latest \n' # noqa + ) def test_checks(): payload = [ { - "name": "test_schain", - "healthchecks": { - "config_dir": True, - "dkg": False, - "config": False, - "volume": False, - "skaled_container": False, - "ima_container": False, - "firewall_rules": False, - "rpc": False, - "blocks": False - } + 'name': 'test_schain', + 'healthchecks': { + 'config_dir': True, + 'dkg': False, + 'config': False, + 'volume': False, + 'skaled_container': False, + 'ima_container': False, + 'firewall_rules': False, + 'rpc': False, + 'blocks': False, + }, } ] - resp_mock = response_mock( - requests.codes.ok, - json_data={'payload': payload, 'status': 'ok'} - ) - result = run_command_mock('node_cli.utils.helper.requests.get', - resp_mock, schains) + resp_mock = response_mock(requests.codes.ok, json_data={'payload': payload, 'status': 'ok'}) + result = run_command_mock('node_cli.utils.helper.requests.get', resp_mock, schains) assert result.exit_code == 0 - assert result.output == 'sChain Name Config directory DKG Config file Volume Container IMA Firewall RPC Blocks\n-------------------------------------------------------------------------------------------------------------\ntest_schain True False False False False False False False False \n' # noqa + assert ( + result.output + == 'sChain Name Config directory DKG Config file 
Volume Container IMA Firewall RPC Blocks\n-------------------------------------------------------------------------------------------------------------\ntest_schain True False False False False False False False False \n' # noqa + ) - result = run_command_mock('node_cli.utils.helper.requests.get', - resp_mock, schains, ['--json']) + result = run_command_mock('node_cli.utils.helper.requests.get', resp_mock, schains, ['--json']) assert result.exit_code == 0 - assert result.output == '[{"name": "test_schain", "healthchecks": {"config_dir": true, "dkg": false, "config": false, "volume": false, "skaled_container": false, "ima_container": false, "firewall_rules": false, "rpc": false, "blocks": false}}]\n' # noqa + assert ( + result.output + == '[{"name": "test_schain", "healthchecks": {"config_dir": true, "dkg": false, "config": false, "volume": false, "skaled_container": false, "ima_container": false, "firewall_rules": false, "rpc": false, "blocks": false}}]\n' # noqa + ) def test_sgx_status(): @@ -88,14 +96,13 @@ def test_sgx_status(): 'sgx_wallet_version': '1.50.1-stable.0', 'sgx_keyname': 'test_keyname', 'status_zmq': True, - 'status_https': True + 'status_https': True, } - resp_mock = response_mock( - requests.codes.ok, - json_data={'payload': payload, 'status': 'ok'} - ) - result = run_command_mock( - 'node_cli.utils.helper.requests.get', resp_mock, sgx) + resp_mock = response_mock(requests.codes.ok, json_data={'payload': payload, 'status': 'ok'}) + result = run_command_mock('node_cli.utils.helper.requests.get', resp_mock, sgx) assert result.exit_code == 0 - assert result.output == '\x1b(0lqqqqqqqqqqqqqqqqqqqwqqqqqqqqqqqqqqqqqqqqqqqqk\x1b(B\n\x1b(0x\x1b(B SGX info \x1b(0x\x1b(B \x1b(0x\x1b(B\n\x1b(0tqqqqqqqqqqqqqqqqqqqnqqqqqqqqqqqqqqqqqqqqqqqqu\x1b(B\n\x1b(0x\x1b(B Server URL \x1b(0x\x1b(B https://127.0.0.1:1026 \x1b(0x\x1b(B\n\x1b(0x\x1b(B SGXWallet Version \x1b(0x\x1b(B 1.50.1-stable.0 \x1b(0x\x1b(B\n\x1b(0x\x1b(B Node SGX keyname \x1b(0x\x1b(B test_keyname 
\x1b(0x\x1b(B\n\x1b(0x\x1b(B Status HTTPS \x1b(0x\x1b(B True \x1b(0x\x1b(B\n\x1b(0x\x1b(B Status ZMQ \x1b(0x\x1b(B True \x1b(0x\x1b(B\n\x1b(0mqqqqqqqqqqqqqqqqqqqvqqqqqqqqqqqqqqqqqqqqqqqqj\x1b(B\n' # noqa + assert ( + result.output + == '\x1b(0lqqqqqqqqqqqqqqqqqqqwqqqqqqqqqqqqqqqqqqqqqqqqk\x1b(B\n\x1b(0x\x1b(B SGX info \x1b(0x\x1b(B \x1b(0x\x1b(B\n\x1b(0tqqqqqqqqqqqqqqqqqqqnqqqqqqqqqqqqqqqqqqqqqqqqu\x1b(B\n\x1b(0x\x1b(B Server URL \x1b(0x\x1b(B https://127.0.0.1:1026 \x1b(0x\x1b(B\n\x1b(0x\x1b(B SGXWallet Version \x1b(0x\x1b(B 1.50.1-stable.0 \x1b(0x\x1b(B\n\x1b(0x\x1b(B Node SGX keyname \x1b(0x\x1b(B test_keyname \x1b(0x\x1b(B\n\x1b(0x\x1b(B Status HTTPS \x1b(0x\x1b(B True \x1b(0x\x1b(B\n\x1b(0x\x1b(B Status ZMQ \x1b(0x\x1b(B True \x1b(0x\x1b(B\n\x1b(0mqqqqqqqqqqqqqqqqqqqvqqqqqqqqqqqqqqqqqqqqqqqqj\x1b(B\n' # noqa + ) diff --git a/tests/cli/logs_test.py b/tests/cli/logs_test.py index b292a848..c6fa37d2 100644 --- a/tests/cli/logs_test.py +++ b/tests/cli/logs_test.py @@ -23,11 +23,11 @@ from node_cli.configs import G_CONF_HOME from tests.helper import run_command -from tests.core.core_logs_test import backup_func, CURRENT_DATETIME, TEST_ARCHIVE_PATH # noqa +from tests.core.core_logs_test import backup_func, CURRENT_DATETIME, TEST_ARCHIVE_PATH # noqa @freezegun.freeze_time(CURRENT_DATETIME) -def test_dump(backup_func, removed_containers_folder): # noqa +def test_dump(backup_func, removed_containers_folder): # noqa result = run_command(dump, [G_CONF_HOME]) assert result.exit_code == 0 assert result.output == f'Logs dump created: {TEST_ARCHIVE_PATH}\n' diff --git a/tests/cli/schains_test.py b/tests/cli/schains_test.py index d12e7277..e450800a 100644 --- a/tests/cli/schains_test.py +++ b/tests/cli/schains_test.py @@ -24,8 +24,7 @@ from node_cli.configs import G_CONF_HOME from tests.helper import response_mock, run_command, run_command_mock -from node_cli.cli.schains import (get_schain_config, ls, dkg, show_rules, - repair, info_) +from node_cli.cli.schains import 
get_schain_config, ls, dkg, show_rules, repair, info_ def test_ls(): @@ -33,26 +32,39 @@ def test_ls(): time.tzset() payload = [ { - 'name': 'test_schain1', 'mainnet_owner': '0x123', - 'index_owner_list': 3, 'part_of_node': 0, - 'lifetime': 5, 'start_date': 1570115385, - 'deposit': 1000000000000000000, 'index': 3, 'generation': 1, 'originator': '0x465', 'options': {'allocation_type': 0} # noqa + 'name': 'test_schain1', + 'mainnet_owner': '0x123', + 'index_owner_list': 3, + 'part_of_node': 0, + 'lifetime': 5, + 'start_date': 1570115385, + 'deposit': 1000000000000000000, + 'index': 3, + 'generation': 1, + 'originator': '0x465', + 'options': {'allocation_type': 0}, # noqa }, { 'name': 'crazy_cats1', 'mainnet_owner': '0x321', - 'index_owner_list': 8, 'part_of_node': 0, - 'lifetime': 5, 'start_date': 1570469410, - 'deposit': 1000000000000000000, 'index': 8, 'generation': 0, 'originator': '0x0', 'options': {'allocation_type': 0} # noqa - } + 'index_owner_list': 8, + 'part_of_node': 0, + 'lifetime': 5, + 'start_date': 1570469410, + 'deposit': 1000000000000000000, + 'index': 8, + 'generation': 0, + 'originator': '0x0', + 'options': {'allocation_type': 0}, # noqa + }, ] - resp_mock = response_mock( - requests.codes.ok, - json_data={'payload': payload, 'status': 'ok'} - ) + resp_mock = response_mock(requests.codes.ok, json_data={'payload': payload, 'status': 'ok'}) result = run_command_mock('node_cli.utils.helper.requests.get', resp_mock, ls) assert result.exit_code == 0 - assert result.output == ' Name Owner Size Lifetime Created At Deposit Generation Originator Type\n--------------------------------------------------------------------------------------------------------------------\ntest_schain1 0x123 0 5 Oct 03 2019 16:09:45 1000000000000000000 1 0x465 0 \ncrazy_cats1 0x321 0 5 Oct 07 2019 18:30:10 1000000000000000000 0 0x0 0 \n' # noqa + assert ( + result.output + == ' Name Owner Size Lifetime Created At Deposit Generation Originator 
Type\n--------------------------------------------------------------------------------------------------------------------\ntest_schain1 0x123 0 5 Oct 03 2019 16:09:45 1000000000000000000 1 0x465 0 \ncrazy_cats1 0x321 0 5 Oct 07 2019 18:30:10 1000000000000000000 0 0x0 0 \n' # noqa + ) def test_dkg(): @@ -64,68 +76,81 @@ def test_dkg(): 'added_at': 1578497212.645233, 'dkg_status': 2, 'dkg_status_name': 'IN_PROGRESS', - 'is_deleted': False + 'is_deleted': False, } ] - resp_mock = response_mock( - requests.codes.ok, - json_data={'payload': payload, 'status': 'ok'} - ) - result = run_command_mock('node_cli.utils.helper.requests.get', - resp_mock, dkg) + resp_mock = response_mock(requests.codes.ok, json_data={'payload': payload, 'status': 'ok'}) + result = run_command_mock('node_cli.utils.helper.requests.get', resp_mock, dkg) assert result.exit_code == 0 - assert result.output == ' sChain Name DKG Status Added At sChain Status\n---------------------------------------------------------------------\nmelodic-aldhibah IN_PROGRESS Jan 08 2020 15:26:52 Exists \n' # noqa + assert ( + result.output + == ' sChain Name DKG Status Added At sChain Status\n---------------------------------------------------------------------\nmelodic-aldhibah IN_PROGRESS Jan 08 2020 15:26:52 Exists \n' # noqa + ) - result = run_command_mock('node_cli.utils.helper.requests.get', - resp_mock, dkg, ['--all']) + result = run_command_mock('node_cli.utils.helper.requests.get', resp_mock, dkg, ['--all']) assert result.exit_code == 0 - assert result.output == ' sChain Name DKG Status Added At sChain Status\n---------------------------------------------------------------------\nmelodic-aldhibah IN_PROGRESS Jan 08 2020 15:26:52 Exists \n' # noqa + assert ( + result.output + == ' sChain Name DKG Status Added At sChain Status\n---------------------------------------------------------------------\nmelodic-aldhibah IN_PROGRESS Jan 08 2020 15:26:52 Exists \n' # noqa + ) def test_get_schain_config(): payload = { 
'nodeInfo': { - 'nodeID': 2, 'nodeName': 'testnet-1', - 'basePort': 10011, 'httpRpcPort': 10009, - 'httpsRpcPort': 11118, 'wsRpcPort': 10118, + 'nodeID': 2, + 'nodeName': 'testnet-1', + 'basePort': 10011, + 'httpRpcPort': 10009, + 'httpsRpcPort': 11118, + 'wsRpcPort': 10118, 'wssRpcPort': 13219, - 'bindIP': '123.123.123.123' + 'bindIP': '123.123.123.123', }, 'sChain': { - 'schainID': 1, 'schainName': 'test1', + 'schainID': 1, + 'schainName': 'test1', 'nodes': [ - {'nodeID': 2, - 'nodeName': 'testnet-1', - 'basePort': 10011, - 'httpRpcPort': 10013, - 'httpsRpcPort': 10018, - 'wsRpcPort': 10014, - 'wssRpcPort': 10019, - 'publicKey': 'public_key', - 'owner': '0xe3213', - 'schainIndex': 1, - 'ip': '213.13.123.13', - 'publicIP': '1.1.1.1' - }, - {'nodeID': 0, 'nodeName': 'testnet-2', - 'basePort': 10077, 'httpRpcPort': 10079, - 'httpsRpcPort': 10084, 'wsRpcPort': 10080, - 'wssRpcPort': 10085, - 'publicKey': 'public_key352', - 'owner': '0x323', - 'schainIndex': 2, 'ip': '2.2.2.2', - 'publicIP': '3.3.3.3' - }]} + { + 'nodeID': 2, + 'nodeName': 'testnet-1', + 'basePort': 10011, + 'httpRpcPort': 10013, + 'httpsRpcPort': 10018, + 'wsRpcPort': 10014, + 'wssRpcPort': 10019, + 'publicKey': 'public_key', + 'owner': '0xe3213', + 'schainIndex': 1, + 'ip': '213.13.123.13', + 'publicIP': '1.1.1.1', + }, + { + 'nodeID': 0, + 'nodeName': 'testnet-2', + 'basePort': 10077, + 'httpRpcPort': 10079, + 'httpsRpcPort': 10084, + 'wsRpcPort': 10080, + 'wssRpcPort': 10085, + 'publicKey': 'public_key352', + 'owner': '0x323', + 'schainIndex': 2, + 'ip': '2.2.2.2', + 'publicIP': '3.3.3.3', + }, + ], + }, } - resp_mock = response_mock( - requests.codes.ok, - json_data={'payload': payload, 'status': 'ok'} + resp_mock = response_mock(requests.codes.ok, json_data={'payload': payload, 'status': 'ok'}) + result = run_command_mock( + 'node_cli.utils.helper.requests.get', resp_mock, get_schain_config, ['test1'] ) - result = run_command_mock('node_cli.utils.helper.requests.get', - resp_mock, - 
get_schain_config, ['test1']) assert result.exit_code == 0 - assert result.output == "{'nodeInfo': {'basePort': 10011,\n 'bindIP': '123.123.123.123',\n 'httpRpcPort': 10009,\n 'httpsRpcPort': 11118,\n 'nodeID': 2,\n 'nodeName': 'testnet-1',\n 'wsRpcPort': 10118,\n 'wssRpcPort': 13219},\n 'sChain': {'nodes': [{'basePort': 10011,\n 'httpRpcPort': 10013,\n 'httpsRpcPort': 10018,\n 'ip': '213.13.123.13',\n 'nodeID': 2,\n 'nodeName': 'testnet-1',\n 'owner': '0xe3213',\n 'publicIP': '1.1.1.1',\n 'publicKey': 'public_key',\n 'schainIndex': 1,\n 'wsRpcPort': 10014,\n 'wssRpcPort': 10019},\n {'basePort': 10077,\n 'httpRpcPort': 10079,\n 'httpsRpcPort': 10084,\n 'ip': '2.2.2.2',\n 'nodeID': 0,\n 'nodeName': 'testnet-2',\n 'owner': '0x323',\n 'publicIP': '3.3.3.3',\n 'publicKey': 'public_key352',\n 'schainIndex': 2,\n 'wsRpcPort': 10080,\n 'wssRpcPort': 10085}],\n 'schainID': 1,\n 'schainName': 'test1'}}\n" # noqa + assert ( + result.output + == "{'nodeInfo': {'basePort': 10011,\n 'bindIP': '123.123.123.123',\n 'httpRpcPort': 10009,\n 'httpsRpcPort': 11118,\n 'nodeID': 2,\n 'nodeName': 'testnet-1',\n 'wsRpcPort': 10118,\n 'wssRpcPort': 13219},\n 'sChain': {'nodes': [{'basePort': 10011,\n 'httpRpcPort': 10013,\n 'httpsRpcPort': 10018,\n 'ip': '213.13.123.13',\n 'nodeID': 2,\n 'nodeName': 'testnet-1',\n 'owner': '0xe3213',\n 'publicIP': '1.1.1.1',\n 'publicKey': 'public_key',\n 'schainIndex': 1,\n 'wsRpcPort': 10014,\n 'wssRpcPort': 10019},\n {'basePort': 10077,\n 'httpRpcPort': 10079,\n 'httpsRpcPort': 10084,\n 'ip': '2.2.2.2',\n 'nodeID': 0,\n 'nodeName': 'testnet-2',\n 'owner': '0x323',\n 'publicIP': '3.3.3.3',\n 'publicKey': 'public_key352',\n 'schainIndex': 2,\n 'wsRpcPort': 10080,\n 'wssRpcPort': 10085}],\n 'schainID': 1,\n 'schainName': 'test1'}}\n" # noqa + ) def test_schain_rules(): @@ -139,18 +164,19 @@ def test_schain_rules(): {'port': 10005, 'first_ip': '127.0.0.2', 'last_ip': '127.0.0.2'}, {'port': 10007, 'first_ip': None, 'last_ip': None}, {'port': 10008, 
'first_ip': None, 'last_ip': None}, - {'port': 10009, 'first_ip': None, 'last_ip': None} + {'port': 10009, 'first_ip': None, 'last_ip': None}, ] } - resp_mock = response_mock( - requests.codes.ok, - json_data={'payload': payload, 'status': 'ok'} - ) + resp_mock = response_mock(requests.codes.ok, json_data={'payload': payload, 'status': 'ok'}) result = run_command_mock( - 'node_cli.utils.helper.requests.get', resp_mock, show_rules, ['schain-test']) + 'node_cli.utils.helper.requests.get', resp_mock, show_rules, ['schain-test'] + ) assert result.exit_code == 0 print(repr(result.output)) - assert result.output == ' IP range Port \n-----------------------------\n127.0.0.2 - 127.0.0.2 10000\n127.0.0.2 - 127.0.0.2 10001\nAll IPs 10002\nAll IPs 10003\n127.0.0.2 - 127.0.0.2 10004\n127.0.0.2 - 127.0.0.2 10005\nAll IPs 10007\nAll IPs 10008\nAll IPs 10009\n' # noqa + assert ( + result.output + == ' IP range Port \n-----------------------------\n127.0.0.2 - 127.0.0.2 10000\n127.0.0.2 - 127.0.0.2 10001\nAll IPs 10002\nAll IPs 10003\n127.0.0.2 - 127.0.0.2 10004\n127.0.0.2 - 127.0.0.2 10005\nAll IPs 10007\nAll IPs 10008\nAll IPs 10009\n' # noqa + ) def test_repair(tmp_schains_dir): @@ -167,24 +193,29 @@ def test_info(): 'name': 'attractive-ed-asich', 'id': '0xfb3b68013fa494407b691b4b603d84c66076c0a5ac96a7d6b162d7341d74fa61', 'owner': '0x1111111111111111111111111111111111111111', - 'part_of_node': 0, 'dkg_status': 3, 'is_deleted': False, - 'first_run': False, 'repair_mode': False + 'part_of_node': 0, + 'dkg_status': 3, + 'is_deleted': False, + 'first_run': False, + 'repair_mode': False, } - resp_mock = response_mock( - requests.codes.ok, - json_data={'payload': payload, 'status': 'ok'} + resp_mock = response_mock(requests.codes.ok, json_data={'payload': payload, 'status': 'ok'}) + result = run_command_mock( + 'node_cli.utils.helper.requests.get', resp_mock, info_, ['attractive-ed-asich'] + ) + assert ( + result.output + == ' Name Id Owner Part_of_node Dkg_status Is_deleted 
First_run Repair_mode\n--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------\nattractive-ed-asich 0xfb3b68013fa494407b691b4b603d84c66076c0a5ac96a7d6b162d7341d74fa61 0x1111111111111111111111111111111111111111 0 3 False False False \n' # noqa ) - result = run_command_mock('node_cli.utils.helper.requests.get', resp_mock, info_, - ['attractive-ed-asich']) - assert result.output == ' Name Id Owner Part_of_node Dkg_status Is_deleted First_run Repair_mode\n--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------\nattractive-ed-asich 0xfb3b68013fa494407b691b4b603d84c66076c0a5ac96a7d6b162d7341d74fa61 0x1111111111111111111111111111111111111111 0 3 False False False \n' # noqa assert result.exit_code == 0 payload = ['error'] - resp_mock = response_mock( - requests.codes.ok, - json_data={'payload': payload, 'status': 'error'} + resp_mock = response_mock(requests.codes.ok, json_data={'payload': payload, 'status': 'error'}) + result = run_command_mock( + 'node_cli.utils.helper.requests.get', resp_mock, info_, ['schain not found'] + ) + assert ( + result.output + == f'Command failed with following errors:\n--------------------------------------------------\nerror\n--------------------------------------------------\nYou can find more info in {G_CONF_HOME}.skale/.skale-cli-log/debug-node-cli.log\n' # noqa ) - result = run_command_mock('node_cli.utils.helper.requests.get', resp_mock, info_, - ['schain not found']) - assert result.output == f'Command failed with following errors:\n--------------------------------------------------\nerror\n--------------------------------------------------\nYou can find more info in {G_CONF_HOME}.skale/.skale-cli-log/debug-node-cli.log\n' # noqa assert 
result.exit_code == 3 diff --git a/tests/cli/wallet_test.py b/tests/cli/wallet_test.py index 489ec07b..234f4f1a 100644 --- a/tests/cli/wallet_test.py +++ b/tests/cli/wallet_test.py @@ -30,18 +30,12 @@ def test_wallet_info(): response_data = { 'status': 'ok', - 'payload': { - 'address': 'simple_address', - 'eth_balance': 13, - 'skale_balance': 123 - } + 'payload': {'address': 'simple_address', 'eth_balance': 13, 'skale_balance': 123}, } response_mock = MagicMock() response_mock.status_code = requests.codes.ok response_mock.json = Mock(return_value=response_data) - result = run_command_mock('node_cli.utils.helper.requests.get', - response_mock, - wallet_info) + result = run_command_mock('node_cli.utils.helper.requests.get', response_mock, wallet_info) assert result.exit_code == 0 expected = ( '--------------------------------------------------\n' @@ -52,28 +46,22 @@ def test_wallet_info(): ) assert result.output == expected - result = run_command_mock('node_cli.utils.helper.requests.get', - response_mock, - wallet_info, - ['--format', 'json']) - assert result.exit_code == 0 - expected = ( - "{\"address\": \"simple_address\", " - "\"eth_balance\": 13, \"skale_balance\": 123}\n" + result = run_command_mock( + 'node_cli.utils.helper.requests.get', response_mock, wallet_info, ['--format', 'json'] ) + assert result.exit_code == 0 + expected = '{"address": "simple_address", "eth_balance": 13, "skale_balance": 123}\n' assert result.output == expected def test_wallet_send(): - resp_mock = response_mock( - requests.codes.ok, - {'status': 'ok', 'payload': None} - ) + resp_mock = response_mock(requests.codes.ok, {'status': 'ok', 'payload': None}) result = run_command_mock( 'node_cli.utils.helper.requests.post', resp_mock, send, - ['0x00000000000000000000000000000000', '10', '--yes']) + ['0x00000000000000000000000000000000', '10', '--yes'], + ) assert result.exit_code == 0 assert result.output == 'Funds were successfully transferred\n' # noqa @@ -87,6 +75,10 @@ def 
test_wallet_send_with_error(): 'node_cli.utils.helper.requests.post', resp_mock, send, - ['0x00000000000000000000000000000000', '10', '--yes']) + ['0x00000000000000000000000000000000', '10', '--yes'], + ) assert result.exit_code == 3 - assert result.output == f'Command failed with following errors:\n--------------------------------------------------\nStrange error\n--------------------------------------------------\nYou can find more info in {G_CONF_HOME}.skale/.skale-cli-log/debug-node-cli.log\n' # noqa + assert ( + result.output + == f'Command failed with following errors:\n--------------------------------------------------\nStrange error\n--------------------------------------------------\nYou can find more info in {G_CONF_HOME}.skale/.skale-cli-log/debug-node-cli.log\n' # noqa + ) diff --git a/tests/core/core_checks_test.py b/tests/core/core_checks_test.py index 6e75bbdd..b490b1ee 100644 --- a/tests/core/core_checks_test.py +++ b/tests/core/core_checks_test.py @@ -19,7 +19,7 @@ MachineChecker, merge_reports, PackageChecker, - save_report + save_report, ) @@ -31,18 +31,10 @@ def requirements_data(): 'cpu_physical': 1, 'memory': 100, 'swap': 100, - 'disk': 100000000 + 'disk': 100000000, }, - 'package': { - 'iptables_persistant': '0.0.0', - 'lvm2': '0.0.0', - 'test-package': '2.2.2' - }, - 'docker': { - 'docker-engine': '0.0.0', - 'docker-api': '0.0.0', - 'docker-compose': '1.27.4' - } + 'package': {'iptables_persistant': '0.0.0', 'lvm2': '0.0.0', 'test-package': '2.2.2'}, + 'docker': {'docker-engine': '0.0.0', 'docker-api': '0.0.0', 'docker-compose': '1.27.4'}, } @@ -154,7 +146,7 @@ def test_checks_machine_check(server_req): {'name': 'disk', 'status': 'ok'}, {'name': 'memory', 'status': 'ok'}, {'name': 'network', 'status': 'ok'}, - {'name': 'swap', 'status': 'ok'} + {'name': 'swap', 'status': 'ok'}, ] @@ -233,10 +225,7 @@ def test_checks_docker_compose_no_pkg(docker_req): r.status == 'ok' -def test_checks_docker_compose_invalid_version( - docker_req, - 
docker_compose_pkg_1_24_1 -): +def test_checks_docker_compose_invalid_version(docker_req, docker_compose_pkg_1_24_1): checker = DockerChecker(docker_req) r = checker.docker_compose() r.name == 'docker-compose' @@ -245,16 +234,12 @@ def test_checks_docker_compose_invalid_version( def test_checks_docker_config(docker_req): checker = DockerChecker(docker_req) - valid_config = { - 'live-restore': True - } + valid_config = {'live-restore': True} r = checker._check_docker_alive_option(valid_config) assert r[0] is True assert r[1] == 'Docker daemon live-restore option is set as "true"' - invalid_config = { - 'live-restore': False - } + invalid_config = {'live-restore': False} r = checker._check_docker_alive_option(invalid_config) assert r[0] is False assert r[1] == 'Docker daemon live-restore option should be set as "true"' @@ -274,16 +259,22 @@ def test_checks_docker_hosts(docker_req): r = checker._check_docker_hosts_option(invalid_config) assert r == ( False, - "Docker daemon hosts is misconfigured. Missing hosts: ['fd://', 'unix:///var/run/skale/docker.sock']" # noqa + "Docker daemon hosts is misconfigured. Missing hosts: ['fd://', 'unix:///var/run/skale/docker.sock']", # noqa ) invalid_config = {'hosts': ['http://127.0.0.1:8080']} r = checker._check_docker_hosts_option(invalid_config) - assert r == (False, "Docker daemon hosts is misconfigured. Missing hosts: ['fd://', 'unix:///var/run/skale/docker.sock']") # noqa + assert r == ( + False, + "Docker daemon hosts is misconfigured. Missing hosts: ['fd://', 'unix:///var/run/skale/docker.sock']", + ) # noqa invalid_config = {'hosts': ['fd://']} r = checker._check_docker_hosts_option(invalid_config) - assert r == (False, "Docker daemon hosts is misconfigured. Missing hosts: ['unix:///var/run/skale/docker.sock']") # noqa + assert r == ( + False, + "Docker daemon hosts is misconfigured. 
Missing hosts: ['unix:///var/run/skale/docker.sock']", + ) # noqa def test_checks_docker_pre_post_install_checks(docker_req): @@ -367,17 +358,17 @@ def test_merge_report(): old_report = [ {'name': 'test1', 'status': 'ok', 'info': 'Test'}, {'name': 'test2', 'status': 'failed', 'info': 'Test1'}, - {'name': 'test3', 'status': 'failed', 'info': 'Test1'} + {'name': 'test3', 'status': 'failed', 'info': 'Test1'}, ] new_report = [ {'name': 'test1', 'status': 'ok', 'info': 'Test'}, - {'name': 'test2', 'status': 'ok', 'info': 'Test1'} + {'name': 'test2', 'status': 'ok', 'info': 'Test1'}, ] report = merge_reports(old_report, new_report) assert report == [ {'name': 'test1', 'status': 'ok', 'info': 'Test'}, {'name': 'test2', 'status': 'ok', 'info': 'Test1'}, - {'name': 'test3', 'status': 'failed', 'info': 'Test1'} + {'name': 'test3', 'status': 'failed', 'info': 'Test1'}, ] diff --git a/tests/core/core_logs_test.py b/tests/core/core_logs_test.py index a82c3234..06494d2e 100644 --- a/tests/core/core_logs_test.py +++ b/tests/core/core_logs_test.py @@ -44,10 +44,7 @@ def backup_func(): def skale_container(): client = docker_client() container = client.containers.run( - image=TEST_IMAGE, - name=TEST_SKALE_NAME, - detach=True, - entrypoint=TEST_ENTRYPOINT + image=TEST_IMAGE, name=TEST_SKALE_NAME, detach=True, entrypoint=TEST_ENTRYPOINT ) time.sleep(10) try: @@ -77,8 +74,8 @@ def test_create_logs_dump(backup_func, skale_container, removed_containers_folde content = data_file.readlines() assert content == [ 'Hello, SKALE!\n', - '================================================================================\n', # noqa - 'Hello, SKALE!\n' + '================================================================================\n', # noqa + 'Hello, SKALE!\n', ] assert os.path.exists(os.path.join(TEST_ARCHIVE_FOLDER_PATH, 'removed_containers')) diff --git a/tests/core/host/docker_config_test.py b/tests/core/host/docker_config_test.py index 5a93331a..87eb391c 100644 --- 
a/tests/core/host/docker_config_test.py +++ b/tests/core/host/docker_config_test.py @@ -62,7 +62,7 @@ def test_ensure_service_overriden_config(tmp_dir): '[Service]', 'ExecStart=', 'ExecStart=/usr/bin/dockerd', - 'ExecStartPre=/bin/mkdir -p /var/run/skale' + 'ExecStartPre=/bin/mkdir -p /var/run/skale', ] ) assert r == DockerConfigResult.CHANGED @@ -74,7 +74,7 @@ def test_ensure_service_overriden_config(tmp_dir): '[Service]', 'ExecStart=', 'ExecStart=/usr/bin/dockerd', - 'ExecStartPre=/bin/mkdir -p /var/run/skale' + 'ExecStartPre=/bin/mkdir -p /var/run/skale', ] ) assert r == DockerConfigResult.UNCHANGED @@ -93,10 +93,7 @@ def test_ensure_docker_daemon_config(tmp_dir): with open(daemon_config_path, 'r') as daemon_config_file: conf = json.load(daemon_config_file) assert conf['live-restore'] is True - assert conf['hosts'] == [ - 'fd://', - 'unix:///var/run/skale/docker.sock' - ] + assert conf['hosts'] == ['fd://', 'unix:///var/run/skale/docker.sock'] assert r == DockerConfigResult.CHANGED conf.pop('hosts') @@ -108,10 +105,7 @@ def test_ensure_docker_daemon_config(tmp_dir): with open(daemon_config_path, 'r') as daemon_config_file: conf = json.load(daemon_config_file) assert conf['live-restore'] is True - assert conf['hosts'] == [ - 'fd://', - 'unix:///var/run/skale/docker.sock' - ] + assert conf['hosts'] == ['fd://', 'unix:///var/run/skale/docker.sock'] assert conf['test'] == 'TEST' assert r == DockerConfigResult.CHANGED diff --git a/tests/core/host/kernel_config_test.py b/tests/core/host/kernel_config_test.py index 92e4e8e1..5ea64f93 100644 --- a/tests/core/host/kernel_config_test.py +++ b/tests/core/host/kernel_config_test.py @@ -2,10 +2,7 @@ import pytest -from node_cli.core.host import ( - is_btrfs_module_autoloaded, - ensure_btrfs_kernel_module_autoloaded -) +from node_cli.core.host import is_btrfs_module_autoloaded, ensure_btrfs_kernel_module_autoloaded @pytest.fixture diff --git a/tests/core/migration_test.py b/tests/core/migration_test.py index 
7509bc14..585e6fb0 100644 --- a/tests/core/migration_test.py +++ b/tests/core/migration_test.py @@ -41,7 +41,10 @@ def test_migration(base_rules): migrate() res = run_cmd(['iptables', '-S']) output = res.stdout.decode('utf-8') - assert output == f'-P INPUT ACCEPT\n-P FORWARD ACCEPT\n-P OUTPUT ACCEPT\n-N {CUSTOM_CHAIN_NAME}\n-A INPUT -p tcp -m tcp --dport 22 -j ACCEPT\n-A INPUT -p udp -m udp --dport 53 -j ACCEPT\n-A {CUSTOM_CHAIN_NAME} -p tcp -m tcp --dport 2222 -j ACCEPT\n' # noqa + assert ( + output + == f'-P INPUT ACCEPT\n-P FORWARD ACCEPT\n-P OUTPUT ACCEPT\n-N {CUSTOM_CHAIN_NAME}\n-A INPUT -p tcp -m tcp --dport 22 -j ACCEPT\n-A INPUT -p udp -m udp --dport 53 -j ACCEPT\n-A {CUSTOM_CHAIN_NAME} -p tcp -m tcp --dport 2222 -j ACCEPT\n' # noqa + ) nft = NFTablesManager(family='ip', table='filter') assert nft.get_rules(chain='INPUT') == [ { diff --git a/tests/docker_utils_test.py b/tests/docker_utils_test.py index 2d61a489..fd3e8d3f 100644 --- a/tests/docker_utils_test.py +++ b/tests/docker_utils_test.py @@ -5,11 +5,7 @@ import mock import pytest -from node_cli.utils.docker_utils import ( - docker_cleanup, - save_container_logs, - safe_rm -) +from node_cli.utils.docker_utils import docker_cleanup, save_container_logs, safe_rm from node_cli.configs import REMOVED_CONTAINERS_FOLDER_PATH @@ -19,10 +15,7 @@ def simple_container(dclient, simple_image, docker_hc): c = None try: info = dclient.api.create_container( - simple_image, - detach=True, - name=name, - host_config=docker_hc + simple_image, detach=True, name=name, host_config=docker_hc ) c = dclient.containers.get(info['Id']) c.restart() @@ -57,7 +50,7 @@ def test_save_container_logs(simple_container, tmp_dir_path): 'INFO:__main__:Test 7\n', 'INFO:__main__:Test 8\n', 'INFO:__main__:Test 9\n', - 'INFO:__main__:Waiting\n' + 'INFO:__main__:Waiting\n', ] save_container_logs(simple_container, log_path, head=10, tail=5) with open(log_path) as log_file: @@ -73,7 +66,7 @@ def test_save_container_logs(simple_container, 
tmp_dir_path): 'INFO:__main__:Test 7\n', 'INFO:__main__:Test 8\n', 'INFO:__main__:Test 9\n', - 'INFO:__main__:Waiting\n' + 'INFO:__main__:Waiting\n', ] diff --git a/tests/helper.py b/tests/helper.py index 805fcf51..f2209088 100644 --- a/tests/helper.py +++ b/tests/helper.py @@ -29,33 +29,20 @@ TEST_SCHAINS_MNT_DIR_SYNC = 'tests/tmp' -TEST_META_V1 = { - 'version': '0.1.1', - 'config_stream': 'develop' -} - -TEST_META_V2 = { - 'version': '0.1.1', - 'config_stream': 'develop', - 'docker_lvmpy_stream': '1.1.2' +TEST_META_V1 = {'version': '0.1.1', 'config_stream': 'develop'} -} +TEST_META_V2 = {'version': '0.1.1', 'config_stream': 'develop', 'docker_lvmpy_stream': '1.1.2'} TEST_META_V3 = { 'version': '0.1.1', 'config_stream': 'develop', 'docker_lvmpy_stream': '1.1.2', 'os_id': 'ubuntu', - 'os_version': '18.04' + 'os_version': '18.04', } -def response_mock( - status_code=0, - json_data=None, - headers=None, - raw=None -): +def response_mock(status_code=0, json_data=None, headers=None, raw=None): result = MagicMock() result.status_code = status_code @@ -75,10 +62,8 @@ def run_command(command, params=[], input=''): return runner.invoke(command, params, input=input) -def run_command_mock(mock_call_path, response_mock, - command, params=[], input=''): - with mock.patch(mock_call_path, - new=request_mock(response_mock)): +def run_command_mock(mock_call_path, response_mock, command, params=[], input=''): + with mock.patch(mock_call_path, new=request_mock(response_mock)): return run_command(command, params, input=input) diff --git a/tests/resources_test.py b/tests/resources_test.py index 19755e03..39effaf7 100644 --- a/tests/resources_test.py +++ b/tests/resources_test.py @@ -9,12 +9,45 @@ from node_cli.core.resources import ( compose_resource_allocation_config, update_resource_allocation, - get_cpu_alloc, get_memory_alloc, verify_disk_size + get_cpu_alloc, + get_memory_alloc, + verify_disk_size, ) from node_cli.utils.helper import write_json, safe_load_yml -SCHAIN_VOLUME_PARTS 
= {'large': {'max_consensus_storage_bytes': 21311992627, 'max_file_storage_bytes': 21311992627, 'max_reserved_storage_bytes': 7103997542, 'max_skaled_leveldb_storage_bytes': 21311992627}, 'medium': {'max_consensus_storage_bytes': 2663999078, 'max_file_storage_bytes': 2663999078, 'max_reserved_storage_bytes': 887999692, 'max_skaled_leveldb_storage_bytes': 2663999078}, 'small': {'max_consensus_storage_bytes': 166499942, 'max_file_storage_bytes': 166499942, 'max_reserved_storage_bytes': 55499980, 'max_skaled_leveldb_storage_bytes': 166499942}, 'test': {'max_consensus_storage_bytes': 2663999078, 'max_file_storage_bytes': 2663999078, 'max_reserved_storage_bytes': 887999692, 'max_skaled_leveldb_storage_bytes': 2663999078}, 'test4': {'max_consensus_storage_bytes': 2663999078, 'max_file_storage_bytes': 2663999078, 'max_reserved_storage_bytes': 887999692, 'max_skaled_leveldb_storage_bytes': 2663999078}} # noqa +SCHAIN_VOLUME_PARTS = { + 'large': { + 'max_consensus_storage_bytes': 21311992627, + 'max_file_storage_bytes': 21311992627, + 'max_reserved_storage_bytes': 7103997542, + 'max_skaled_leveldb_storage_bytes': 21311992627, + }, + 'medium': { + 'max_consensus_storage_bytes': 2663999078, + 'max_file_storage_bytes': 2663999078, + 'max_reserved_storage_bytes': 887999692, + 'max_skaled_leveldb_storage_bytes': 2663999078, + }, + 'small': { + 'max_consensus_storage_bytes': 166499942, + 'max_file_storage_bytes': 166499942, + 'max_reserved_storage_bytes': 55499980, + 'max_skaled_leveldb_storage_bytes': 166499942, + }, + 'test': { + 'max_consensus_storage_bytes': 2663999078, + 'max_file_storage_bytes': 2663999078, + 'max_reserved_storage_bytes': 887999692, + 'max_skaled_leveldb_storage_bytes': 2663999078, + }, + 'test4': { + 'max_consensus_storage_bytes': 2663999078, + 'max_file_storage_bytes': 2663999078, + 'max_reserved_storage_bytes': 887999692, + 'max_skaled_leveldb_storage_bytes': 2663999078, + }, +} # noqa DEFAULT_ENV_TYPE = 'devnet' @@ -67,7 +100,12 @@ def 
test_generate_resource_allocation_config(): assert resource_allocation_config['schain']['disk']['large'] == 71039975424 assert resource_allocation_config['ima']['cpu_shares'] == { - 'large': 204, 'medium': 25, 'small': 1, 'test': 25, 'test4': 25} + 'large': 204, + 'medium': 25, + 'small': 1, + 'test': 25, + 'test4': 25, + } assert isinstance(resource_allocation_config['ima']['mem'], dict) assert resource_allocation_config['schain']['volume_limits'] == SCHAIN_VOLUME_PARTS @@ -80,10 +118,7 @@ def test_update_allocation_config(resource_alloc_config): assert json.load(jfile) != INITIAL_CONFIG -def test_get_static_disk_alloc_devnet( - params_by_env_type, - schain_allocation_data -): +def test_get_static_disk_alloc_devnet(params_by_env_type, schain_allocation_data): env_configs = params_by_env_type['envs']['devnet'] block_device = '/dev/test' with mock.patch('node_cli.core.resources.get_disk_size', return_value=SMALL_DISK_SIZE): @@ -101,7 +136,7 @@ def test_get_static_disk_alloc_devnet( 'medium': 8879996928, 'small': 554999808, 'test': 8879996928, - 'test4': 8879996928 + 'test4': 8879996928, } @@ -164,5 +199,5 @@ def test_leveldb_limits(): 'medium': {'contract_storage': 1598399446, 'db_storage': 532799815}, 'small': {'contract_storage': 99899965, 'db_storage': 33299988}, 'test': {'contract_storage': 1598399446, 'db_storage': 532799815}, - 'test4': {'contract_storage': 1598399446, 'db_storage': 532799815} + 'test4': {'contract_storage': 1598399446, 'db_storage': 532799815}, } diff --git a/tests/routes_test.py b/tests/routes_test.py index 9c00b8f1..ce0adc4e 100644 --- a/tests/routes_test.py +++ b/tests/routes_test.py @@ -1,6 +1,10 @@ import pytest -from node_cli.configs.routes import (route_exists, get_route, get_all_available_routes, - RouteNotFoundException) +from node_cli.configs.routes import ( + route_exists, + get_route, + get_all_available_routes, + RouteNotFoundException, +) ALL_V1_ROUTES = [ @@ -14,23 +18,19 @@ '/api/v1/node/exit/status', 
'/api/v1/node/set-domain-name', '/api/v1/node/update-safe', - '/api/v1/health/containers', '/api/v1/health/schains', '/api/v1/health/sgx', - '/api/v1/schains/config', '/api/v1/schains/list', '/api/v1/schains/dkg-statuses', '/api/v1/schains/firewall-rules', '/api/v1/schains/repair', '/api/v1/schains/get', - '/api/v1/ssl/status', '/api/v1/ssl/upload', - '/api/v1/wallet/info', - '/api/v1/wallet/send-eth' + '/api/v1/wallet/send-eth', ] diff --git a/tests/simple_container/main.py b/tests/simple_container/main.py index 00d109cc..177f2c68 100644 --- a/tests/simple_container/main.py +++ b/tests/simple_container/main.py @@ -9,7 +9,7 @@ handlers=[ StreamHandler(), ], - level=logging.INFO + level=logging.INFO, ) logger = logging.getLogger(__name__) diff --git a/tests/tools_meta_test.py b/tests/tools_meta_test.py index 431533db..9a217233 100644 --- a/tests/tools_meta_test.py +++ b/tests/tools_meta_test.py @@ -2,10 +2,14 @@ from node_cli.configs import META_FILEPATH from node_cli.utils.meta import ( - CliMeta, compose_default_meta, - DEFAULT_CONFIG_STREAM, DEFAULT_VERSION, - ensure_meta, get_meta_info, - save_meta, update_meta + CliMeta, + compose_default_meta, + DEFAULT_CONFIG_STREAM, + DEFAULT_VERSION, + ensure_meta, + get_meta_info, + save_meta, + update_meta, ) from tests.helper import TEST_META_V1, TEST_META_V2, TEST_META_V3 @@ -63,8 +67,13 @@ def test_save_meta(meta_file_v2): def test_update_meta_from_v2_to_v3(meta_file_v2): old_meta = get_meta_info() - update_meta(version='3.3.3', config_stream='1.1.1', - docker_lvmpy_stream='1.2.2', os_id='debian', os_version='11') + update_meta( + version='3.3.3', + config_stream='1.1.1', + docker_lvmpy_stream='1.2.2', + os_id='debian', + os_version='11', + ) meta = get_meta_info() assert meta.version == '3.3.3' assert meta.config_stream == '1.1.1' @@ -75,8 +84,13 @@ def test_update_meta_from_v2_to_v3(meta_file_v2): def test_update_meta_from_v1(meta_file_v1): - update_meta(version='4.4.4', config_stream='beta', - 
docker_lvmpy_stream='1.3.3', os_id='debian', os_version='11') + update_meta( + version='4.4.4', + config_stream='beta', + docker_lvmpy_stream='1.3.3', + os_id='debian', + os_version='11', + ) meta = get_meta_info() assert meta.version == '4.4.4' assert meta.config_stream == 'beta' @@ -86,8 +100,13 @@ def test_update_meta_from_v1(meta_file_v1): def test_update_meta_from_v3(meta_file_v3): - update_meta(version='5.5.5', config_stream='stable', - docker_lvmpy_stream='1.2.3', os_id='ubuntu', os_version='20.04') + update_meta( + version='5.5.5', + config_stream='stable', + docker_lvmpy_stream='1.2.3', + os_id='ubuntu', + os_version='20.04', + ) meta = get_meta_info() assert meta.version == '5.5.5' assert meta.config_stream == 'stable' diff --git a/tests/utils/decorators_test.py b/tests/utils/decorators_test.py index e67e1f04..4c659984 100644 --- a/tests/utils/decorators_test.py +++ b/tests/utils/decorators_test.py @@ -11,6 +11,7 @@ def test_check_not_inited(): @check_not_inited def requires_not_inited_node(): pass + with mock.patch('node_cli.utils.decorators.is_node_inited', return_value=False): requires_not_inited_node() with mock.patch('node_cli.utils.decorators.is_node_inited', return_value=True): @@ -22,6 +23,7 @@ def test_check_inited(): @check_inited def requires_inited_node(): pass + with mock.patch('node_cli.utils.decorators.is_node_inited', return_value=True): requires_inited_node() with mock.patch('node_cli.utils.decorators.is_node_inited', return_value=False): @@ -33,6 +35,7 @@ def test_check_user(mocked_g_config): @check_user def this_checks_user(): pass + generate_g_config_file(GLOBAL_SKALE_DIR, GLOBAL_SKALE_CONF_FILEPATH) this_checks_user() write_json(GLOBAL_SKALE_CONF_FILEPATH, {'user': 'skaletest'}) diff --git a/tests/utils/global_config_test.py b/tests/utils/global_config_test.py index e159d51f..cb4fb05a 100644 --- a/tests/utils/global_config_test.py +++ b/tests/utils/global_config_test.py @@ -1,4 +1,3 @@ - import os import mock from 
node_cli.utils.global_config import read_g_config, generate_g_config_file From 101e97fbc76d253eabaec422d57c4eca0d6ae92e Mon Sep 17 00:00:00 2001 From: Dmytro Date: Tue, 15 Apr 2025 11:31:26 +0100 Subject: [PATCH 025/332] Fix memory check in preinstall --- node_cli/core/checks.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/node_cli/core/checks.py b/node_cli/core/checks.py index 78a66fa7..f9d42212 100644 --- a/node_cli/core/checks.py +++ b/node_cli/core/checks.py @@ -219,8 +219,7 @@ def cpu_physical(self) -> CheckResult: @preinstall def memory(self) -> CheckResult: name = 'memory' - mem_info = (psutil.virtual_memory().total,) - actual = mem_info[0] + actual = psutil.virtual_memory().total expected = self.requirements['memory'] actual_gb = round(actual / 1024**3, 2) expected_gb = round(expected / 1024**3, 2) From c0bd91dce7065dda4f3c07fdb32f2107662519e2 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20Gon=C3=A7alves?= Date: Thu, 17 Apr 2025 19:03:23 +0100 Subject: [PATCH 026/332] Add initial Mirage node CLI structure and boot commands Implements the foundational structure for the Mirage node type within the node-cli, including the build system integration and the core `mirage boot` commands. Changes include: - Added a new 'mirage' build type to `scripts/build.sh` and related scripts to generate the `skale-*-mirage` binary. - Created new CLI files `node_cli/cli/mirage_boot.py` and `node_cli/cli/mirage_node.py`. - Added the `mirage boot` command group with subcommands: - `init`: Implements logic to run the `mirage-boot` service from `docker-compose-mirage.yml`, ensuring `SKALE_NETWORK_TYPE=mirage` is set. - `register`: Reuses existing core registration logic. - `signature`: Reuses existing core signature logic. - `migrate`: Implements logic to switch from mirage boot to mirage main. - Integrated the base `mirage` command group and existing `logs`, `wallet`, `ssl` groups into `main.py` for the `mirage` build type. 
- Updated `README.md` with installation instructions for the `mirage` binary. --- .github/workflows/publish.yml | 59 +++++++++++++ Dockerfile | 3 +- README.md | 26 +++--- node_cli/cli/mirage_boot.py | 87 +++++++++++++++++++ node_cli/cli/mirage_node.py | 39 +++++++++ node_cli/configs/__init__.py | 1 + node_cli/configs/env.py | 53 ++++++++---- node_cli/core/checks.py | 12 ++- node_cli/core/mirage_boot.py | 77 +++++++++++++++++ node_cli/core/nginx.py | 8 +- node_cli/core/node.py | 81 +++++++++++------- node_cli/main.py | 12 +++ node_cli/operations/__init__.py | 2 + node_cli/operations/base.py | 112 ++++++++++++++++++++----- node_cli/operations/mirage_boot.py | 19 +++++ node_cli/utils/docker_utils.py | 129 +++++++++++++++++++++-------- node_cli/utils/meta.py | 15 +++- scripts/build.sh | 4 +- scripts/generate_info.sh | 2 +- tests/core/core_node_test.py | 16 ++-- text.yml | 8 ++ 21 files changed, 635 insertions(+), 130 deletions(-) create mode 100644 node_cli/cli/mirage_boot.py create mode 100644 node_cli/cli/mirage_node.py create mode 100644 node_cli/core/mirage_boot.py create mode 100644 node_cli/operations/mirage_boot.py diff --git a/.github/workflows/publish.yml b/.github/workflows/publish.yml index abdf44a9..a043862f 100644 --- a/.github/workflows/publish.yml +++ b/.github/workflows/publish.yml @@ -178,3 +178,62 @@ jobs: asset_path: /home/ubuntu/dist/sha512sum asset_name: ${{ matrix.asset_name }}.sha512 asset_content_type: text/plain + + build_and_publish_mirage: + if: github.event.pull_request.merged + needs: create_release + name: Build and publish for ${{ matrix.os }} + runs-on: ${{ matrix.os }} + strategy: + matrix: + include: + - os: ubuntu-22.04 + asset_name: skale-${{ needs.create_release.outputs.version }}-Linux-x86_64-mirage + steps: + - uses: actions/checkout@v2 + - name: Set up Python 3.11 + uses: actions/setup-python@v1 + with: + python-version: 3.11 + + - name: Install ubuntu dependencies + if: matrix.os == 'ubuntu-22.04' + run: | + sudo apt-get update 
+ + - name: Checkout submodules + run: git submodule update --init + + - name: Build mirage release binary + run: | + mkdir -p ./dist + docker build . -t node-cli-builder + docker run -v /home/ubuntu/dist:/app/dist node-cli-builder scripts/build.sh ${{ needs.create_release.outputs.version }} ${{ needs.create_release.outputs.branch }} mirage + ls -altr /home/ubuntu/dist/ + docker rm -f $(docker ps -aq) + + - name: Save sha512sum + run: | + sudo sha512sum /home/ubuntu/dist/${{ matrix.asset_name }} | sudo tee > /dev/null /home/ubuntu/dist/sha512sum + + - name: Upload release mirage CLI + id: upload-mirage-release-asset + uses: actions/upload-release-asset@v1 + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + with: + upload_url: ${{ needs.create_release.outputs.upload_url }} + asset_path: /home/ubuntu/dist/${{ matrix.asset_name }} + asset_name: ${{ matrix.asset_name }} + asset_content_type: application/octet-stream + + - name: Upload release mirage CLI checksum + id: upload-mirage-release-checksum + uses: actions/upload-release-asset@v1 + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + with: + upload_url: ${{ needs.create_release.outputs.upload_url }} + asset_path: /home/ubuntu/dist/sha512sum + asset_name: ${{ matrix.asset_name }}.sha512 + asset_content_type: text/plain diff --git a/Dockerfile b/Dockerfile index c77efb4b..c2fc6972 100644 --- a/Dockerfile +++ b/Dockerfile @@ -24,7 +24,6 @@ COPY . . ENV PATH=/app/buildvenv/bin:$PATH ENV PYTHONPATH="{PYTHONPATH}:/usr/lib/python3/dist-packages" -RUN python3.11 -m venv /app/buildvenv && \ - pip install --upgrade pip && \ +RUN pip install --upgrade pip && \ pip install wheel setuptools==63.2.0 && \ pip install -e '.[dev]' diff --git a/README.md b/README.md index b2b2a95e..32276159 100644 --- a/README.md +++ b/README.md @@ -9,17 +9,17 @@ SKALE Node CLI, part of the SKALE suite of validator tools, is the command line ## Table of Contents 1. [Installation](#installation) -2. 
[CLI usage](#cli-usage) - 2.1 [Top level commands](#top-level-commands) - 2.2 [Node](#node-commands) - 2.3 [Wallet](#wallet-commands) - 2.4 [sChains](#schain-commands) - 2.5 [Health](#health-commands) - 2.6 [SSL](#ssl-commands) - 2.7 [Logs](#logs-commands) +2. [CLI usage](#cli-usage) + 2.1 [Top level commands](#top-level-commands) + 2.2 [Node](#node-commands) + 2.3 [Wallet](#wallet-commands) + 2.4 [sChains](#schain-commands) + 2.5 [Health](#health-commands) + 2.6 [SSL](#ssl-commands) + 2.7 [Logs](#logs-commands) 2.8 [Resources allocation](#resources-allocation-commands) -3. [Sync CLI usage](#sync-cli-usage) - 3.1 [Top level commands](#top-level-commands-sync) +3. [Sync CLI usage](#sync-cli-usage) + 3.1 [Top level commands](#top-level-commands-sync) 3.2 [Sync node commands](#sync-node-commands) 4. [Exit codes](#exit-codes) 5. [Development](#development) @@ -42,6 +42,12 @@ For Sync node version: VERSION_NUM={put the version number here} && sudo -E bash -c "curl -L https://github.com/skalenetwork/node-cli/releases/download/$VERSION_NUM/skale-$VERSION_NUM-`uname -s`-`uname -m`-sync > /usr/local/bin/skale" ``` +For Mirage node version: + +```shell +VERSION_NUM={put the version number here} && sudo -E bash -c "curl -L https://github.com/skalenetwork/node-cli/releases/download/$VERSION_NUM/skale-$VERSION_NUM-`uname -s`-`uname -m`-mirage > /usr/local/bin/mirage" +``` + - Apply executable permissions to the downloaded binary: ```shell diff --git a/node_cli/cli/mirage_boot.py b/node_cli/cli/mirage_boot.py new file mode 100644 index 00000000..81da33b7 --- /dev/null +++ b/node_cli/cli/mirage_boot.py @@ -0,0 +1,87 @@ +# -*- coding: utf-8 -*- +# +# This file is part of node-cli +# +# Copyright (C) 2025-Present SKALE Labs +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. 
+# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Affero General Public License for more details. +# +# You should have received a copy of the GNU Affero General Public License +# along with this program. If not, see . + +import click + +from node_cli.core.node import get_node_signature, register_node as register +from node_cli.core.mirage_boot import init, migrate +from node_cli.configs import DEFAULT_NODE_BASE_PORT +from node_cli.utils.helper import streamed_cmd, IP_TYPE, error_exit, abort_if_false + + +@click.group('boot', help='Commands for the Mirage Boot phase.') +def mirage_boot_cli(): + pass + + +@mirage_boot_cli.command('init', help='Initialize Mirage node (Boot Phase).') +@click.argument('env_file') +@streamed_cmd +def init_boot(env_file): + init(env_file) + + +@mirage_boot_cli.command( + 'register', help='Register Mirage node in SKALE Manager (during Boot Phase).' +) +@click.option( + '--name', '-n', required=True, prompt='Enter mirage node name', help='Mirage node name' +) +@click.option( + '--ip', + prompt='Enter node public IP', + type=IP_TYPE, + help='Public IP for RPC connections & consensus (required)', +) +@click.option( + '--port', '-p', default=DEFAULT_NODE_BASE_PORT, type=int, help='Base port for node sChains' +) +@click.option('--domain', '-d', prompt='Enter node domain name', type=str, help='Node domain name') +@streamed_cmd +def register_boot(name, ip, port, domain): + register(name=name, p2p_ip=ip, public_ip=ip, port=port, domain_name=domain) + + +@mirage_boot_cli.command( + 'signature', help='Get mirage node signature for a validator ID (during Boot Phase).' 
+) +@click.argument('validator_id') +def signature_boot(validator_id): + res = get_node_signature(validator_id) + if isinstance(res, dict) and 'error' in res: + error_exit(f'Error getting signature: {res.get("message", res)}') + print(f'Signature: {res}') + + +@mirage_boot_cli.command( + 'migrate', help='Migrate mirage node from Mirage Boot Phase to Mirage Main Phase.' +) +@click.option( + '--yes', + is_flag=True, + callback=abort_if_false, + expose_value=False, + prompt='Are you sure you want to update SKALE node software?', +) +@click.option('--pull-config', 'pull_config_for_schain', hidden=True, type=str) +@click.option('--unsafe', 'unsafe_ok', help='Allow unsafe update', hidden=True, is_flag=True) +@click.argument('env_file') +@streamed_cmd +def migrate_boot(env_file, pull_config_for_schain, unsafe_ok): + migrate(env_file, pull_config_for_schain, unsafe_ok) diff --git a/node_cli/cli/mirage_node.py b/node_cli/cli/mirage_node.py new file mode 100644 index 00000000..02d47cd2 --- /dev/null +++ b/node_cli/cli/mirage_node.py @@ -0,0 +1,39 @@ +# -*- coding: utf-8 -*- +# +# This file is part of node-cli +# +# Copyright (C) 2025-Present SKALE Labs +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Affero General Public License for more details. +# +# You should have received a copy of the GNU Affero General Public License +# along with this program. If not, see . 
+ + +import click + +from node_cli.utils.helper import ( + safe_load_texts, +) + + +G_TEXTS = safe_load_texts() +TEXTS = G_TEXTS['mirage'] + + +@click.group('node', help='Commands for the Mirage node.') +def mirage_node_cli(): + pass + + +@mirage_node_cli.group(help='Mirage node commands') +def mirage(): + pass diff --git a/node_cli/configs/__init__.py b/node_cli/configs/__init__.py index 33ee9ab2..58f002d9 100644 --- a/node_cli/configs/__init__.py +++ b/node_cli/configs/__init__.py @@ -56,6 +56,7 @@ COMPOSE_PATH = os.path.join(CONTAINER_CONFIG_PATH, 'docker-compose.yml') SYNC_COMPOSE_PATH = os.path.join(CONTAINER_CONFIG_PATH, 'docker-compose-sync.yml') +MIRAGE_COMPOSE_PATH = os.path.join(CONTAINER_CONFIG_PATH, 'docker-compose-mirage.yml') STATIC_PARAMS_FILEPATH = os.path.join(CONTAINER_CONFIG_PATH, 'static_params.yaml') NGINX_TEMPLATE_FILEPATH = os.path.join(CONTAINER_CONFIG_PATH, 'nginx.conf.j2') diff --git a/node_cli/configs/env.py b/node_cli/configs/env.py index c6528421..80213f13 100644 --- a/node_cli/configs/env.py +++ b/node_cli/configs/env.py @@ -24,34 +24,47 @@ from node_cli.configs import SKALE_DIR, CONTAINER_CONFIG_PATH from node_cli.configs.alias_address_validation import validate_env_alias_or_address, ContractType +from node_cli.core.node import NodeTypes from node_cli.utils.helper import error_exit SKALE_DIR_ENV_FILEPATH = os.path.join(SKALE_DIR, '.env') CONFIGS_ENV_FILEPATH = os.path.join(CONTAINER_CONFIG_PATH, '.env') -ALLOWED_ENV_TYPES = ['mainnet', 'testnet', 'qanet', 'devnet'] +ALLOWED_SKALE_ENV_TYPES = ['mainnet', 'testnet', 'qanet', 'devnet'] +ALLOWED_MIRAGE_ENV_TYPES = ['mainnet-mirage', 'devnet-mirage'] +ALLOWED_ENV_TYPES = [*ALLOWED_SKALE_ENV_TYPES, *ALLOWED_MIRAGE_ENV_TYPES] -REQUIRED_PARAMS: Dict[str, str] = { +PROTO_REQUIRED_PARAMS: Dict[str, str] = { 'CONTAINER_CONFIGS_STREAM': '', 'ENDPOINT': '', 'MANAGER_CONTRACTS': '', - 'IMA_CONTRACTS': '', - 'FILEBEAT_HOST': '', 'DISK_MOUNTPOINT': '', 'SGX_SERVER_URL': '', - 'DOCKER_LVMPY_STREAM': 
'', 'ENV_TYPE': '', } +REQUIRED_PARAMS_SKALE: Dict[str, str] = { + **PROTO_REQUIRED_PARAMS, + 'IMA_CONTRACTS': '', + 'DOCKER_LVMPY_STREAM': '', + 'FILEBEAT_HOST': '', +} + +REQUIRED_PARAMS_MIRAGE_BOOT: Dict[str, str] = { + **PROTO_REQUIRED_PARAMS, + 'IMA_CONTRACTS': '', + 'FILEBEAT_HOST': '', +} +REQUIRED_PARAMS_MIRAGE: Dict[str, str] = { + **PROTO_REQUIRED_PARAMS, + 'FILEBEAT_HOST': '', +} + REQUIRED_PARAMS_SYNC: Dict[str, str] = { + **PROTO_REQUIRED_PARAMS, 'SCHAIN_NAME': '', - 'CONTAINER_CONFIGS_STREAM': '', - 'ENDPOINT': '', - 'MANAGER_CONTRACTS': '', 'IMA_CONTRACTS': '', - 'DISK_MOUNTPOINT': '', 'DOCKER_LVMPY_STREAM': '', - 'ENV_TYPE': '', } OPTIONAL_PARAMS: Dict[str, str] = { @@ -76,10 +89,12 @@ def absent_required_params(params: Dict[str, str]) -> List[str]: def get_validated_env_config( - env_filepath: str = SKALE_DIR_ENV_FILEPATH, sync_node: bool = False + env_filepath: str = SKALE_DIR_ENV_FILEPATH, + node_type: NodeTypes = NodeTypes.REGULAR, + is_mirage_boot: bool = False, ) -> Dict[str, str]: load_env_file(env_filepath) - params = build_env_params(sync_node) + params = build_env_params(node_type=node_type, is_mirage_boot=is_mirage_boot) populate_env_params(params) validate_env_params(params) return params @@ -90,9 +105,19 @@ def load_env_file(env_filepath: str) -> None: error_exit(f'Failed to load environment from {env_filepath}') -def build_env_params(sync_node: bool = False) -> Dict[str, str]: +def build_env_params( + node_type: NodeTypes = NodeTypes.REGULAR, is_mirage_boot: bool = False +) -> Dict[str, str]: """Return environment variables dictionary with keys based on node type.""" - params = REQUIRED_PARAMS_SYNC.copy() if sync_node else REQUIRED_PARAMS.copy() + if node_type == NodeTypes.MIRAGE and is_mirage_boot: + params = REQUIRED_PARAMS_MIRAGE_BOOT.copy() + elif node_type == NodeTypes.MIRAGE: + params = REQUIRED_PARAMS_MIRAGE.copy() + elif node_type == NodeTypes.SYNC: + params = REQUIRED_PARAMS_SYNC.copy() + else: + params = 
REQUIRED_PARAMS_SKALE.copy() + params.update(OPTIONAL_PARAMS) return params diff --git a/node_cli/core/checks.py b/node_cli/core/checks.py index f9d42212..7ffe3a81 100644 --- a/node_cli/core/checks.py +++ b/node_cli/core/checks.py @@ -154,6 +154,9 @@ def merge_reports( class BaseChecker: + def __init__(self, requirements: Dict) -> None: + self.requirements = requirements + def _ok(self, name: str, info: Optional[Union[str, Dict]] = None) -> CheckResult: return CheckResult(name=name, status='ok', info=info) @@ -169,7 +172,8 @@ def get_checks(self, check_type: CheckType = CheckType.ALL) -> FuncList: methods = inspect.getmembers( type(self), predicate=lambda m: inspect.isfunction(m) - and getattr(m, '_check_type', None) in allowed_types, + and getattr(m, '_check_type', None) in allowed_types + and self.requirements.get(m.__name__, None) is not None, ) return [functools.partial(m[1], self) for m in methods] @@ -190,9 +194,9 @@ class MachineChecker(BaseChecker): def __init__( self, requirements: Dict, disk_device: str, network_timeout: Optional[int] = None ) -> None: - self.requirements = requirements self.disk_device = disk_device self.network_timeout = network_timeout or NETWORK_CHECK_TIMEOUT + super().__init__(requirements=requirements) @preinstall def cpu_total(self) -> CheckResult: @@ -274,7 +278,7 @@ def network(self) -> CheckResult: class PackageChecker(BaseChecker): def __init__(self, requirements: Dict) -> None: - self.requirements = requirements + super().__init__(requirements=requirements) def _check_apt_package(self, package_name: str, version: str = None) -> CheckResult: # TODO: check versions @@ -327,7 +331,7 @@ def _version_from_dpkg_output(self, output: str) -> str: class DockerChecker(BaseChecker): def __init__(self, requirements: Dict) -> None: self.docker_client = docker.from_env() - self.requirements = requirements + super().__init__(requirements=requirements) def _check_docker_command(self) -> Optional[str]: return shutil.which('docker') diff --git 
a/node_cli/core/mirage_boot.py b/node_cli/core/mirage_boot.py new file mode 100644 index 00000000..e4d601dc --- /dev/null +++ b/node_cli/core/mirage_boot.py @@ -0,0 +1,77 @@ +# -*- coding: utf-8 -*- +# +# This file is part of node-cli +# +# Copyright (C) 2025-Present SKALE Labs +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Affero General Public License for more details. +# +# You should have received a copy of the GNU Affero General Public License +# along with this program. If not, see . + + +import logging +import time + +from node_cli.configs import TM_INIT_TIMEOUT +from node_cli.core.resources import update_resource_allocation +from node_cli.core.node import NodeTypes, compose_node_env, is_base_containers_alive +from node_cli.operations import init_mirage_boot_op, migrate_mirage_boot_op +from node_cli.utils.decorators import check_not_inited, check_inited, check_user +from node_cli.utils.exit_codes import CLIExitCodes +from node_cli.utils.helper import error_exit +from node_cli.utils.print_formatters import print_node_cmd_error +from node_cli.utils.texts import Texts + + +logger = logging.getLogger(__name__) +TEXTS = Texts() + + +@check_not_inited +def init(env_filepath: str) -> None: + env = compose_node_env( + env_filepath, + node_type=NodeTypes.MIRAGE, + is_mirage_boot=True, + ) + + init_mirage_boot_op(env_filepath, env) + logger.info('Waiting for mirage containers initialization') + time.sleep(TM_INIT_TIMEOUT) + if not is_base_containers_alive(NodeTypes.MIRAGE): + error_exit('Containers are not running', 
exit_code=CLIExitCodes.OPERATION_EXECUTION_ERROR) + logger.info('Generating mirage resource allocation file ...') + update_resource_allocation(env['ENV_TYPE']) + logger.info('Init mirage procedure finished') + + +@check_inited +@check_user +def migrate(env_filepath: str, pull_config_for_schain: str, unsafe_ok: bool = False) -> None: + logger.info('Node update started') + env = compose_node_env( + env_filepath, + inited_node=True, + sync_schains=False, + pull_config_for_schain=pull_config_for_schain, + node_type=NodeTypes.MIRAGE, + ) + migrate_ok = migrate_mirage_boot_op(env_filepath, env) + if migrate_ok: + logger.info('Waiting for containers initialization') + time.sleep(TM_INIT_TIMEOUT) + alive = is_base_containers_alive(node_type=NodeTypes.MIRAGE) + if not migrate_ok or not alive: + print_node_cmd_error() + return + else: + logger.info('Node migration from Mirage Boot to Mirage Main finished successfully!') diff --git a/node_cli/core/nginx.py b/node_cli/core/nginx.py index e87b17db..cf10a196 100644 --- a/node_cli/core/nginx.py +++ b/node_cli/core/nginx.py @@ -20,11 +20,11 @@ import logging import os.path -from node_cli.utils.docker_utils import restart_nginx_container, docker_client +from node_cli.cli.info import TYPE from node_cli.configs import NODE_CERTS_PATH, NGINX_TEMPLATE_FILEPATH, NGINX_CONFIG_FILEPATH +from node_cli.utils.docker_utils import restart_nginx_container, docker_client from node_cli.utils.helper import process_template - logger = logging.getLogger(__name__) @@ -34,10 +34,12 @@ def generate_nginx_config() -> None: ssl_on = check_ssl_certs() + regular_node = TYPE != 'mirage' template_data = { 'ssl': ssl_on, + 'regular_node': regular_node, } - logger.info(f'Processing nginx template. ssl: {ssl_on}') + logger.info(f'Processing nginx template. 
ssl: {ssl_on}, regular_node: {regular_node}') process_template(NGINX_TEMPLATE_FILEPATH, NGINX_CONFIG_FILEPATH, template_data) diff --git a/node_cli/core/node.py b/node_cli/core/node.py index ff5da975..6d55daec 100644 --- a/node_cli/core/node.py +++ b/node_cli/core/node.py @@ -73,14 +73,19 @@ from node_cli.utils.texts import Texts from node_cli.utils.exit_codes import CLIExitCodes from node_cli.utils.decorators import check_not_inited, check_inited, check_user -from node_cli.utils.docker_utils import is_admin_running, is_api_running, is_sync_admin_running +from node_cli.utils.docker_utils import ( + is_admin_running, + is_api_running, + BASE_SKALE_COMPOSE_SERVICES, + BASE_SYNC_COMPOSE_SERVICES, + BASE_MIRAGE_COMPOSE_SERVICES, +) from node_cli.migrations.focal_to_jammy import migrate as migrate_2_6 logger = logging.getLogger(__name__) TEXTS = Texts() -SYNC_BASE_CONTAINERS_AMOUNT = 2 BASE_CONTAINERS_AMOUNT = 5 BLUEPRINT_NAME = 'node' @@ -96,11 +101,20 @@ class NodeStatuses(Enum): NOT_CREATED = 5 -def is_update_safe(sync_node: bool = False) -> bool: - if not sync_node and not is_admin_running() and not is_api_running(): - return True - if sync_node and not is_sync_admin_running(): - return True +class NodeTypes(Enum): + """This class contains possible node types.""" + + REGULAR = 0 + SYNC = 1 + MIRAGE = 2 + + +def is_update_safe(node_type: NodeTypes = NodeTypes.REGULAR) -> bool: + if not is_admin_running(node_type): + if node_type == NodeTypes.SYNC: + return True + elif not is_api_running(node_type): + return True status, payload = get_request(BLUEPRINT_NAME, 'update-safe') if status == 'error': return False @@ -140,9 +154,7 @@ def register_node(name, p2p_ip, public_ip, port, domain_name): def init(env_filepath): env = compose_node_env(env_filepath) - inited_ok = init_op(env_filepath, env) - if not inited_ok: - error_exit('Init operation failed', exit_code=CLIExitCodes.OPERATION_EXECUTION_ERROR) + init_op(env_filepath, env) logger.info('Waiting for containers 
initialization') time.sleep(TM_INIT_TIMEOUT) if not is_base_containers_alive(): @@ -177,15 +189,13 @@ def restore(backup_path, env_filepath, no_snapshot=False, config_only=False): def init_sync( env_filepath: str, indexer: bool, archive: bool, snapshot: bool, snapshot_from: Optional[str] ) -> None: - env = compose_node_env(env_filepath, sync_node=True) + env = compose_node_env(env_filepath, node_type=NodeTypes.SYNC) if env is None: return - inited_ok = init_sync_op(env_filepath, env, indexer, archive, snapshot, snapshot_from) - if not inited_ok: - error_exit('Init operation failed', exit_code=CLIExitCodes.OPERATION_EXECUTION_ERROR) + init_sync_op(env_filepath, env, indexer, archive, snapshot, snapshot_from) logger.info('Waiting for containers initialization') time.sleep(TM_INIT_TIMEOUT) - if not is_base_containers_alive(sync_node=True): + if not is_base_containers_alive(NodeTypes.SYNC): error_exit('Containers are not running', exit_code=CLIExitCodes.OPERATION_EXECUTION_ERROR) logger.info('Sync node initialized successfully') @@ -197,12 +207,12 @@ def update_sync(env_filepath: str, unsafe_ok: bool = False) -> None: prev_version = get_meta_info().version if (__version__ == 'test' or __version__.startswith('2.6')) and prev_version == '2.5.0': migrate_2_6() - env = compose_node_env(env_filepath, sync_node=True) + env = compose_node_env(env_filepath, node_type=NodeTypes.SYNC) update_ok = update_sync_op(env_filepath, env) if update_ok: logger.info('Waiting for containers initialization') time.sleep(TM_INIT_TIMEOUT) - alive = is_base_containers_alive(sync_node=True) + alive = is_base_containers_alive(NodeTypes.SYNC) if not update_ok or not alive: print_node_cmd_error() return @@ -213,7 +223,7 @@ def update_sync(env_filepath: str, unsafe_ok: bool = False) -> None: @check_inited @check_user def cleanup_sync() -> None: - env = compose_node_env(SKALE_DIR_ENV_FILEPATH, save=False, sync_node=True) + env = compose_node_env(SKALE_DIR_ENV_FILEPATH, save=False, 
node_type=NodeTypes.SYNC) schain_name = env['SCHAIN_NAME'] cleanup_sync_op(env, schain_name) logger.info('Sync node was cleaned up, all containers and data removed') @@ -224,17 +234,18 @@ def compose_node_env( inited_node: bool = False, sync_schains: Optional[bool] = None, pull_config_for_schain: Optional[str] = None, - sync_node: bool = False, + node_type: NodeTypes = NodeTypes.REGULAR, save: bool = True, -) -> dict: + is_mirage_boot: bool = False, +) -> dict[str, str]: if env_filepath is not None: - env_params = get_validated_env_config(env_filepath, sync_node=sync_node) + env_params = get_validated_env_config(env_filepath, node_type=node_type) if save: save_env_params(env_filepath) else: - env_params = get_validated_env_config(INIT_ENV_FILEPATH, sync_node=sync_node) + env_params = get_validated_env_config(INIT_ENV_FILEPATH, node_type=node_type) - mnt_dir = SCHAINS_MNT_DIR_SYNC if sync_node else SCHAINS_MNT_DIR_REGULAR + mnt_dir = SCHAINS_MNT_DIR_SYNC if node_type == NodeTypes.SYNC else SCHAINS_MNT_DIR_REGULAR env = { 'SKALE_DIR': SKALE_DIR, @@ -244,10 +255,10 @@ def compose_node_env( **env_params, } - if inited_node and not sync_node: + if inited_node and not node_type == NodeTypes.SYNC: env['FLASK_SECRET_KEY'] = get_flask_secret_key() - if sync_schains and not sync_node: + if sync_schains and not node_type == NodeTypes.SYNC: env['BACKUP_RUN'] = 'True' if pull_config_for_schain: @@ -404,11 +415,25 @@ def turn_on(maintenance_off, sync_schains, env_file): set_maintenance_mode_off() -def is_base_containers_alive(sync_node: bool = False): +def get_base_containers_amount(node_type: NodeTypes = NodeTypes.REGULAR): + if node_type == NodeTypes.SYNC: + return len(BASE_SYNC_COMPOSE_SERVICES) + elif node_type == NodeTypes.MIRAGE: + return len(BASE_MIRAGE_COMPOSE_SERVICES) + else: + return len(BASE_SKALE_COMPOSE_SERVICES) + + +def is_base_containers_alive(node_type: NodeTypes = NodeTypes.REGULAR) -> bool: + if node_type == NodeTypes.MIRAGE: + prefix = 'mirage_' + else: + 
prefix = 'skale_' + dclient = docker.from_env() containers = dclient.containers.list() - skale_containers = list(filter(lambda c: c.name.startswith('skale_'), containers)) - containers_amount = SYNC_BASE_CONTAINERS_AMOUNT if sync_node else BASE_CONTAINERS_AMOUNT + skale_containers = list(filter(lambda c: c.name.startswith(prefix), containers)) + containers_amount = get_base_containers_amount(node_type) return len(skale_containers) >= containers_amount diff --git a/node_cli/main.py b/node_cli/main.py index 8320331f..fd16cd78 100644 --- a/node_cli/main.py +++ b/node_cli/main.py @@ -38,6 +38,9 @@ from node_cli.cli.exit import exit_cli from node_cli.cli.resources_allocation import resources_allocation_cli from node_cli.cli.sync_node import sync_node_cli +from node_cli.cli.mirage_boot import mirage_boot_cli +from node_cli.cli.mirage_node import mirage_node_cli + from node_cli.utils.helper import safe_load_texts, init_default_logger from node_cli.configs import LONG_LINE @@ -82,6 +85,15 @@ def info(): def get_sources_list() -> List[click.MultiCommand]: if TYPE == 'sync': return [cli, sync_node_cli, ssl_cli] + elif TYPE == 'mirage': + return [ + cli, + logs_cli, + mirage_boot_cli, + mirage_node_cli, + wallet_cli, + ssl_cli, + ] else: return [ cli, diff --git a/node_cli/operations/__init__.py b/node_cli/operations/__init__.py index 5c53ec18..1b8f8f35 100644 --- a/node_cli/operations/__init__.py +++ b/node_cli/operations/__init__.py @@ -21,6 +21,8 @@ update as update_op, init as init_op, init_sync as init_sync_op, + init_mirage_boot as init_mirage_boot_op, + migrate_mirage_boot as migrate_mirage_boot_op, update_sync as update_sync_op, turn_off as turn_off_op, turn_on as turn_on_op, diff --git a/node_cli/operations/base.py b/node_cli/operations/base.py index a4446ef6..d3e8dc01 100644 --- a/node_cli/operations/base.py +++ b/node_cli/operations/base.py @@ -24,41 +24,40 @@ import logging from typing import Dict, Optional -from node_cli.cli.info import VERSION from 
node_cli.configs import ( CONTAINER_CONFIG_PATH, CONTAINER_CONFIG_TMP_PATH, SKALE_DIR, GLOBAL_SKALE_DIR, ) +from node_cli.core.checks import CheckType, run_checks as run_host_checks +from node_cli.core.docker_config import configure_docker from node_cli.core.host import ( ensure_btrfs_kernel_module_autoloaded, link_env_file, prepare_host, ) - -from node_cli.core.docker_config import configure_docker -from node_cli.core.nginx import generate_nginx_config from node_cli.core.nftables import configure_nftables +from node_cli.core.nginx import generate_nginx_config +from node_cli.core.node import NodeTypes from node_cli.core.node_options import NodeOptions from node_cli.core.resources import update_resource_allocation, init_shared_space_volume - -from node_cli.operations.common import configure_filebeat, configure_flask, unpack_backup_archive -from node_cli.operations.volume import ( - cleanup_volume_artifacts, - ensure_filestorage_mapping, - prepare_block_device, +from node_cli.core.schains import ( + update_node_cli_schain_status, + cleanup_sync_datadir, ) -from node_cli.operations.docker_lvmpy import lvmpy_install # noqa +from node_cli.cli.info import VERSION +from node_cli.operations.common import configure_filebeat, configure_flask, unpack_backup_archive +from node_cli.operations.docker_lvmpy import lvmpy_install from node_cli.operations.skale_node import ( download_skale_node, sync_skale_node, update_images, ) -from node_cli.core.checks import CheckType, run_checks as run_host_checks -from node_cli.core.schains import ( - update_node_cli_schain_status, - cleanup_sync_datadir, +from node_cli.operations.volume import ( + cleanup_volume_artifacts, + ensure_filestorage_mapping, + prepare_block_device, ) from node_cli.utils.docker_utils import ( compose_rm, @@ -66,9 +65,9 @@ docker_cleanup, remove_dynamic_containers, ) +from node_cli.utils.helper import str_to_bool, rm_dir from node_cli.utils.meta import get_meta_info, update_meta from node_cli.utils.print_formatters 
import print_failed_requirements_checks -from node_cli.utils.helper import str_to_bool, rm_dir logger = logging.getLogger(__name__) @@ -149,7 +148,47 @@ def update(env_filepath: str, env: Dict) -> bool: @checked_host -def init(env_filepath: str, env: dict) -> bool: +def migrate_mirage_boot(env_filepath: str, env: Dict) -> bool: + compose_rm(env, node_type=NodeTypes.MIRAGE) + remove_dynamic_containers() + + sync_skale_node() + ensure_btrfs_kernel_module_autoloaded() + + if env.get('SKIP_DOCKER_CONFIG') != 'True': + configure_docker() + + enable_monitoring = str_to_bool(env.get('MONITORING_CONTAINERS', 'False')) + configure_nftables(enable_monitoring=enable_monitoring) + + generate_nginx_config() + + prepare_host(env_filepath, env['ENV_TYPE']) + + current_stream = get_meta_info().config_stream + skip_cleanup = env.get('SKIP_DOCKER_CLEANUP') == 'True' + if not skip_cleanup and current_stream != env['CONTAINER_CONFIGS_STREAM']: + logger.info( + 'Stream version was changed from %s to %s', + current_stream, + env['CONTAINER_CONFIGS_STREAM'], + ) + docker_cleanup() + + update_meta( + VERSION, + env['CONTAINER_CONFIGS_STREAM'], + env['DOCKER_LVMPY_STREAM'], + distro.id(), + distro.version(), + ) + update_images(env=env) + compose_up(env, node_type=NodeTypes.MIRAGE) + return True + + +@checked_host +def init(env_filepath: str, env: dict) -> None: sync_skale_node() ensure_btrfs_kernel_module_autoloaded() @@ -180,7 +219,37 @@ def init(env_filepath: str, env: dict) -> bool: update_images(env=env) compose_up(env) - return True + + +@checked_host +def init_mirage_boot(env_filepath: str, env: dict) -> None: + sync_skale_node() + + ensure_btrfs_kernel_module_autoloaded() + if env.get('SKIP_DOCKER_CONFIG') != 'True': + configure_docker() + + enable_monitoring = str_to_bool(env.get('MONITORING_CONTAINERS', 'False')) + configure_nftables(enable_monitoring=enable_monitoring) + + prepare_host(env_filepath, env_type=env['ENV_TYPE']) + link_env_file() + + configure_filebeat() + 
configure_flask() + generate_nginx_config() + + update_meta( + VERSION, + env['CONTAINER_CONFIGS_STREAM'], + env['DOCKER_LVMPY_STREAM'], + distro.id(), + distro.version(), + ) + update_resource_allocation(env_type=env['ENV_TYPE']) + update_images(env=env) + + compose_up(env, node_type=NodeTypes.MIRAGE, is_mirage_boot=True) def init_sync( @@ -190,7 +259,7 @@ def init_sync( archive: bool, snapshot: bool, snapshot_from: Optional[str], -) -> bool: +) -> None: cleanup_volume_artifacts(env['DISK_MOUNTPOINT']) download_skale_node(env.get('CONTAINER_CONFIGS_STREAM'), env.get('CONTAINER_CONFIGS_DIR')) sync_skale_node() @@ -233,8 +302,7 @@ def init_sync( update_images(env=env, sync_node=True) - compose_up(env, sync_node=True) - return True + compose_up(env, node_type=NodeTypes.SYNC) def update_sync(env_filepath: str, env: Dict) -> bool: @@ -266,7 +334,7 @@ def update_sync(env_filepath: str, env: Dict) -> bool: ) update_images(env=env, sync_node=True) - compose_up(env, sync_node=True) + compose_up(env, node_type=NodeTypes.SYNC) return True diff --git a/node_cli/operations/mirage_boot.py b/node_cli/operations/mirage_boot.py new file mode 100644 index 00000000..e110c9f6 --- /dev/null +++ b/node_cli/operations/mirage_boot.py @@ -0,0 +1,19 @@ +# -*- coding: utf-8 -*- +# +# This file is part of node-cli +# +# Copyright (C) 2025-Present SKALE Labs +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Affero General Public License for more details. +# +# You should have received a copy of the GNU Affero General Public License +# along with this program. If not, see . 
+ diff --git a/node_cli/utils/docker_utils.py b/node_cli/utils/docker_utils.py index 2b75f9a3..d2d710b1 100644 --- a/node_cli/utils/docker_utils.py +++ b/node_cli/utils/docker_utils.py @@ -31,10 +31,12 @@ from node_cli.configs import ( COMPOSE_PATH, SYNC_COMPOSE_PATH, + MIRAGE_COMPOSE_PATH, REMOVED_CONTAINERS_FOLDER_PATH, SGX_CERTIFICATES_DIR_NAME, NGINX_CONTAINER_NAME, ) +from node_cli.core.node import NodeTypes logger = logging.getLogger(__name__) @@ -43,8 +45,7 @@ IMA_REMOVE_TIMEOUT = 20 TELEGRAF_REMOVE_TIMEOUT = 20 -MAIN_COMPOSE_CONTAINERS = ('skale-api', 'bounty', 'skale-admin') -BASE_COMPOSE_SERVICES = ( +BASE_SKALE_COMPOSE_SERVICES = ( 'transaction-manager', 'skale-admin', 'skale-api', @@ -54,6 +55,18 @@ 'watchdog', 'filebeat', ) +BASE_SYNC_COMPOSE_SERVICES = ('skale-sync-admin', 'nginx') +PROTO_MIRAGE_COMPOSE_SERVICES = ( + 'transaction-manager', + 'skale-api', + 'redis', + 'watchdog', + 'nginx', + 'filebeat', +) +BASE_MIRAGE_COMPOSE_SERVICES = (*PROTO_MIRAGE_COMPOSE_SERVICES, 'mirage-admin') +BASE_MIRAGE_BOOT_COMPOSE_SERVICES = (*PROTO_MIRAGE_COMPOSE_SERVICES, 'mirage-boot') + MONITORING_COMPOSE_SERVICES = ( 'node-exporter', 'advisor', @@ -225,9 +238,9 @@ def is_volume_exists(name: str, dutils=None): return True -def compose_rm(env={}, sync_node: bool = False): +def compose_rm(env={}, node_type: NodeTypes = NodeTypes.REGULAR): logger.info('Removing compose containers') - compose_path = get_compose_path(sync_node) + compose_path = get_compose_path(node_type) run_cmd( cmd=( 'docker', @@ -245,47 +258,89 @@ def compose_rm(env={}, sync_node: bool = False): def compose_pull(env: dict, sync_node: bool = False): logger.info('Pulling compose containers') - compose_path = get_compose_path(sync_node) + compose_path = get_compose_path(NodeTypes.SYNC) run_cmd(cmd=('docker', 'compose', '-f', compose_path, 'pull'), env=env) def compose_build(env: dict, sync_node: bool = False): logger.info('Building compose containers') - compose_path = get_compose_path(sync_node) + 
compose_path = get_compose_path(NodeTypes.SYNC) run_cmd(cmd=('docker', 'compose', '-f', compose_path, 'build'), env=env) -def get_up_compose_cmd(services): - return ('docker', 'compose', '-f', COMPOSE_PATH, 'up', '-d', *services) +def get_compose_path(node_type: NodeTypes = NodeTypes.REGULAR) -> str: + if node_type == NodeTypes.SYNC: + return SYNC_COMPOSE_PATH + elif node_type == NodeTypes.MIRAGE: + return MIRAGE_COMPOSE_PATH + else: + return COMPOSE_PATH -def get_up_compose_sync_cmd(): - return ('docker', 'compose', '-f', SYNC_COMPOSE_PATH, 'up', '-d') +def get_compose_services(node_type: NodeTypes = NodeTypes.REGULAR) -> tuple: + if node_type == NodeTypes.SYNC: + return BASE_SYNC_COMPOSE_SERVICES + elif node_type == NodeTypes.MIRAGE: + return BASE_MIRAGE_COMPOSE_SERVICES + else: + return BASE_SKALE_COMPOSE_SERVICES -def get_compose_path(sync_node: bool) -> str: - return SYNC_COMPOSE_PATH if sync_node else COMPOSE_PATH +def get_up_compose_cmd( + node_type: NodeTypes = NodeTypes.REGULAR, services: Optional[tuple] = None +) -> tuple: + compose_path = get_compose_path(node_type) + if services is None: + services = get_compose_services(node_type) -def compose_up(env, sync_node=False): - if sync_node: + return ('docker', 'compose', '-f', compose_path, 'up', '-d', *services) + + +def compose_up(env, node_type: NodeTypes = NodeTypes.REGULAR, is_mirage_boot: bool = False): + if node_type == NodeTypes.SYNC: logger.info('Running containers for sync node') - run_cmd(cmd=get_up_compose_sync_cmd(), env=env) + run_cmd(cmd=get_up_compose_cmd(node_type=NodeTypes.SYNC), env=env) return - logger.info('Running base set of containers') - if 'SGX_CERTIFICATES_DIR_NAME' not in env: env['SGX_CERTIFICATES_DIR_NAME'] = SGX_CERTIFICATES_DIR_NAME - logger.debug('Launching containers with env %s', env) - run_cmd(cmd=get_up_compose_cmd(BASE_COMPOSE_SERVICES), env=env) + if node_type == NodeTypes.MIRAGE: + logger.info('Running mirage base set of containers') + if not is_mirage_boot: + 
logger.debug('Launching mirage containers with env %s', env) + run_cmd(cmd=get_up_compose_cmd(node_type=NodeTypes.MIRAGE), env=env) + else: + logger.debug('Launching mirage boot containers with env %s', env) + run_cmd( + cmd=get_up_compose_cmd( + node_type=NodeTypes.MIRAGE, services=BASE_MIRAGE_BOOT_COMPOSE_SERVICES + ), + env=env, + ) + else: + logger.info('Running skale node base set of containers') + logger.debug('Launching skale node containers with env %s', env) + run_cmd(cmd=get_up_compose_cmd(node_type=NodeTypes.REGULAR), env=env) + + if 'TG_API_KEY' in env and 'TG_CHAT_ID' in env: + logger.info('Running containers for Telegram notifications') + run_cmd( + cmd=get_up_compose_cmd( + node_type=NodeTypes.REGULAR, services=NOTIFICATION_COMPOSE_SERVICES + ), + env=env, + ) + if str_to_bool(env.get('MONITORING_CONTAINERS', 'False')): logger.info('Running monitoring containers') - run_cmd(cmd=get_up_compose_cmd(MONITORING_COMPOSE_SERVICES), env=env) - if 'TG_API_KEY' in env and 'TG_CHAT_ID' in env: - logger.info('Running containers for Telegram notifications') - run_cmd(cmd=get_up_compose_cmd(NOTIFICATION_COMPOSE_SERVICES), env=env) + run_cmd( + cmd=get_up_compose_cmd( + node_type=NodeTypes.REGULAR, services=MONITORING_COMPOSE_SERVICES + ), + env=env, + ) def restart_nginx_container(dutils=None): @@ -322,16 +377,24 @@ def is_container_running(name: str, dclient: Optional[DockerClient] = None) -> b return False -def is_admin_running(dclient: Optional[DockerClient] = None) -> bool: - return is_container_running(name='skale_admin', dclient=dclient) - - -def is_api_running(dclient: Optional[DockerClient] = None) -> bool: - return is_container_running(name='skale_api', dclient=dclient) - - -def is_sync_admin_running(dclient: Optional[DockerClient] = None) -> bool: - return is_container_running(name='skale_sync_admin', dclient=dclient) +def is_api_running( + node_type: NodeTypes = NodeTypes.REGULAR, dclient: Optional[DockerClient] = None +) -> bool: + if node_type == 
NodeTypes.MIRAGE: + return is_container_running(name='mirage_api', dclient=dclient) + else: + return is_container_running(name='skale_api', dclient=dclient) + + +def is_admin_running( + node_type: NodeTypes = NodeTypes.REGULAR, client: Optional[DockerClient] = None +) -> bool: + if node_type == NodeTypes.MIRAGE: + return is_container_running(name='mirage_admin', dclient=client) + elif node_type == NodeTypes.SYNC: + return is_container_running(name='skale_sync_admin', dclient=client) + else: + return is_container_running(name='skale_admin', dclient=client) def system_prune(): diff --git a/node_cli/utils/meta.py b/node_cli/utils/meta.py index 94e9581b..e008aba6 100644 --- a/node_cli/utils/meta.py +++ b/node_cli/utils/meta.py @@ -1,6 +1,7 @@ import json import os from collections import namedtuple +from typing import Optional from node_cli.configs import META_FILEPATH DEFAULT_VERSION = '1.0.0' @@ -19,12 +20,14 @@ def __new__( cls, version=DEFAULT_VERSION, config_stream=DEFAULT_CONFIG_STREAM, - docker_lvmpy_stream=DEFAULT_DOCKER_LVMPY_STREAM, + docker_lvmpy_stream: Optional[str] = DEFAULT_DOCKER_LVMPY_STREAM, os_id=DEFAULT_OS_ID, os_version=DEFAULT_OS_VERSION, ): + # If docker_lvmpy_stream is None, use the default value + actual_docker_lvmpy_stream = docker_lvmpy_stream or DEFAULT_DOCKER_LVMPY_STREAM return super(CliMeta, cls).__new__( - cls, version, config_stream, docker_lvmpy_stream, os_id, os_version + cls, version, config_stream, actual_docker_lvmpy_stream, os_id, os_version ) @@ -53,14 +56,18 @@ def compose_default_meta() -> CliMeta: ) -def ensure_meta(meta: CliMeta = None) -> None: +def ensure_meta(meta: Optional[CliMeta] = None) -> None: if not get_meta_info(): meta = meta or compose_default_meta() save_meta(meta) def update_meta( - version: str, config_stream: str, docker_lvmpy_stream: str, os_id: str, os_version: str + version: str, + config_stream: str, + docker_lvmpy_stream: Optional[str], + os_id: str, + os_version: str, ) -> None: ensure_meta() meta = 
CliMeta(version, config_stream, docker_lvmpy_stream, os_id, os_version) diff --git a/scripts/build.sh b/scripts/build.sh index 624fcdf4..d99bb45a 100755 --- a/scripts/build.sh +++ b/scripts/build.sh @@ -24,7 +24,7 @@ fi if [ -z "$3" ] then - (>&2 echo 'You should provide type: normal or sync') + (>&2 echo 'You should provide type: normal, sync or mirage') echo $USAGE_MSG exit 1 fi @@ -39,6 +39,8 @@ OS=`uname -s`-`uname -m` if [ "$TYPE" = "sync" ]; then EXECUTABLE_NAME=skale-$VERSION-$OS-sync +elif [ "$TYPE" = "mirage" ]; then + EXECUTABLE_NAME=skale-$VERSION-$OS-mirage else EXECUTABLE_NAME=skale-$VERSION-$OS fi diff --git a/scripts/generate_info.sh b/scripts/generate_info.sh index d554712a..32253148 100755 --- a/scripts/generate_info.sh +++ b/scripts/generate_info.sh @@ -18,7 +18,7 @@ if [ -z "$BRANCH" ]; then exit 1 fi if [ -z "$TYPE" ]; then - (>&2 echo 'You should provide type: normal or sync') + (>&2 echo 'You should provide type: normal, sync or mirage') echo $USAGE_MSG exit 1 fi diff --git a/tests/core/core_node_test.py b/tests/core/core_node_test.py index f45a9be3..7649d020 100644 --- a/tests/core/core_node_test.py +++ b/tests/core/core_node_test.py @@ -11,7 +11,7 @@ from node_cli.configs import NODE_DATA_PATH from node_cli.configs.resource_allocation import RESOURCE_ALLOCATION_FILEPATH -from node_cli.core.node import BASE_CONTAINERS_AMOUNT, is_base_containers_alive +from node_cli.core.node import NodeTypes, get_base_containers_amount, is_base_containers_alive from node_cli.core.node import init, pack_dir, update, is_update_safe from node_cli.utils.meta import CliMeta @@ -29,7 +29,7 @@ def skale_base_containers(): containers = [ dclient.containers.run(ALPINE_IMAGE_NAME, detach=True, name=f'skale_test{i}', command=CMD) - for i in range(BASE_CONTAINERS_AMOUNT) + for i in range(get_base_containers_amount()) ] yield containers for c in containers: @@ -40,7 +40,7 @@ def skale_base_containers(): def skale_base_containers_without_one(): containers = [ 
dclient.containers.run(ALPINE_IMAGE_NAME, detach=True, name=f'skale_test{i}', command=CMD) - for i in range(BASE_CONTAINERS_AMOUNT - 1) + for i in range(get_base_containers_amount() - 1) ] yield containers for c in containers: @@ -51,7 +51,7 @@ def skale_base_containers_without_one(): def skale_base_containers_exited(): containers = [ dclient.containers.run(HELLO_WORLD_IMAGE_NAME, detach=True, name=f'skale_test{i}') - for i in range(BASE_CONTAINERS_AMOUNT) + for i in range(get_base_containers_amount()) ] time.sleep(10) yield containers @@ -187,16 +187,16 @@ def test_update_node(mocked_g_config, resource_file): def test_is_update_safe(): assert is_update_safe() - assert is_update_safe(sync_node=True) + assert is_update_safe(node_type=NodeTypes.SYNC) with mock.patch('node_cli.core.node.is_admin_running', return_value=True): with mock.patch('node_cli.core.node.is_api_running', return_value=True): assert not is_update_safe() - assert is_update_safe(sync_node=True) + assert is_update_safe(node_type=NodeTypes.SYNC) - with mock.patch('node_cli.core.node.is_sync_admin_running', return_value=True): + with mock.patch('node_cli.core.node.is_admin_running', return_value=True): assert is_update_safe() - assert not is_update_safe(sync_node=True) + assert not is_update_safe(node_type=NodeTypes.SYNC) with mock.patch('node_cli.utils.docker_utils.is_container_running', return_value=True): with mock.patch( diff --git a/text.yml b/text.yml index 80461c76..0cf1fce1 100644 --- a/text.yml +++ b/text.yml @@ -76,3 +76,11 @@ lvmpy: heal: help: Run healing procedure for lvmpy server prompt: Are you sure you want run healing procedure? 
+
+mirage:
+  init:
+    help: Initialize mirage SKALE node
+    indexer: Run mirage node in indexer mode (disable block rotation)
+    archive: Enable historic state and disable block rotation
+    snapshot_from: IP of the node to take snapshot from
+    snapshot: Start mirage node from snapshot
--- node_cli/cli/mirage_node.py | 60 +++++++++++++++++++++++++----- node_cli/core/mirage_node.py | 54 +++++++++++++++++++++++++++ node_cli/core/node.py | 4 +- node_cli/operations/__init__.py | 1 + node_cli/operations/base.py | 53 ++++++++++++++++++++++++-- node_cli/operations/mirage_boot.py | 19 ---------- 6 files changed, 156 insertions(+), 35 deletions(-) create mode 100644 node_cli/core/mirage_node.py delete mode 100644 node_cli/operations/mirage_boot.py diff --git a/node_cli/cli/mirage_node.py b/node_cli/cli/mirage_node.py index 02d47cd2..9f5c0ffd 100644 --- a/node_cli/cli/mirage_node.py +++ b/node_cli/cli/mirage_node.py @@ -17,23 +17,63 @@ # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see . - import click -from node_cli.utils.helper import ( - safe_load_texts, -) +from node_cli.core.node import get_node_signature, backup +from node_cli.core.mirage_node import restore_mirage +from node_cli.utils.helper import error_exit, streamed_cmd +from node_cli.utils.decorators import check_inited -G_TEXTS = safe_load_texts() -TEXTS = G_TEXTS['mirage'] +@click.group('node', help='Commands for regular Mirage Node operations.') +def mirage_node_cli(): + pass -@click.group('node', help='Commands for the Mirage node.') -def mirage_node_cli(): +@mirage_node_cli.command('init', help='Initialize regular Mirage node operations (Placeholder).') +@check_inited +def init_node(): + click.echo("Placeholder: Command 'mirage node init' is not yet implemented.") pass -@mirage_node_cli.group(help='Mirage node commands') -def mirage(): +@mirage_node_cli.command( + 'register', help='Register Mirage node (Placeholder for regular operations).' 
+) +@check_inited +def register_node(): + click.echo("Placeholder: Command 'mirage node register' is not yet implemented.") pass + + +@mirage_node_cli.command('signature', help='Get mirage node signature for a validator ID.') +@click.argument('validator_id') +def signature_node(validator_id): + res = get_node_signature(validator_id) + if isinstance(res, dict) and 'error' in res: + error_exit(f'Error getting signature: {res.get("message", res)}') + print(f'Signature: {res}') + + +@mirage_node_cli.command('backup', help='Generate backup file for the Mirage node.') +@click.argument('backup_folder_path') +@streamed_cmd +def backup_node(backup_folder_path): + backup(backup_folder_path) + + +@mirage_node_cli.command('restore', help='Restore Mirage node from a backup file.') +@click.argument('backup_path') +@click.argument('env_file') +@click.option( + '--no-snapshot', help='Do not restore mirage from snapshot', is_flag=True, hidden=True +) +@click.option( + '--config-only', + help='Only restore configuration files in .skale and artifacts', + is_flag=True, + hidden=True, +) +@streamed_cmd +def restore_node(backup_path, env_file, no_snapshot, config_only): + restore_mirage(backup_path, env_file, no_snapshot, config_only) diff --git a/node_cli/core/mirage_node.py b/node_cli/core/mirage_node.py new file mode 100644 index 00000000..81923313 --- /dev/null +++ b/node_cli/core/mirage_node.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# +# This file is part of node-cli +# +# Copyright (C) 2025-Present SKALE Labs +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the +# GNU Affero General Public License for more details. +# +# You should have received a copy of the GNU Affero General Public License +# along with this program. If not, see . + + +import logging +import time + +from node_cli.configs import SKALE_DIR, RESTORE_SLEEP_TIMEOUT +from node_cli.core.host import save_env_params +from node_cli.core.node import NodeTypes, compose_node_env +from node_cli.utils.decorators import check_not_inited +from node_cli.utils.exit_codes import CLIExitCodes +from node_cli.utils.helper import error_exit +from node_cli.utils.texts import Texts +from node_cli.operations import restore_mirage_op + + +logger = logging.getLogger(__name__) +TEXTS = Texts() + + +@check_not_inited +def restore_mirage(backup_path, env_filepath, no_snapshot=False, config_only=False): + env = compose_node_env(env_filepath, node_type=NodeTypes.MIRAGE) + if env is None: + return + save_env_params(env_filepath) + env['SKALE_DIR'] = SKALE_DIR + + if not no_snapshot: + logger.info('Adding BACKUP_RUN to env ...') + env['BACKUP_RUN'] = 'True' + + restored_ok = restore_mirage_op(env, backup_path, config_only=config_only) + if not restored_ok: + error_exit('Restore operation failed', exit_code=CLIExitCodes.OPERATION_EXECUTION_ERROR) + time.sleep(RESTORE_SLEEP_TIMEOUT) + print('Mirage node is restored from backup') diff --git a/node_cli/core/node.py b/node_cli/core/node.py index 6d55daec..f8057558 100644 --- a/node_cli/core/node.py +++ b/node_cli/core/node.py @@ -311,7 +311,7 @@ def backup(path): def get_backup_filename(): - time = datetime.datetime.utcnow().strftime('%Y-%m-%d-%H-%M-%S') + time = datetime.datetime.now(datetime.timezone.utc).strftime('%Y-%m-%d-%H-%M-%S') return f'{BACKUP_ARCHIVE_NAME}-{time}.tar.gz' @@ -338,7 +338,7 @@ def pack_dir(source: str, dest: str, exclude: Tuple[str] = ()): def logfilter(tarinfo): path = Path(tarinfo.name) for e in exclude: - logger.debug('Cheking if %s is parent of %s', e, tarinfo.name) + logger.debug('Checking if %s is parent 
of %s', e, tarinfo.name) try: path.relative_to(e) except ValueError: diff --git a/node_cli/operations/__init__.py b/node_cli/operations/__init__.py index 1b8f8f35..17706533 100644 --- a/node_cli/operations/__init__.py +++ b/node_cli/operations/__init__.py @@ -27,6 +27,7 @@ turn_off as turn_off_op, turn_on as turn_on_op, restore as restore_op, + restore_mirage as restore_mirage_op, cleanup_sync as cleanup_sync_op, configure_nftables, ) diff --git a/node_cli/operations/base.py b/node_cli/operations/base.py index d3e8dc01..6febd72d 100644 --- a/node_cli/operations/base.py +++ b/node_cli/operations/base.py @@ -306,7 +306,7 @@ def init_sync( def update_sync(env_filepath: str, env: Dict) -> bool: - compose_rm(env, sync_node=True) + compose_rm(env, node_type=NodeTypes.MIRAGE) remove_dynamic_containers() cleanup_volume_artifacts(env['DISK_MOUNTPOINT']) download_skale_node(env['CONTAINER_CONFIGS_STREAM'], env.get('CONTAINER_CONFIGS_DIR')) @@ -338,9 +338,9 @@ def update_sync(env_filepath: str, env: Dict) -> bool: return True -def turn_off(env: dict, sync_node: bool = False) -> None: +def turn_off(env: dict, node_type: NodeTypes = NodeTypes.REGULAR) -> None: logger.info('Turning off the node...') - compose_rm(env=env, sync_node=sync_node) + compose_rm(env=env, node_type=node_type) remove_dynamic_containers() logger.info('Node was successfully turned off') @@ -412,8 +412,53 @@ def restore(env, backup_path, config_only=False): return True +def restore_mirage(env, backup_path, config_only=False): + unpack_backup_archive(backup_path) + failed_checks = run_host_checks( + env['DISK_MOUNTPOINT'], + env['ENV_TYPE'], + CONTAINER_CONFIG_PATH, + check_type=CheckType.PREINSTALL, + ) + if failed_checks: + print_failed_requirements_checks(failed_checks) + return False + + ensure_btrfs_kernel_module_autoloaded() + + if env.get('SKIP_DOCKER_CONFIG') != 'True': + configure_docker() + + enable_monitoring = str_to_bool(env.get('MONITORING_CONTAINERS', 'False')) + 
configure_nftables(enable_monitoring=enable_monitoring) + + link_env_file() + + update_meta( + VERSION, + env['CONTAINER_CONFIGS_STREAM'], + env['DOCKER_LVMPY_STREAM'], + distro.id(), + distro.version(), + ) + + if not config_only: + compose_up(env, node_type=NodeTypes.MIRAGE) + + failed_checks = run_host_checks( + env['DISK_MOUNTPOINT'], + env['ENV_TYPE'], + CONTAINER_CONFIG_PATH, + check_type=CheckType.POSTINSTALL, + ) + if failed_checks: + print_failed_requirements_checks(failed_checks) + return False + return True + + def cleanup_sync(env, schain_name: str) -> None: - turn_off(env, sync_node=True) + turn_off(env, node_type=NodeTypes.SYNC) cleanup_sync_datadir(schain_name=schain_name) rm_dir(GLOBAL_SKALE_DIR) rm_dir(SKALE_DIR) diff --git a/node_cli/operations/mirage_boot.py b/node_cli/operations/mirage_boot.py deleted file mode 100644 index e110c9f6..00000000 --- a/node_cli/operations/mirage_boot.py +++ /dev/null @@ -1,19 +0,0 @@ -# -*- coding: utf-8 -*- -# -# This file is part of node-cli -# -# Copyright (C) 2025-Present SKALE Labs -# -# This program is free software: you can redistribute it and/or modify -# it under the terms of the GNU Affero General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU Affero General Public License for more details. -# -# You should have received a copy of the GNU Affero General Public License -# along with this program. If not, see . 
- From 7c92d7acfb39e7157952183ec4b554be627b5b84 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20Gon=C3=A7alves?= Date: Mon, 21 Apr 2025 13:38:55 +0100 Subject: [PATCH 028/332] Addressed review comments for PR #851 Draft --- node_cli/cli/mirage_node.py | 26 ++++++++++++---- node_cli/configs/__init__.py | 2 +- node_cli/configs/env.py | 22 ++++++------- node_cli/core/logs.py | 2 +- node_cli/core/mirage_boot.py | 10 +++--- node_cli/core/mirage_node.py | 10 ++---- node_cli/core/nginx.py | 7 ++++- node_cli/core/node.py | 41 +++++++++++++------------ node_cli/core/schains.py | 4 +-- node_cli/main.py | 13 ++++---- node_cli/operations/base.py | 21 ++++++------- node_cli/operations/volume.py | 4 +-- node_cli/utils/docker_utils.py | 56 +++++++++++++++++----------------- node_cli/utils/meta.py | 4 +-- scripts/generate_info.sh | 25 +++++++++++++-- tests/conftest.py | 8 ++--- tests/core/core_node_test.py | 8 ++--- tests/helper.py | 2 +- 18 files changed, 148 insertions(+), 117 deletions(-) diff --git a/node_cli/cli/mirage_node.py b/node_cli/cli/mirage_node.py index 9f5c0ffd..f4ac7a1b 100644 --- a/node_cli/cli/mirage_node.py +++ b/node_cli/cli/mirage_node.py @@ -21,7 +21,7 @@ from node_cli.core.node import get_node_signature, backup from node_cli.core.mirage_node import restore_mirage -from node_cli.utils.helper import error_exit, streamed_cmd +from node_cli.utils.helper import error_exit, streamed_cmd, abort_if_false from node_cli.utils.decorators import check_inited @@ -46,6 +46,23 @@ def register_node(): pass +@mirage_node_cli.command('update', help='Update Mirage.') +@click.option( + '--yes', + is_flag=True, + callback=abort_if_false, + expose_value=False, + prompt='Are you sure you want to update Mirage node software?', +) +@click.option('--pull-config', 'pull_config_for_schain', hidden=True, type=str) +@click.option('--unsafe', 'unsafe_ok', help='Allow unsafe update', hidden=True, is_flag=True) +@click.argument('env_file') +@streamed_cmd +def update_node(env_file, 
pull_config_for_schain, unsafe_ok): + click.echo("Placeholder: Command 'mirage node update' is not yet implemented.") + pass + + @mirage_node_cli.command('signature', help='Get mirage node signature for a validator ID.') @click.argument('validator_id') def signature_node(validator_id): @@ -65,9 +82,6 @@ def backup_node(backup_folder_path): @mirage_node_cli.command('restore', help='Restore Mirage node from a backup file.') @click.argument('backup_path') @click.argument('env_file') -@click.option( - '--no-snapshot', help='Do not restore mirage from snapshot', is_flag=True, hidden=True -) @click.option( '--config-only', help='Only restore configuration files in .skale and artifacts', @@ -75,5 +89,5 @@ def backup_node(backup_folder_path): hidden=True, ) @streamed_cmd -def restore_node(backup_path, env_file, no_snapshot, config_only): - restore_mirage(backup_path, env_file, no_snapshot, config_only) +def restore_node(backup_path, env_file, config_only): + restore_mirage(backup_path, env_file, config_only) diff --git a/node_cli/configs/__init__.py b/node_cli/configs/__init__.py index 58f002d9..5056c58c 100644 --- a/node_cli/configs/__init__.py +++ b/node_cli/configs/__init__.py @@ -34,7 +34,7 @@ FILESTORAGE_MAPPING = os.path.join(SKALE_STATE_DIR, 'filestorage') SNAPSHOTS_SHARED_VOLUME = 'shared-space' SCHAINS_MNT_DIR_REGULAR = '/mnt' -SCHAINS_MNT_DIR_SYNC = '/var/lib/skale/schains' +SCHAINS_MNT_DIR_SINGLE_CHAIN = '/var/lib/skale/schains' VOLUME_GROUP = 'schains' SKALE_DIR = os.path.join(G_CONF_HOME, '.skale') diff --git a/node_cli/configs/env.py b/node_cli/configs/env.py index 80213f13..ad62e932 100644 --- a/node_cli/configs/env.py +++ b/node_cli/configs/env.py @@ -24,7 +24,7 @@ from node_cli.configs import SKALE_DIR, CONTAINER_CONFIG_PATH from node_cli.configs.alias_address_validation import validate_env_alias_or_address, ContractType -from node_cli.core.node import NodeTypes +from node_cli.core.node import NodeType from node_cli.utils.helper import error_exit 
SKALE_DIR_ENV_FILEPATH = os.path.join(SKALE_DIR, '.env') @@ -34,7 +34,7 @@ ALLOWED_MIRAGE_ENV_TYPES = ['mainnet-mirage', 'devnet-mirage'] ALLOWED_ENV_TYPES = [*ALLOWED_SKALE_ENV_TYPES, *ALLOWED_MIRAGE_ENV_TYPES] -PROTO_REQUIRED_PARAMS: Dict[str, str] = { +CORE_REQUIRED_PARAMS: Dict[str, str] = { 'CONTAINER_CONFIGS_STREAM': '', 'ENDPOINT': '', 'MANAGER_CONTRACTS': '', @@ -44,24 +44,24 @@ } REQUIRED_PARAMS_SKALE: Dict[str, str] = { - **PROTO_REQUIRED_PARAMS, + **CORE_REQUIRED_PARAMS, 'IMA_CONTRACTS': '', 'DOCKER_LVMPY_STREAM': '', 'FILEBEAT_HOST': '', } REQUIRED_PARAMS_MIRAGE_BOOT: Dict[str, str] = { - **PROTO_REQUIRED_PARAMS, + **CORE_REQUIRED_PARAMS, 'IMA_CONTRACTS': '', 'FILEBEAT_HOST': '', } REQUIRED_PARAMS_MIRAGE: Dict[str, str] = { - **PROTO_REQUIRED_PARAMS, + **CORE_REQUIRED_PARAMS, 'FILEBEAT_HOST': '', } REQUIRED_PARAMS_SYNC: Dict[str, str] = { - **PROTO_REQUIRED_PARAMS, + **CORE_REQUIRED_PARAMS, 'SCHAIN_NAME': '', 'IMA_CONTRACTS': '', 'DOCKER_LVMPY_STREAM': '', @@ -90,7 +90,7 @@ def absent_required_params(params: Dict[str, str]) -> List[str]: def get_validated_env_config( env_filepath: str = SKALE_DIR_ENV_FILEPATH, - node_type: NodeTypes = NodeTypes.REGULAR, + node_type: NodeType = NodeType.REGULAR, is_mirage_boot: bool = False, ) -> Dict[str, str]: load_env_file(env_filepath) @@ -106,14 +106,14 @@ def load_env_file(env_filepath: str) -> None: def build_env_params( - node_type: NodeTypes = NodeTypes.REGULAR, is_mirage_boot: bool = False + node_type: NodeType = NodeType.REGULAR, is_mirage_boot: bool = False ) -> Dict[str, str]: """Return environment variables dictionary with keys based on node type.""" - if node_type == NodeTypes.MIRAGE and is_mirage_boot: + if node_type == NodeType.MIRAGE and is_mirage_boot: params = REQUIRED_PARAMS_MIRAGE_BOOT.copy() - elif node_type == NodeTypes.MIRAGE: + elif node_type == NodeType.MIRAGE: params = REQUIRED_PARAMS_MIRAGE.copy() - elif node_type == NodeTypes.SYNC: + elif node_type == NodeType.SYNC: params = 
REQUIRED_PARAMS_SYNC.copy() else: params = REQUIRED_PARAMS_SKALE.copy() diff --git a/node_cli/core/logs.py b/node_cli/core/logs.py index 3060b874..67472e96 100644 --- a/node_cli/core/logs.py +++ b/node_cli/core/logs.py @@ -58,7 +58,7 @@ def create_logs_dump(path, filter_container=None): def create_dump_dir(): - time = datetime.datetime.utcnow().strftime('%Y-%m-%d--%H-%M-%S') + time = datetime.datetime.now(datetime.timezone.utc).strftime('%Y-%m-%d--%H-%M-%S') folder_name = f'skale-logs-dump-{time}' folder_path = os.path.join(SKALE_TMP_DIR, folder_name) containers_path = os.path.join(folder_path, 'containers') diff --git a/node_cli/core/mirage_boot.py b/node_cli/core/mirage_boot.py index e4d601dc..a6f9eacd 100644 --- a/node_cli/core/mirage_boot.py +++ b/node_cli/core/mirage_boot.py @@ -23,7 +23,7 @@ from node_cli.configs import TM_INIT_TIMEOUT from node_cli.core.resources import update_resource_allocation -from node_cli.core.node import NodeTypes, compose_node_env, is_base_containers_alive +from node_cli.core.node import NodeType, compose_node_env, is_base_containers_alive from node_cli.operations import init_mirage_boot_op, migrate_mirage_boot_op from node_cli.utils.decorators import check_not_inited, check_inited, check_user from node_cli.utils.exit_codes import CLIExitCodes @@ -40,14 +40,14 @@ def init(env_filepath: str) -> None: env = compose_node_env( env_filepath, - node_type=NodeTypes.MIRAGE, + node_type=NodeType.MIRAGE, is_mirage_boot=True, ) init_mirage_boot_op(env_filepath, env) logger.info('Waiting for mirage containers initialization') time.sleep(TM_INIT_TIMEOUT) - if not is_base_containers_alive(NodeTypes.MIRAGE): + if not is_base_containers_alive(NodeType.MIRAGE): error_exit('Containers are not running', exit_code=CLIExitCodes.OPERATION_EXECUTION_ERROR) logger.info('Generating mirage resource allocation file ...') update_resource_allocation(env['ENV_TYPE']) @@ -63,13 +63,13 @@ def migrate(env_filepath: str, pull_config_for_schain: str, unsafe_ok: bool = 
Fa inited_node=True, sync_schains=False, pull_config_for_schain=pull_config_for_schain, - node_type=NodeTypes.MIRAGE, + node_type=NodeType.MIRAGE, ) migrate_ok = migrate_mirage_boot_op(env_filepath, env) if migrate_ok: logger.info('Waiting for containers initialization') time.sleep(TM_INIT_TIMEOUT) - alive = is_base_containers_alive(node_type=NodeTypes.MIRAGE) + alive = is_base_containers_alive(node_type=NodeType.MIRAGE) if not migrate_ok or not alive: print_node_cmd_error() return diff --git a/node_cli/core/mirage_node.py b/node_cli/core/mirage_node.py index 81923313..a02ca7e5 100644 --- a/node_cli/core/mirage_node.py +++ b/node_cli/core/mirage_node.py @@ -23,7 +23,7 @@ from node_cli.configs import SKALE_DIR, RESTORE_SLEEP_TIMEOUT from node_cli.core.host import save_env_params -from node_cli.core.node import NodeTypes, compose_node_env +from node_cli.core.node import NodeType, compose_node_env from node_cli.utils.decorators import check_not_inited from node_cli.utils.exit_codes import CLIExitCodes from node_cli.utils.helper import error_exit @@ -36,17 +36,13 @@ @check_not_inited -def restore_mirage(backup_path, env_filepath, no_snapshot=False, config_only=False): - env = compose_node_env(env_filepath, node_type=NodeTypes.MIRAGE) +def restore_mirage(backup_path, env_filepath, config_only=False): + env = compose_node_env(env_filepath, node_type=NodeType.MIRAGE) if env is None: return save_env_params(env_filepath) env['SKALE_DIR'] = SKALE_DIR - if not no_snapshot: - logger.info('Adding BACKUP_RUN to env ...') - env['BACKUP_RUN'] = 'True' - restored_ok = restore_mirage_op(env, backup_path, config_only=config_only) if not restored_ok: error_exit('Restore operation failed', exit_code=CLIExitCodes.OPERATION_EXECUTION_ERROR) diff --git a/node_cli/core/nginx.py b/node_cli/core/nginx.py index cf10a196..0e6d0b7d 100644 --- a/node_cli/core/nginx.py +++ b/node_cli/core/nginx.py @@ -22,6 +22,7 @@ from node_cli.cli.info import TYPE from node_cli.configs import NODE_CERTS_PATH, 
NGINX_TEMPLATE_FILEPATH, NGINX_CONFIG_FILEPATH +from node_cli.core.node import NodeType from node_cli.utils.docker_utils import restart_nginx_container, docker_client from node_cli.utils.helper import process_template @@ -32,9 +33,13 @@ SSL_CRT_NAME = 'ssl_cert' +def is_regular_node_nginx() -> bool: + return TYPE != NodeType.MIRAGE + + def generate_nginx_config() -> None: ssl_on = check_ssl_certs() - regular_node = TYPE != 'mirage' + regular_node = is_regular_node_nginx() template_data = { 'ssl': ssl_on, 'regular_node': regular_node, diff --git a/node_cli/core/node.py b/node_cli/core/node.py index f8057558..f91e33cc 100644 --- a/node_cli/core/node.py +++ b/node_cli/core/node.py @@ -36,7 +36,7 @@ LOG_PATH, RESTORE_SLEEP_TIMEOUT, SCHAINS_MNT_DIR_REGULAR, - SCHAINS_MNT_DIR_SYNC, + SCHAINS_MNT_DIR_SINGLE_CHAIN, SKALE_DIR, SKALE_STATE_DIR, TM_INIT_TIMEOUT, @@ -101,17 +101,15 @@ class NodeStatuses(Enum): NOT_CREATED = 5 -class NodeTypes(Enum): - """This class contains possible node types.""" - +class NodeType(Enum): REGULAR = 0 SYNC = 1 MIRAGE = 2 -def is_update_safe(node_type: NodeTypes = NodeTypes.REGULAR) -> bool: +def is_update_safe(node_type: NodeType = NodeType.REGULAR) -> bool: if not is_admin_running(node_type): - if node_type == NodeTypes.SYNC: + if node_type == NodeType.SYNC: return True elif not is_api_running(node_type): return True @@ -189,13 +187,13 @@ def restore(backup_path, env_filepath, no_snapshot=False, config_only=False): def init_sync( env_filepath: str, indexer: bool, archive: bool, snapshot: bool, snapshot_from: Optional[str] ) -> None: - env = compose_node_env(env_filepath, node_type=NodeTypes.SYNC) + env = compose_node_env(env_filepath, node_type=NodeType.SYNC) if env is None: return init_sync_op(env_filepath, env, indexer, archive, snapshot, snapshot_from) logger.info('Waiting for containers initialization') time.sleep(TM_INIT_TIMEOUT) - if not is_base_containers_alive(NodeTypes.SYNC): + if not is_base_containers_alive(NodeType.SYNC): 
error_exit('Containers are not running', exit_code=CLIExitCodes.OPERATION_EXECUTION_ERROR) logger.info('Sync node initialized successfully') @@ -207,12 +205,12 @@ def update_sync(env_filepath: str, unsafe_ok: bool = False) -> None: prev_version = get_meta_info().version if (__version__ == 'test' or __version__.startswith('2.6')) and prev_version == '2.5.0': migrate_2_6() - env = compose_node_env(env_filepath, node_type=NodeTypes.SYNC) + env = compose_node_env(env_filepath, node_type=NodeType.SYNC) update_ok = update_sync_op(env_filepath, env) if update_ok: logger.info('Waiting for containers initialization') time.sleep(TM_INIT_TIMEOUT) - alive = is_base_containers_alive(NodeTypes.SYNC) + alive = is_base_containers_alive(NodeType.SYNC) if not update_ok or not alive: print_node_cmd_error() return @@ -223,7 +221,7 @@ def update_sync(env_filepath: str, unsafe_ok: bool = False) -> None: @check_inited @check_user def cleanup_sync() -> None: - env = compose_node_env(SKALE_DIR_ENV_FILEPATH, save=False, node_type=NodeTypes.SYNC) + env = compose_node_env(SKALE_DIR_ENV_FILEPATH, save=False, node_type=NodeType.SYNC) schain_name = env['SCHAIN_NAME'] cleanup_sync_op(env, schain_name) logger.info('Sync node was cleaned up, all containers and data removed') @@ -234,7 +232,7 @@ def compose_node_env( inited_node: bool = False, sync_schains: Optional[bool] = None, pull_config_for_schain: Optional[str] = None, - node_type: NodeTypes = NodeTypes.REGULAR, + node_type: NodeType = NodeType.REGULAR, save: bool = True, is_mirage_boot: bool = False, ) -> dict[str, str]: @@ -245,7 +243,10 @@ def compose_node_env( else: env_params = get_validated_env_config(INIT_ENV_FILEPATH, node_type=node_type) - mnt_dir = SCHAINS_MNT_DIR_SYNC if node_type == NodeTypes.SYNC else SCHAINS_MNT_DIR_REGULAR + if node_type == NodeType.SYNC or node_type == NodeType.MIRAGE: + mnt_dir = SCHAINS_MNT_DIR_SINGLE_CHAIN + else: + mnt_dir = SCHAINS_MNT_DIR_REGULAR env = { 'SKALE_DIR': SKALE_DIR, @@ -255,10 +256,10 @@ def 
compose_node_env( **env_params, } - if inited_node and not node_type == NodeTypes.SYNC: + if inited_node and not node_type == NodeType.SYNC: env['FLASK_SECRET_KEY'] = get_flask_secret_key() - if sync_schains and not node_type == NodeTypes.SYNC: + if sync_schains and not node_type == NodeType.SYNC: env['BACKUP_RUN'] = 'True' if pull_config_for_schain: @@ -415,17 +416,17 @@ def turn_on(maintenance_off, sync_schains, env_file): set_maintenance_mode_off() -def get_base_containers_amount(node_type: NodeTypes = NodeTypes.REGULAR): - if node_type == NodeTypes.SYNC: +def get_base_containers_amount(node_type: NodeType = NodeType.REGULAR): + if node_type == NodeType.SYNC: return len(BASE_SYNC_COMPOSE_SERVICES) - elif node_type == NodeTypes.MIRAGE: + elif node_type == NodeType.MIRAGE: return len(BASE_MIRAGE_COMPOSE_SERVICES) else: return len(BASE_SKALE_COMPOSE_SERVICES) -def is_base_containers_alive(node_type: NodeTypes = NodeTypes.REGULAR) -> bool: - if node_type == NodeTypes.MIRAGE: +def is_base_containers_alive(node_type: NodeType = NodeType.REGULAR) -> bool: + if node_type == NodeType.MIRAGE: prefix = 'mirage_' else: prefix = 'skale_' diff --git a/node_cli/core/schains.py b/node_cli/core/schains.py index 44c6d83a..34f05cea 100644 --- a/node_cli/core/schains.py +++ b/node_cli/core/schains.py @@ -13,7 +13,7 @@ NODE_CONFIG_PATH, NODE_CLI_STATUS_FILENAME, SCHAIN_NODE_DATA_PATH, - SCHAINS_MNT_DIR_SYNC, + SCHAINS_MNT_DIR_SINGLE_CHAIN, ) from node_cli.configs.env import get_validated_env_config @@ -222,7 +222,7 @@ def ensure_schain_volume(schain: str, schain_type: str, env_type: str) -> None: logger.warning('Volume %s already exists', schain) -def cleanup_sync_datadir(schain_name: str, base_path: str = SCHAINS_MNT_DIR_SYNC) -> None: +def cleanup_sync_datadir(schain_name: str, base_path: str = SCHAINS_MNT_DIR_SINGLE_CHAIN) -> None: base_path = os.path.join(base_path, schain_name) regular_folders_pattern = f'{base_path}/[!snapshots]*' logger.info('Removing regular folders') diff 
--git a/node_cli/main.py b/node_cli/main.py index fd16cd78..7d0c0af4 100644 --- a/node_cli/main.py +++ b/node_cli/main.py @@ -27,6 +27,7 @@ import click from node_cli.cli import __version__ +from node_cli.cli.exit import exit_cli from node_cli.cli.health import health_cli from node_cli.cli.info import BUILD_DATETIME, COMMIT, BRANCH, OS, VERSION, TYPE from node_cli.cli.logs import logs_cli @@ -35,16 +36,14 @@ from node_cli.cli.schains import schains_cli from node_cli.cli.wallet import wallet_cli from node_cli.cli.ssl import ssl_cli -from node_cli.cli.exit import exit_cli from node_cli.cli.resources_allocation import resources_allocation_cli from node_cli.cli.sync_node import sync_node_cli from node_cli.cli.mirage_boot import mirage_boot_cli from node_cli.cli.mirage_node import mirage_node_cli - - -from node_cli.utils.helper import safe_load_texts, init_default_logger -from node_cli.configs import LONG_LINE from node_cli.core.host import init_logs_dir +from node_cli.core.node import NodeType +from node_cli.configs import LONG_LINE +from node_cli.utils.helper import safe_load_texts, init_default_logger from node_cli.utils.helper import error_exit TEXTS = safe_load_texts() @@ -83,9 +82,9 @@ def info(): def get_sources_list() -> List[click.MultiCommand]: - if TYPE == 'sync': + if TYPE == NodeType.SYNC: return [cli, sync_node_cli, ssl_cli] - elif TYPE == 'mirage': + elif TYPE == NodeType.MIRAGE: return [ cli, logs_cli, diff --git a/node_cli/operations/base.py b/node_cli/operations/base.py index 6febd72d..131f8346 100644 --- a/node_cli/operations/base.py +++ b/node_cli/operations/base.py @@ -39,7 +39,7 @@ ) from node_cli.core.nftables import configure_nftables from node_cli.core.nginx import generate_nginx_config -from node_cli.core.node import NodeTypes +from node_cli.core.node import NodeType from node_cli.core.node_options import NodeOptions from node_cli.core.resources import update_resource_allocation, init_shared_space_volume from node_cli.core.schains import ( @@ 
-149,8 +149,7 @@ def update(env_filepath: str, env: Dict) -> bool: @checked_host def migrate_mirage_boot(env_filepath: str, env: Dict) -> bool: - compose_rm(env, node_type=NodeTypes.MIRAGE) - remove_dynamic_containers() + compose_rm(env, node_type=NodeType.MIRAGE) sync_skale_node() ensure_btrfs_kernel_module_autoloaded() @@ -183,7 +182,7 @@ def migrate_mirage_boot(env_filepath: str, env: Dict) -> bool: distro.version(), ) update_images(env=env) - compose_up(env, node_type=NodeTypes.MIRAGE) + compose_up(env, node_type=NodeType.MIRAGE) return True @@ -249,7 +248,7 @@ def init_mirage_boot(env_filepath: str, env: dict) -> None: update_resource_allocation(env_type=env['ENV_TYPE']) update_images(env=env) - compose_up(env, node_type=NodeTypes.MIRAGE, is_mirage_boot=True) + compose_up(env, node_type=NodeType.MIRAGE, is_mirage_boot=True) def init_sync( @@ -302,11 +301,11 @@ def init_sync( update_images(env=env, sync_node=True) - compose_up(env, node_type=NodeTypes.SYNC) + compose_up(env, node_type=NodeType.SYNC) def update_sync(env_filepath: str, env: Dict) -> bool: - compose_rm(env, node_type=NodeTypes.MIRAGE) + compose_rm(env, node_type=NodeType.SYNC) remove_dynamic_containers() cleanup_volume_artifacts(env['DISK_MOUNTPOINT']) download_skale_node(env['CONTAINER_CONFIGS_STREAM'], env.get('CONTAINER_CONFIGS_DIR')) @@ -334,11 +333,11 @@ def update_sync(env_filepath: str, env: Dict) -> bool: ) update_images(env=env, sync_node=True) - compose_up(env, node_type=NodeTypes.SYNC) + compose_up(env, node_type=NodeType.SYNC) return True -def turn_off(env: dict, node_type: NodeTypes = NodeTypes.REGULAR) -> None: +def turn_off(env: dict, node_type: NodeType = NodeType.REGULAR) -> None: logger.info('Turning off the node...') compose_rm(env=env, node_type=node_type) remove_dynamic_containers() @@ -443,7 +442,7 @@ def restore_mirage(env, backup_path, config_only=False): ) if not config_only: - compose_up(env, node_type=NodeTypes.MIRAGE) + compose_up(env, node_type=NodeType.MIRAGE) 
failed_checks = run_host_checks( env['DISK_MOUNTPOINT'], @@ -458,7 +457,7 @@ def restore_mirage(env, backup_path, config_only=False): def cleanup_sync(env, schain_name: str) -> None: - turn_off(env, node_type=NodeTypes.SYNC) + turn_off(env, node_type=NodeType.SYNC) cleanup_sync_datadir(schain_name=schain_name) rm_dir(GLOBAL_SKALE_DIR) rm_dir(SKALE_DIR) diff --git a/node_cli/operations/volume.py b/node_cli/operations/volume.py index d6d6a966..1595a442 100644 --- a/node_cli/operations/volume.py +++ b/node_cli/operations/volume.py @@ -30,7 +30,7 @@ DOCKER_LVMPY_REPO_URL, FILESTORAGE_MAPPING, SCHAINS_MNT_DIR_REGULAR, - SCHAINS_MNT_DIR_SYNC, + SCHAINS_MNT_DIR_SINGLE_CHAIN, SKALE_STATE_DIR, ) @@ -137,7 +137,7 @@ def prepare_block_device(block_device, force=False): else: logger.info('%s contains %s filesystem', block_device, filesystem) format_as_btrfs(block_device) - mount_device(block_device, SCHAINS_MNT_DIR_SYNC) + mount_device(block_device, SCHAINS_MNT_DIR_SINGLE_CHAIN) def max_resize_btrfs(path): diff --git a/node_cli/utils/docker_utils.py b/node_cli/utils/docker_utils.py index d2d710b1..a80d0289 100644 --- a/node_cli/utils/docker_utils.py +++ b/node_cli/utils/docker_utils.py @@ -36,7 +36,7 @@ SGX_CERTIFICATES_DIR_NAME, NGINX_CONTAINER_NAME, ) -from node_cli.core.node import NodeTypes +from node_cli.core.node import NodeType logger = logging.getLogger(__name__) @@ -56,7 +56,7 @@ 'filebeat', ) BASE_SYNC_COMPOSE_SERVICES = ('skale-sync-admin', 'nginx') -PROTO_MIRAGE_COMPOSE_SERVICES = ( +CORE_MIRAGE_COMPOSE_SERVICES = ( 'transaction-manager', 'skale-api', 'redis', @@ -64,8 +64,8 @@ 'nginx', 'filebeat', ) -BASE_MIRAGE_COMPOSE_SERVICES = (*PROTO_MIRAGE_COMPOSE_SERVICES, 'mirage-admin') -BASE_MIRAGE_BOOT_COMPOSE_SERVICES = (*PROTO_MIRAGE_COMPOSE_SERVICES, 'mirage-boot') +BASE_MIRAGE_COMPOSE_SERVICES = (*CORE_MIRAGE_COMPOSE_SERVICES, 'mirage-admin') +BASE_MIRAGE_BOOT_COMPOSE_SERVICES = (*CORE_MIRAGE_COMPOSE_SERVICES, 'mirage-boot') MONITORING_COMPOSE_SERVICES = ( 
'node-exporter', @@ -238,7 +238,7 @@ def is_volume_exists(name: str, dutils=None): return True -def compose_rm(env={}, node_type: NodeTypes = NodeTypes.REGULAR): +def compose_rm(env={}, node_type: NodeType = NodeType.REGULAR): logger.info('Removing compose containers') compose_path = get_compose_path(node_type) run_cmd( @@ -258,36 +258,36 @@ def compose_rm(env={}, node_type: NodeTypes = NodeTypes.REGULAR): def compose_pull(env: dict, sync_node: bool = False): logger.info('Pulling compose containers') - compose_path = get_compose_path(NodeTypes.SYNC) + compose_path = get_compose_path(NodeType.SYNC) run_cmd(cmd=('docker', 'compose', '-f', compose_path, 'pull'), env=env) def compose_build(env: dict, sync_node: bool = False): logger.info('Building compose containers') - compose_path = get_compose_path(NodeTypes.SYNC) + compose_path = get_compose_path(NodeType.SYNC) run_cmd(cmd=('docker', 'compose', '-f', compose_path, 'build'), env=env) -def get_compose_path(node_type: NodeTypes = NodeTypes.REGULAR) -> str: - if node_type == NodeTypes.SYNC: +def get_compose_path(node_type: NodeType = NodeType.REGULAR) -> str: + if node_type == NodeType.SYNC: return SYNC_COMPOSE_PATH - elif node_type == NodeTypes.MIRAGE: + elif node_type == NodeType.MIRAGE: return MIRAGE_COMPOSE_PATH else: return COMPOSE_PATH -def get_compose_services(node_type: NodeTypes = NodeTypes.REGULAR) -> tuple: - if node_type == NodeTypes.SYNC: +def get_compose_services(node_type: NodeType = NodeType.REGULAR) -> tuple: + if node_type == NodeType.SYNC: return BASE_SYNC_COMPOSE_SERVICES - elif node_type == NodeTypes.MIRAGE: + elif node_type == NodeType.MIRAGE: return BASE_MIRAGE_COMPOSE_SERVICES else: return BASE_SKALE_COMPOSE_SERVICES def get_up_compose_cmd( - node_type: NodeTypes = NodeTypes.REGULAR, services: Optional[tuple] = None + node_type: NodeType = NodeType.REGULAR, services: Optional[tuple] = None ) -> tuple: compose_path = get_compose_path(node_type) @@ -297,38 +297,38 @@ def get_up_compose_cmd( return 
('docker', 'compose', '-f', compose_path, 'up', '-d', *services) -def compose_up(env, node_type: NodeTypes = NodeTypes.REGULAR, is_mirage_boot: bool = False): - if node_type == NodeTypes.SYNC: +def compose_up(env, node_type: NodeType = NodeType.REGULAR, is_mirage_boot: bool = False): + if node_type == NodeType.SYNC: logger.info('Running containers for sync node') - run_cmd(cmd=get_up_compose_cmd(node_type=NodeTypes.SYNC), env=env) + run_cmd(cmd=get_up_compose_cmd(node_type=NodeType.SYNC), env=env) return if 'SGX_CERTIFICATES_DIR_NAME' not in env: env['SGX_CERTIFICATES_DIR_NAME'] = SGX_CERTIFICATES_DIR_NAME - if node_type == NodeTypes.MIRAGE: + if node_type == NodeType.MIRAGE: logger.info('Running mirage base set of containers') if not is_mirage_boot: logger.debug('Launching mirage containers with env %s', env) - run_cmd(cmd=get_up_compose_cmd(node_type=NodeTypes.MIRAGE), env=env) + run_cmd(cmd=get_up_compose_cmd(node_type=NodeType.MIRAGE), env=env) else: logger.debug('Launching mirage boot containers with env %s', env) run_cmd( cmd=get_up_compose_cmd( - node_type=NodeTypes.MIRAGE, services=BASE_MIRAGE_BOOT_COMPOSE_SERVICES + node_type=NodeType.MIRAGE, services=BASE_MIRAGE_BOOT_COMPOSE_SERVICES ), env=env, ) else: logger.info('Running skale node base set of containers') logger.debug('Launching skale node containers with env %s', env) - run_cmd(cmd=get_up_compose_cmd(node_type=NodeTypes.REGULAR), env=env) + run_cmd(cmd=get_up_compose_cmd(node_type=NodeType.REGULAR), env=env) if 'TG_API_KEY' in env and 'TG_CHAT_ID' in env: logger.info('Running containers for Telegram notifications') run_cmd( cmd=get_up_compose_cmd( - node_type=NodeTypes.REGULAR, services=NOTIFICATION_COMPOSE_SERVICES + node_type=NodeType.REGULAR, services=NOTIFICATION_COMPOSE_SERVICES ), env=env, ) @@ -337,7 +337,7 @@ def compose_up(env, node_type: NodeTypes = NodeTypes.REGULAR, is_mirage_boot: bo logger.info('Running monitoring containers') run_cmd( cmd=get_up_compose_cmd( - 
node_type=NodeTypes.REGULAR, services=MONITORING_COMPOSE_SERVICES + node_type=NodeType.REGULAR, services=MONITORING_COMPOSE_SERVICES ), env=env, ) @@ -378,20 +378,20 @@ def is_container_running(name: str, dclient: Optional[DockerClient] = None) -> b def is_api_running( - node_type: NodeTypes = NodeTypes.REGULAR, dclient: Optional[DockerClient] = None + node_type: NodeType = NodeType.REGULAR, dclient: Optional[DockerClient] = None ) -> bool: - if node_type == NodeTypes.MIRAGE: + if node_type == NodeType.MIRAGE: return is_container_running(name='mirage_api', dclient=dclient) else: return is_container_running(name='skale_api', dclient=dclient) def is_admin_running( - node_type: NodeTypes = NodeTypes.REGULAR, client: Optional[DockerClient] = None + node_type: NodeType = NodeType.REGULAR, client: Optional[DockerClient] = None ) -> bool: - if node_type == NodeTypes.MIRAGE: + if node_type == NodeType.MIRAGE: return is_container_running(name='mirage_admin', dclient=client) - elif node_type == NodeTypes.SYNC: + elif node_type == NodeType.SYNC: return is_container_running(name='skale_sync_admin', dclient=client) else: return is_container_running(name='skale_admin', dclient=client) diff --git a/node_cli/utils/meta.py b/node_cli/utils/meta.py index e008aba6..a8237026 100644 --- a/node_cli/utils/meta.py +++ b/node_cli/utils/meta.py @@ -24,10 +24,8 @@ def __new__( os_id=DEFAULT_OS_ID, os_version=DEFAULT_OS_VERSION, ): - # If docker_lvmpy_stream is None, use the default value - actual_docker_lvmpy_stream = docker_lvmpy_stream or DEFAULT_DOCKER_LVMPY_STREAM return super(CliMeta, cls).__new__( - cls, version, config_stream, actual_docker_lvmpy_stream, os_id, os_version + cls, version, config_stream, docker_lvmpy_stream, os_id, os_version ) diff --git a/scripts/generate_info.sh b/scripts/generate_info.sh index 32253148..f2d2cb2a 100755 --- a/scripts/generate_info.sh +++ b/scripts/generate_info.sh @@ -3,7 +3,7 @@ set -e VERSION=$1 BRANCH=$2 -TYPE=$3 +TYPE_STR=$3 USAGE_MSG='Usage: 
generate_info.sh [VERSION] [BRANCH] [TYPE]' @@ -17,7 +17,7 @@ if [ -z "$BRANCH" ]; then echo $USAGE_MSG exit 1 fi -if [ -z "$TYPE" ]; then +if [ -z "$TYPE_STR" ]; then (>&2 echo 'You should provide type: normal, sync or mirage') echo $USAGE_MSG exit 1 @@ -31,12 +31,31 @@ LATEST_COMMIT=$(git rev-parse HEAD) CURRENT_DATETIME="$(date "+%Y-%m-%d %H:%M:%S")" OS="$(uname -s)-$(uname -m)" +case "$TYPE_STR" in + normal) + TYPE_ENUM="NodeType.REGULAR" + ;; + sync) + TYPE_ENUM="NodeType.SYNC" + ;; + mirage) + TYPE_ENUM="NodeType.MIRAGE" + ;; + *) + (>&2 echo "Error: Invalid type '$TYPE_STR'. Must be 'normal', 'sync', or 'mirage'") + exit 1 + ;; +esac + rm -f "$DIST_INFO_FILEPATH" touch "$DIST_INFO_FILEPATH" +echo "from node_cli.core.node import NodeType" >> "$DIST_INFO_FILEPATH" +echo "" >> "$DIST_INFO_FILEPATH" + echo "BUILD_DATETIME = '$CURRENT_DATETIME'" >> "$DIST_INFO_FILEPATH" echo "COMMIT = '$LATEST_COMMIT'" >> "$DIST_INFO_FILEPATH" echo "BRANCH = '$BRANCH'" >> "$DIST_INFO_FILEPATH" echo "OS = '$OS'" >> "$DIST_INFO_FILEPATH" echo "VERSION = '$VERSION'" >> "$DIST_INFO_FILEPATH" -echo "TYPE = '$TYPE'" >> "$DIST_INFO_FILEPATH" +echo "TYPE = $TYPE_ENUM" >> "$DIST_INFO_FILEPATH" \ No newline at end of file diff --git a/tests/conftest.py b/tests/conftest.py index a7c2c0cd..88691472 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -45,7 +45,7 @@ from node_cli.utils.docker_utils import docker_client from node_cli.utils.global_config import generate_g_config_file -from tests.helper import TEST_META_V1, TEST_META_V2, TEST_META_V3, TEST_SCHAINS_MNT_DIR_SYNC +from tests.helper import TEST_META_V1, TEST_META_V2, TEST_META_V3, TEST_SCHAINS_MNT_DIR_SINGLE_CHAIN TEST_ENV_PARAMS = """ @@ -286,11 +286,11 @@ def tmp_schains_dir(): @pytest.fixture def tmp_sync_datadir(): - os.makedirs(TEST_SCHAINS_MNT_DIR_SYNC, exist_ok=True) + os.makedirs(TEST_SCHAINS_MNT_DIR_SINGLE_CHAIN, exist_ok=True) try: - yield TEST_SCHAINS_MNT_DIR_SYNC + yield TEST_SCHAINS_MNT_DIR_SINGLE_CHAIN finally: - 
shutil.rmtree(TEST_SCHAINS_MNT_DIR_SYNC) + shutil.rmtree(TEST_SCHAINS_MNT_DIR_SINGLE_CHAIN) @pytest.fixture diff --git a/tests/core/core_node_test.py b/tests/core/core_node_test.py index 7649d020..1fdc57fd 100644 --- a/tests/core/core_node_test.py +++ b/tests/core/core_node_test.py @@ -11,7 +11,7 @@ from node_cli.configs import NODE_DATA_PATH from node_cli.configs.resource_allocation import RESOURCE_ALLOCATION_FILEPATH -from node_cli.core.node import NodeTypes, get_base_containers_amount, is_base_containers_alive +from node_cli.core.node import NodeType, get_base_containers_amount, is_base_containers_alive from node_cli.core.node import init, pack_dir, update, is_update_safe from node_cli.utils.meta import CliMeta @@ -187,16 +187,16 @@ def test_update_node(mocked_g_config, resource_file): def test_is_update_safe(): assert is_update_safe() - assert is_update_safe(node_type=NodeTypes.SYNC) + assert is_update_safe(node_type=NodeType.SYNC) with mock.patch('node_cli.core.node.is_admin_running', return_value=True): with mock.patch('node_cli.core.node.is_api_running', return_value=True): assert not is_update_safe() - assert is_update_safe(node_type=NodeTypes.SYNC) + assert is_update_safe(node_type=NodeType.SYNC) with mock.patch('node_cli.core.node.is_admin_running', return_value=True): assert is_update_safe() - assert not is_update_safe(node_type=NodeTypes.SYNC) + assert not is_update_safe(node_type=NodeType.SYNC) with mock.patch('node_cli.utils.docker_utils.is_container_running', return_value=True): with mock.patch( diff --git a/tests/helper.py b/tests/helper.py index f2209088..08d67c20 100644 --- a/tests/helper.py +++ b/tests/helper.py @@ -27,7 +27,7 @@ BLOCK_DEVICE = os.getenv('BLOCK_DEVICE') -TEST_SCHAINS_MNT_DIR_SYNC = 'tests/tmp' +TEST_SCHAINS_MNT_DIR_SINGLE_CHAIN = 'tests/tmp' TEST_META_V1 = {'version': '0.1.1', 'config_stream': 'develop'} From c802e19f7f79f7663798c9a01994ec4f7176ff3c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20Gon=C3=A7alves?= Date: Mon, 
21 Apr 2025 16:57:39 +0100 Subject: [PATCH 029/332] Updated README to include Mirage commands and to have better formatting. Removed some unused options from mirage commands. --- README.md | 664 ++++++++++++++++++++++++----------- node_cli/cli/mirage_boot.py | 5 +- node_cli/core/mirage_boot.py | 2 +- 3 files changed, 463 insertions(+), 208 deletions(-) diff --git a/README.md b/README.md index 32276159..e2414498 100644 --- a/README.md +++ b/README.md @@ -4,69 +4,105 @@ ![Test](https://github.com/skalenetwork/node-cli/workflows/Test/badge.svg) [![Discord](https://img.shields.io/discord/534485763354787851.svg)](https://discord.gg/vvUtWJB) -SKALE Node CLI, part of the SKALE suite of validator tools, is the command line to setup, register and maintain your SKALE node. +SKALE Node CLI, part of the SKALE suite of validator tools, is the command line interface to setup, register and maintain your SKALE node. It comes in three distinct build types: Standard (for validator nodes), Sync (for dedicated sChain synchronization), and Mirage (for the Mirage network). ## Table of Contents 1. [Installation](#installation) -2. [CLI usage](#cli-usage) - 2.1 [Top level commands](#top-level-commands) - 2.2 [Node](#node-commands) - 2.3 [Wallet](#wallet-commands) - 2.4 [sChains](#schain-commands) - 2.5 [Health](#health-commands) - 2.6 [SSL](#ssl-commands) - 2.7 [Logs](#logs-commands) - 2.8 [Resources allocation](#resources-allocation-commands) -3. [Sync CLI usage](#sync-cli-usage) - 3.1 [Top level commands](#top-level-commands-sync) + 1.1 [Standard Node Binary](#standard-node-binary) + 1.2 [Sync Node Binary](#sync-node-binary) + 1.3 [Mirage Node Binary](#mirage-node-binary) + 1.4 [Permissions and Testing](#permissions-and-testing) +2. 
[Standard Node Usage (`skale` - Normal Build)](#standard-node-usage-skale---normal-build) + 2.1 [Top level commands (Standard)](#top-level-commands-standard) + 2.2 [Node commands (Standard)](#node-commands-standard) + 2.3 [Wallet commands (Standard)](#wallet-commands-standard) + 2.4 [sChain commands (Standard)](#schain-commands-standard) + 2.5 [Health commands (Standard)](#health-commands-standard) + 2.6 [SSL commands (Standard)](#ssl-commands-standard) + 2.7 [Logs commands (Standard)](#logs-commands-standard) + 2.8 [Resources allocation commands (Standard)](#resources-allocation-commands-standard) +3. [Sync Node Usage (`skale` - Sync Build)](#sync-node-usage-skale---sync-build) + 3.1 [Top level commands (Sync)](#top-level-commands-sync) 3.2 [Sync node commands](#sync-node-commands) -4. [Exit codes](#exit-codes) -5. [Development](#development) +4. [Mirage Node Usage (`mirage`)](#mirage-node-usage-mirage) + 4.1 [Top level commands (Mirage)](#top-level-commands-mirage) + 4.2 [Mirage Boot commands](#mirage-boot-commands) + 4.3 [Mirage Node commands](#mirage-node-commands) +5. [Exit codes](#exit-codes) +6. [Development](#development) + +--- ## Installation -- Prerequisites +### Prerequisites + +Ensure that the following packages are installed: **docker**, **docker-compose** (1.27.4+) -Ensure that the following package is installed: **docker**, **docker-compose** (1.27.4+) +### Standard Node Binary -- Download the executable +This binary (`skale-VERSION-OS`) is used for managing standard SKALE validator nodes. 
```shell -VERSION_NUM={put the version number here} && sudo -E bash -c "curl -L https://github.com/skalenetwork/node-cli/releases/download/$VERSION_NUM/skale-$VERSION_NUM-`uname -s`-`uname -m` > /usr/local/bin/skale" +# Replace {version} with the desired release version (e.g., 2.6.0) +VERSION_NUM={version} && \ +sudo -E bash -c "curl -L https://github.com/skalenetwork/node-cli/releases/download/$VERSION_NUM/skale-$VERSION_NUM-`uname -s`-`uname -m` > /usr/local/bin/skale" ``` -For Sync node version: +### Sync Node Binary + +This binary (`skale-VERSION-OS-sync`) is used for managing dedicated Sync nodes. **Ensure you download the correct `-sync` suffixed binary for Sync node operations.** ```shell -VERSION_NUM={put the version number here} && sudo -E bash -c "curl -L https://github.com/skalenetwork/node-cli/releases/download/$VERSION_NUM/skale-$VERSION_NUM-`uname -s`-`uname -m`-sync > /usr/local/bin/skale" +# Replace {version} with the desired release version (e.g., 2.6.0) +VERSION_NUM={version} && \ +sudo -E bash -c "curl -L https://github.com/skalenetwork/node-cli/releases/download/$VERSION_NUM/skale-$VERSION_NUM-`uname -s`-`uname -m`-sync > /usr/local/bin/skale" ``` -For Mirage node version: +### Mirage Node Binary + +This binary (`skale-VERSION-OS-mirage`) is used specifically for managing nodes on the Mirage network. It is named `mirage`. 
```shell -VERSION_NUM={put the version number here} && sudo -E bash -c "curl -L https://github.com/skalenetwork/node-cli/releases/download/$VERSION_NUM/skale-$VERSION_NUM-`uname -s`-`uname -m`-mirage > /usr/local/bin/mirage" +# Replace {version} with the desired release version (e.g., 2.6.0) +VERSION_NUM={version} && \ +sudo -E bash -c "curl -L https://github.com/skalenetwork/node-cli/releases/download/$VERSION_NUM/skale-$VERSION_NUM-`uname -s`-`uname -m`-mirage > /usr/local/bin/mirage" ``` -- Apply executable permissions to the downloaded binary: +### Permissions and Testing + +Apply executable permissions to the downloaded binary (adjust name accordingly): ```shell -chmod +x /usr/local/bin/skale +# For Standard or Sync binary +sudo chmod +x /usr/local/bin/skale + +# For Mirage binary +sudo chmod +x /usr/local/bin/mirage ``` -- Test the installation +Test the installation: ```shell +# Standard or Sync build skale --help + +# Mirage build +mirage --help ``` -## CLI usage +--- + +## Standard Node Usage (`skale` - Normal Build) + +Commands available in the **standard `skale` binary** for managing nodes. -### Top level commands +### Top level commands (Standard) #### Info -Print build info +Print build info for the `skale` (normal) binary. ```shell skale info @@ -74,7 +110,7 @@ skale info #### Version -Print version number +Print version number for the `skale` (normal) binary. ```shell skale version @@ -84,13 +120,13 @@ Options: - `--short` - prints version only, without additional text. -### Node commands +### Node commands (Standard) > Prefix: `skale node` #### Node information -Get base info about SKALE node +Get base info about the standard SKALE node. ```shell skale node info @@ -98,11 +134,11 @@ skale node info Options: -`-f/--format json/text` - optional +- `-f/--format json/text` - optional. #### Node initialization -Initialize a SKALE node on current machine +Initialize a standard SKALE node on the current machine. 
> :warning: **Avoid re-initializing a node that’s already initialized**: Run `skale node info` first to confirm the current initialization state. @@ -112,32 +148,32 @@ skale node init [ENV_FILE] Arguments: -- `ENV_FILE` - path to .env file (required parameters are listed in the `skale node init` command) +- `ENV_FILE` - path to .env file (required). -You should specify the following environment variables: +Required environment variables in `ENV_FILE`: -- `SGX_SERVER_URL` - SGX server URL -- `DISK_MOUNTPOINT` - disk mount point for storing sChains data -- `DOCKER_LVMPY_STREAM` - stream of `docker-lvmpy` to use -- `CONTAINER_CONFIGS_STREAM` - stream of `skale-node` to use -- `ENDPOINT` - RPC endpoint of the node in the network where SKALE Manager is deployed -- `MANAGER_CONTRACTS` - SKALE Manager `message_proxy_mainnet` contract alias or address -- `IMA_CONTRACTS` - IMA `skale_manager` contract alias or address -- `FILEBEAT_URL` - URL to the Filebeat log server -- `ENV_TYPE` - environment type (e.g., 'mainnet', 'testnet', 'qanet', 'devnet') +- `SGX_SERVER_URL` - SGX server URL. +- `DISK_MOUNTPOINT` - Mount point for storing sChains data. +- `DOCKER_LVMPY_STREAM` - Stream of `docker-lvmpy` to use. +- `CONTAINER_CONFIGS_STREAM` - Stream of `skale-node` to use. +- `ENDPOINT` - RPC endpoint of the network where SKALE Manager is deployed. +- `MANAGER_CONTRACTS` - SKALE Manager `message_proxy_mainnet` contract alias or address. +- `IMA_CONTRACTS` - IMA `skale_manager` contract alias or address. +- `FILEBEAT_HOST` - URL of the Filebeat log server. +- `ENV_TYPE` - Environment type (e.g., 'mainnet', 'testnet', 'qanet', 'devnet'). > In `MANAGER_CONTRACTS` and `IMA_CONTRACTS` fields, if you are using a recognized network (e.g., 'Mainnet', 'Holesky', 'local'), you can use a recognized alias (e.g., 'production', 'grants'). You can check the list of recognized networks and aliases in [contract deployments](https://github.com/skalenetwork/skale-contracts/tree/deployments). 
-> :warning: If you are using a custom network or a contract which isn't recognized by underlying skale library, you **MUST** provide a direct contract address. +> :warning: If you are using a custom network or a contract which isn't recognized by the underlying `skale-contracts` library, you **MUST** provide a direct contract address. Optional variables: - `TG_API_KEY` - Telegram API key - `TG_CHAT_ID` - Telegram chat ID -- `MONITORING_CONTAINERS` - will enable monitoring containers (`cadvisor`, `node-exporter`). +- `MONITORING_CONTAINERS` - Enable monitoring containers (`cadvisor`, `node-exporter`). #### Node initialization from backup -Restore SKALE node on another machine +Restore a standard SKALE node on another machine from a backup. ```shell skale node restore [BACKUP_PATH] [ENV_FILE] @@ -145,139 +181,148 @@ skale node restore [BACKUP_PATH] [ENV_FILE] Arguments: -- `BACKUP_PATH` - path to the archive with backup data generated by `skale node backup` command -- `ENV_FILE` - path to .env file (required parameters are listed in the `skale node init` command) +- `BACKUP_PATH` - Path to the archive created by `skale node backup`. +- `ENV_FILE` - Path to .env file with configuration for the restored node. #### Node backup -Generate backup file to restore SKALE node on another machine +Generate a backup archive of the standard SKALE node's state. ```shell -skale node backup [BACKUP_FOLDER_PATH] [ENV_FILE] +skale node backup [BACKUP_FOLDER_PATH] ``` Arguments: -- `BACKUP_FOLDER_PATH` - path to the folder where the backup file will be saved +- `BACKUP_FOLDER_PATH` - Path to the folder where the backup file will be saved. #### Node Registration +Register the standard node with the SKALE Manager contract. 
+
```shell
-skale node register
+skale node register --name <NODE_NAME> --ip <NODE_IP> --domain <DOMAIN_NAME> [--port <PORT>]
```

Required arguments:

-- `--ip` - public IP for RPC connections and consensus
-- `--domain`/`-d` - SKALE node domain name
-- `--name` - SKALE node name
+- `--ip` - Public IP for RPC connections and consensus.
+- `--domain`/`-d` - SKALE node domain name.
+- `--name` - SKALE node name.

Optional arguments:

-- `--port` - public port - beginning of the port range for node SKALE Chains (default: `10000`)
+- `--port` - Base port for node sChains (default: `10000`).

#### Node update

-Update SKALE node on current machine
+Update the standard SKALE node software and configuration.

```shell
-skale node update [ENV_FILEPATH]
+skale node update [ENV_FILEPATH] [--yes]
```

-Options:
-
-- `--yes` - update without additional confirmation
-
Arguments:

-- `ENV_FILEPATH` - path to env file where parameters are defined
+- `ENV_FILEPATH` - Path to the .env file containing potentially updated parameters.
+
+Options:

-You can also specify a file with environment variables
-which will update parameters in env file used during skale node init.
+- `--yes` - Update without confirmation prompt.

#### Node turn-off

-Turn-off SKALE node on current machine and optionally set it to the maintenance mode
+Turn off the standard SKALE node containers.

```shell
-skale node turn-off
+skale node turn-off [--maintenance-on] [--yes]
```

Options:

-- `--maintenance-on` - set SKALE node into maintenance mode before turning off
-- `--yes` - turn off without additional confirmation
+- `--maintenance-on` - Set node to maintenance mode before turning off.
+- `--yes` - Turn off without confirmation.

#### Node turn-on

-Turn on SKALE node on current machine and optionally disable maintenance mode
+Turn on the standard SKALE node containers.
```shell -skale node turn-on [ENV_FILEPATH] +skale node turn-on [ENV_FILEPATH] [--maintenance-off] [--yes] ``` -Options: - -- `--maintenance-off` - turn off maintenance mode after turning on the node -- `--yes` - turn on without additional confirmation - Arguments: -- `ENV_FILEPATH` - path to env file where parameters are defined +- `ENV_FILEPATH` - Path to the .env file. + +Options: -You can also specify a file with environment variables -which will update parameters in env file used during skale node init. +- `--maintenance-off` - Turn off maintenance mode after turning on. +- `--yes` - Turn on without additional confirmation. #### Node maintenance -Set SKALE node into maintenance mode +Control the node's maintenance status in SKALE Manager. ```shell -skale node maintenance-on +# Set maintenance ON +skale node maintenance-on [--yes] + +# Set maintenance OFF +skale node maintenance-off ``` Options: -- `--yes` - set without additional confirmation +- `--yes` - Perform action without additional confirmation. -Switch off maintenance mode +#### Domain name + +Set the standard node's domain name. ```shell -skale node maintenance-off +skale node set-domain --domain [--yes] ``` -#### Domain name +Required Options: + +- `--domain`/`-d` - The new SKALE node domain name. -Set SKALE node domain name +Options: + +- `--yes` - Set without additional confirmation. + +#### Skale Node Signature + +Get the node signature for a validator ID. ```shell -skale node set-domain +skale node signature ``` -Options: +Arguments: -- `--domain`/`-d` - SKALE node domain name -- `--yes` - set without additional confirmation +- `VALIDATOR_ID` - The ID of the validator requesting the signature. -### Wallet commands +### Wallet commands (Standard) > Prefix: `skale wallet` -Commands related to Ethereum wallet associated with SKALE node +Commands related to the Ethereum wallet associated with the standard SKALE node. 
#### Wallet information

```shell
-skale wallet info
+skale wallet info [-f json/text]
```

Options:

-`-f/--format json/text` - optional
+- `-f/--format json/text` - optional.

#### Wallet setting

-Set local wallet for the SKALE node
+Set the local wallet private key for the node.

```shell
skale wallet set --private-key $ETH_PRIVATE_KEY
```

#### Send ETH tokens

-Send ETH tokens from SKALE node wallet to specific address
+Send ETH from the node's wallet.

```shell
-skale wallet send [ADDRESS] [AMOUNT]
+skale wallet send <RECEIVER_ADDRESS> <AMOUNT_ETH> [--yes]
```

Arguments:

-- `ADDRESS` - Ethereum receiver address
-- `AMOUNT` - Amount of ETH tokens to send
+- `RECEIVER_ADDRESS` - Ethereum receiver address.
+- `AMOUNT_ETH` - Amount of ETH tokens to send.

Optional arguments:

-`--yes` - Send without additional confirmation
+- `--yes` - Send without additional confirmation.

-### sChain commands
+### sChain commands (Standard)

> Prefix: `skale schains`

-#### SKALE Chain list
+Commands for interacting with sChains managed by the standard node.

-List of SKALE Chains served by connected node
+#### List sChains
+
+List of SKALE Chains served by connected node.

```shell
skale schains ls
```

-#### SKALE Chain configuration
+#### Get sChain config
+
+Show the configuration for a specific SKALE Chain.

```shell
-skale schains config SCHAIN_NAME
+skale schains config <SCHAIN_NAME>
```

-#### SKALE Chain DKG status
+#### Get DKG status

-List DKG status for each SKALE Chain on the node
+List DKG status for each SKALE Chain on the node.

```shell
skale schains dkg
```

-#### SKALE Chain info
+#### Get sChain info

-Show information about SKALE Chain on node
+Show information about a specific SKALE Chain on the node.

```shell
-skale schains info SCHAIN_NAME
+skale schains info <SCHAIN_NAME> [--json]
```

Options:

-- `--json` - Show info in JSON format
+- `--json` - Show info in JSON format.
-#### SKALE Chain repair +#### Repair sChain -Turn on repair mode for SKALE Chain +Turn on repair mode for a specific SKALE Chain. ```shell -skale schains repair SCHAIN_NAME +skale schains repair ``` -### Health commands +### Health commands (Standard) > Prefix: `skale health` -#### SKALE containers +Commands to check the health of the standard node and its components. + +#### List containers -List all SKALE containers running on the connected node +List all SKALE containers running on the connected node. ```shell -skale health containers +skale health containers [-a/--all] ``` Options: -- `-a/--all` - list all containers (by default - only running) +- `-a/--all` - list all containers (by default - only running). -#### sChains healthchecks +#### Get sChains healthchecks -Show health check results for all SKALE Chains on the node +Show health check results for all SKALE Chains on the node. ```shell -skale health schains +skale health schains [--json] ``` Options: -- `--json` - Show data in JSON format +- `--json` - Show data in JSON format. -#### SGX +#### Check SGX server status Status of the SGX server. Returns the SGX server URL and connection status. ```shell -$ skale health sgx - -SGX server status: -┌────────────────┬────────────────────────────┐ -│ SGX server URL │ https://0.0.0.0:1026/ │ -├────────────────┼────────────────────────────┤ -│ Status │ CONNECTED │ -└────────────────┴────────────────────────────┘ +skale health sgx ``` -### SSL commands +### SSL commands (Standard) > Prefix: `skale ssl` -#### SSL Status +Manage SSL certificates for the standard node. + +#### Check SSL Status -Status of the SSL certificates on the node +Status of the SSL certificates on the node. ```shell skale ssl status ``` -Admin API URL: \[GET] `/api/ssl/status` +Admin API URL: `[GET] /api/ssl/status` #### Upload certificates -Upload new SSL certificates +Upload new SSL certificates. 
```shell -skale ssl upload +skale ssl upload -c -k [-f/--force] ``` -##### Options +Options: -- `-c/--cert-path` - Path to the certificate file -- `-k/--key-path` - Path to the key file -- `-f/--force` - Overwrite existing certificates +- `-c/--cert-path` - Path to the certificate file. +- `-k/--key-path` - Path to the key file. +- `-f/--force` - Overwrite existing certificates. -Admin API URL: \[GET] `/api/ssl/upload` +Admin API URL: `[POST] /api/ssl/upload` -#### Check ssl certificate +#### Check certificate -Check SSL certificate by connecting to the health-check SSL server +Check SSL certificate by connecting to the health-check SSL server. ```shell -skale ssl check +skale ssl check [-c ] [-k ] [--type ] [--port ] [--no-client] ``` -##### Options +Options: -- `-c/--cert-path` - Path to the certificate file (default: uploaded using `skale ssl upload` certificate) -- `-k/--key-path` - Path to the key file (default: uploaded using `skale ssl upload` key) -- `--type/-t` - Check type (`openssl` - openssl cli check, `skaled` - skaled-based check, `all` - both) -- `--port/-p` - Port to start healthcheck server (default: `4536`) -- `--no-client` - Skip client connection (only make sure server started without errors) +- `-c/--cert-path` - Path to the certificate file (default: uploaded using `skale ssl upload` certificate). +- `-k/--key-path` - Path to the key file (default: uploaded using `skale ssl upload` key). +- `--type/-t` - Check type (`openssl` - openssl cli check, `skaled` - skaled-based check, `all` - both). +- `--port/-p` - Port to start healthcheck server (default: `4536`). +- `--no-client` - Skip client connection (only make sure server started without errors). -### Logs commands +### Logs commands (Standard) > Prefix: `skale logs` -#### CLI Logs +Access logs for the standard node. 
+ +#### Fetch CLI Logs Fetch node CLI logs: ```shell -skale logs cli +skale logs cli [--debug] ``` Options: -- `--debug` - show debug logs; more detailed output +- `--debug` - show debug logs; more detailed output. -#### Dump Logs +#### Dump All Node Logs Dump all logs from the connected node: ```shell -skale logs dump [PATH] +skale logs dump [TARGET_PATH] [-c/--container ] ``` -Optional arguments: +Arguments: -- `--container`, `-c` - Dump logs only from specified container +- `TARGET_PATH` - Optional path to save the log dump archive. -### Resources allocation commands +Options: + +- `--container`, `-c` - Dump logs only from specified container. + +### Resources allocation commands (Standard) > Prefix: `skale resources-allocation` +Manage the resources allocation file for the standard node. + #### Show allocation file Show resources allocation file: @@ -475,40 +529,43 @@ Show resources allocation file: skale resources-allocation show ``` -#### Generate/update +#### Generate/update allocation file Generate/update allocation file: ```shell -skale resources-allocation generate [ENV_FILE] +skale resources-allocation generate [ENV_FILE] [--yes] [-f/--force] ``` Arguments: -- `ENV_FILE` - path to .env file (required parameters are listed in the `skale node init` command) +- `ENV_FILE` - path to .env file (required parameters are listed in the `skale node init` command). Options: -- `--yes` - generate without additional confirmation -- `-f/--force` - rewrite allocation file if it exists +- `--yes` - generate without additional confirmation. +- `-f/--force` - rewrite allocation file if it exists. -## Sync CLI usage +--- -A sync node is a node dedicated to synchronizing a single sChain. +## Sync Node Usage (`skale` - Sync Build) -### Top level commands sync +Commands available in the **sync `skale` binary** for managing dedicated Sync nodes. +Note that this binary contains a **different set of commands** compared to the standard build. 
-#### Info +### Top level commands (Sync) + +#### Info (Sync) -Print build info +Print build info for the `skale` (sync) binary. ```shell skale info ``` -#### Version +#### Version (Sync) -Print version number +Print version number for the `skale` (sync) binary. ```shell skale version @@ -524,69 +581,255 @@ Options: #### Sync node initialization -Initialize full sync SKALE node on current machine +Initialize a dedicated Sync node on the current machine. ```shell -skale sync-node init [ENV_FILE] +skale sync-node init [ENV_FILE] [--indexer | --archive] [--snapshot] [--snapshot-from ] [--yes] ``` Arguments: -- `ENV_FILE` - path to .env file (required parameters are listed in the `skale sync-node init` command) +- `ENV_FILE` - path to .env file (required). -You should specify the following environment variables: +Required environment variables in `ENV_FILE`: -- `DISK_MOUNTPOINT` - disk mount point for storing sChains data -- `DOCKER_LVMPY_STREAM` - stream of `docker-lvmpy` to use -- `CONTAINER_CONFIGS_STREAM` - stream of `skale-node` to use -- `ENDPOINT` - RPC endpoint of the node in the network where SKALE Manager is deployed -- `MANAGER_CONTRACTS` - SKALE Manager main contract alias or address -- `IMA_CONTRACTS` - IMA main contract alias or address -- `SCHAIN_NAME` - name of the SKALE chain to sync -- `ENV_TYPE` - environment type (e.g., 'mainnet', 'testnet', 'qanet', 'devnet') +- `DISK_MOUNTPOINT` - Mount point for storing sChain data. +- `DOCKER_LVMPY_STREAM` - Stream of `docker-lvmpy`. +- `CONTAINER_CONFIGS_STREAM` - Stream of `skale-node`. +- `ENDPOINT` - RPC endpoint of the network where SKALE Manager is deployed. +- `MANAGER_CONTRACTS` - SKALE Manager alias or address. +- `IMA_CONTRACTS` - IMA alias or address. +- `SCHAIN_NAME` - Name of the specific SKALE chain to sync. +- `ENV_TYPE` - Environment type (e.g., 'mainnet', 'testnet'). 
> In `MANAGER_CONTRACTS` and `IMA_CONTRACTS` fields, if you are using a recognized network (e.g., 'Mainnet', 'Holesky', 'local'), you can use a recognized alias (e.g., 'production', 'grants'). You can check the list of recognized networks and aliases in [contract deployments](https://github.com/skalenetwork/skale-contracts/tree/deployments).

-> :warning: If you are using a custom network or a contract which isn't recognized by underlying skale library, you **MUST** provide a direct contract address.
+> :warning: If you are using a custom network or a contract which isn't recognized by the underlying `skale-contracts` library, you **MUST** provide a direct contract address.

Options:

-- `--indexer` - run sync node in indexer mode (disable block rotation)
-- `--archive` - enable historic state and disable block rotation (can't be used with `--indexer`)
-- `--snapshot` - start sync node from snapshot
-- `--snapshot-from` - specify the IP of the node to take snapshot from
-- `--yes` - initialize without additional confirmation
+- `--indexer` - Run in indexer mode (disables block rotation).
+- `--archive` - Run in archive mode (enable historic state and disable block rotation).
+- `--snapshot` - Start sync node from snapshot.
+- `--snapshot-from <IP>` - Specify the IP of another node to download a snapshot from.
+- `--yes` - Initialize without additional confirmation.

#### Sync node update

-Update full sync SKALE node on current machine
+Update the Sync node software and configuration.

```shell
-skale sync-node update [ENV_FILEPATH]
+skale sync-node update [ENV_FILEPATH] [--yes]
```

-Options:
+Arguments:

-- `--yes` - update without additional confirmation
+- `ENV_FILEPATH` - Path to the .env file.

-Arguments:
+Options:

-- `ENV_FILEPATH` - path to env file where parameters are defined
+- `--yes` - Update without additional confirmation.

> NOTE: You can just update a file with environment variables used during `skale sync-node init`.
#### Sync node cleanup -Cleanup full sync SKALE node on current machine +Remove all data and containers for the Sync node. + +```shell +skale sync-node cleanup [--yes] +``` + +Options: + +- `--yes` - Cleanup without confirmation. + +> WARNING: This command removes all Sync node data. + +--- + +## Mirage Node Usage (`mirage`) + +Commands available in the **`mirage` binary** for managing nodes on the Mirage network. + +### Top level commands (Mirage) + +#### Mirage Info + +Print build info for the `mirage` binary. + +```shell +mirage info +``` + +#### Mirage Version + +Print version number for the `mirage` binary. + +```shell +mirage version [--short] +``` + +Options: + +- `--short` - prints version only, without additional text. + +### Mirage Boot commands + +> Prefix: `mirage boot` + +Commands for a Mirage node in the Boot phase. + +#### Mirage Boot Initialization + +Initialize the Mirage node boot phase. + +```shell +mirage boot init [ENV_FILE] +``` + +Arguments: + +- `ENV_FILE` - path to .env file (required). + +Required environment variables in `ENV_FILE`: + +- `SGX_SERVER_URL` - SGX server URL. +- `DISK_MOUNTPOINT` - Mount point for storing data (BTRFS recommended). +- `CONTAINER_CONFIGS_STREAM` - Stream of `skale-node` configs. +- `ENDPOINT` - RPC endpoint of the network where Mirage Manager is deployed. +- `MANAGER_CONTRACTS` - SKALE Manager alias or address. +- `IMA_CONTRACTS` - IMA alias or address (_Note: Required by boot service, may not be used by Mirage itself_). +- `FILEBEAT_HOST` - URL/IP:Port of the Filebeat log server. +- `ENV_TYPE` - Environment type (e.g., 'mainnet-mirage', 'devnet-mirage'). + +Optional variables: + +- `MONITORING_CONTAINERS` - Enable monitoring containers (`cadvisor`, `node-exporter`). + +#### Mirage Boot Registration + +Register the Mirage node with Mirage Manager _during_ the boot phase. + +```shell +mirage boot register --name --ip --domain [--port ] +``` + +Required arguments: + +- `--name`/`-n` - Mirage node name. 
+- `--ip` - Public IP for RPC connections and consensus. +- `--domain`/`-d` - Mirage node domain name (e.g., `mirage1.example.com`). + +Optional arguments: + +- `--port`/`-p` - Base port for node sChains (default: `10000`). + +#### Mirage Boot Signature + +Get the node signature for a validator ID _during_ the boot phase. + +```shell +mirage boot signature +``` + +Arguments: + +- `VALIDATOR_ID` - The ID of the validator requesting the signature. + +#### Mirage Boot Migrate + +Migrate the Mirage node from the boot phase to the main phase (regular operation). + +```shell +mirage boot migrate [ENV_FILEPATH] [--yes] +``` + +Arguments: + +- `ENV_FILEPATH` - Path to the .env file. + +Options: + +- `--yes` - Migrate without confirmation. + +### Mirage Node commands + +> Prefix: `mirage node` + +Commands for managing a Mirage node during its regular operation (main phase). + +#### Mirage Node Initialization (Placeholder) + +Initialize the regular operation phase of the Mirage node. + +```shell +mirage node init +``` + +> **Note:** This command is currently a placeholder and not implemented. + +#### Mirage Node Registration (Placeholder) + +Register the node during regular operation. + +```shell +mirage node register +``` + +> **Note:** This command is currently a placeholder and not implemented. + +#### Mirage Node Update (Placeholder) + +Update the Mirage node during regular operation. ```shell -skale sync-node cleanup +mirage node update [ENV_FILEPATH] [--yes] [--unsafe] ``` +> **Note:** This command is currently a placeholder and not implemented. + +#### Mirage Node Signature + +Get the node signature for a validator ID during regular operation. + +```shell +mirage node signature +``` + +Arguments: + +- `VALIDATOR_ID` - The ID of the validator requesting the signature. + +#### Mirage Node Backup + +Generate a backup archive of the Mirage node's state. 
+ +```shell +mirage node backup +``` + +Arguments: + +- `BACKUP_FOLDER_PATH` - Path to the folder where the backup file will be saved. + +#### Mirage Node Restore + +Restore a Mirage node from a backup archive. + +```shell +mirage node restore [--config-only] +``` + +Arguments: + +- `BACKUP_PATH` - Path to the archive. +- `ENV_FILE` - Path to the .env file for the restored node configuration. + Options: -- `--yes` - cleanup without additional confirmation +- `--config-only` - Only restore configuration files. -> WARNING: This command will remove all data from the node. +--- ## Exit codes @@ -604,6 +847,8 @@ Exit codes conventions for SKALE CLI tools `*` - `validator-cli` only\ `**` - `node-cli` only +--- + ## Development ### Setup repo @@ -616,19 +861,30 @@ pip install -e .[dev] #### Generate info.py locally +Specify the build type (`normal`, `sync`, or `mirage`): + ```shell +# Example for Standard build ./scripts/generate_info.sh 1.0.0 my-branch normal + +# Example for Sync build +./scripts/generate_info.sh 1.0.0 my-branch sync + +# Example for Mirage build +./scripts/generate_info.sh 1.0.0 my-branch mirage ``` -##### Add linting git hook +#### Add linting git hook In file `.git/hooks/pre-commit` add: ```shell #!/bin/sh -ruff check +ruff check . && ruff format . 
``` +Make it executable: `chmod +x .git/hooks/pre-commit` + ### Debugging Run commands in dev mode: diff --git a/node_cli/cli/mirage_boot.py b/node_cli/cli/mirage_boot.py index 81da33b7..a7a61c63 100644 --- a/node_cli/cli/mirage_boot.py +++ b/node_cli/cli/mirage_boot.py @@ -80,8 +80,7 @@ def signature_boot(validator_id): prompt='Are you sure you want to update SKALE node software?', ) @click.option('--pull-config', 'pull_config_for_schain', hidden=True, type=str) -@click.option('--unsafe', 'unsafe_ok', help='Allow unsafe update', hidden=True, is_flag=True) @click.argument('env_file') @streamed_cmd -def migrate_boot(env_file, pull_config_for_schain, unsafe_ok): - migrate(env_file, pull_config_for_schain, unsafe_ok) +def migrate_boot(env_file, pull_config_for_schain): + migrate(env_file, pull_config_for_schain) diff --git a/node_cli/core/mirage_boot.py b/node_cli/core/mirage_boot.py index a6f9eacd..0925600a 100644 --- a/node_cli/core/mirage_boot.py +++ b/node_cli/core/mirage_boot.py @@ -56,7 +56,7 @@ def init(env_filepath: str) -> None: @check_inited @check_user -def migrate(env_filepath: str, pull_config_for_schain: str, unsafe_ok: bool = False) -> None: +def migrate(env_filepath: str, pull_config_for_schain: str) -> None: logger.info('Node update started') env = compose_node_env( env_filepath, From 004ab1216995f0bbf2422dd3b7a97d74a415f22e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20Gon=C3=A7alves?= Date: Mon, 21 Apr 2025 16:58:47 +0100 Subject: [PATCH 030/332] Eliminated the step to make the pre-commit hook executable, as we'll need to investigate pre-commit in future. --- README.md | 2 -- 1 file changed, 2 deletions(-) diff --git a/README.md b/README.md index e2414498..2b0ab262 100644 --- a/README.md +++ b/README.md @@ -883,8 +883,6 @@ In file `.git/hooks/pre-commit` add: ruff check . && ruff format . 
``` -Make it executable: `chmod +x .git/hooks/pre-commit` - ### Debugging Run commands in dev mode: From d50df0f36289a8d3dcd3c25ad2cc7777718bacc0 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20Gon=C3=A7alves?= Date: Mon, 21 Apr 2025 17:28:46 +0100 Subject: [PATCH 031/332] Fix circular import for NodeType Build broke because info.py need NodeType from core.node, but core.node need env.py, and env.py need NodeType from core.node. Make circle. Bad. Move NodeType to utils/node_types.py. Update imports in core.node, configs.env, generate_info.sh. No more circle. Build work now. --- node_cli/configs/env.py | 2 +- node_cli/core/mirage_boot.py | 3 ++- node_cli/core/mirage_node.py | 3 ++- node_cli/core/nginx.py | 2 +- node_cli/core/node.py | 7 +------ node_cli/main.py | 2 +- node_cli/operations/base.py | 2 +- node_cli/utils/node_type.py | 26 ++++++++++++++++++++++++++ scripts/generate_info.sh | 2 +- tests/core/core_node_test.py | 3 ++- 10 files changed, 38 insertions(+), 14 deletions(-) create mode 100644 node_cli/utils/node_type.py diff --git a/node_cli/configs/env.py b/node_cli/configs/env.py index ad62e932..8f3fab32 100644 --- a/node_cli/configs/env.py +++ b/node_cli/configs/env.py @@ -24,7 +24,7 @@ from node_cli.configs import SKALE_DIR, CONTAINER_CONFIG_PATH from node_cli.configs.alias_address_validation import validate_env_alias_or_address, ContractType -from node_cli.core.node import NodeType +from node_cli.utils.node_type import NodeType from node_cli.utils.helper import error_exit SKALE_DIR_ENV_FILEPATH = os.path.join(SKALE_DIR, '.env') diff --git a/node_cli/core/mirage_boot.py b/node_cli/core/mirage_boot.py index 0925600a..e98b1ae7 100644 --- a/node_cli/core/mirage_boot.py +++ b/node_cli/core/mirage_boot.py @@ -23,11 +23,12 @@ from node_cli.configs import TM_INIT_TIMEOUT from node_cli.core.resources import update_resource_allocation -from node_cli.core.node import NodeType, compose_node_env, is_base_containers_alive +from node_cli.core.node import 
compose_node_env, is_base_containers_alive from node_cli.operations import init_mirage_boot_op, migrate_mirage_boot_op from node_cli.utils.decorators import check_not_inited, check_inited, check_user from node_cli.utils.exit_codes import CLIExitCodes from node_cli.utils.helper import error_exit +from node_cli.utils.node_type import NodeType from node_cli.utils.print_formatters import print_node_cmd_error from node_cli.utils.texts import Texts diff --git a/node_cli/core/mirage_node.py b/node_cli/core/mirage_node.py index a02ca7e5..c72ca440 100644 --- a/node_cli/core/mirage_node.py +++ b/node_cli/core/mirage_node.py @@ -23,10 +23,11 @@ from node_cli.configs import SKALE_DIR, RESTORE_SLEEP_TIMEOUT from node_cli.core.host import save_env_params -from node_cli.core.node import NodeType, compose_node_env +from node_cli.core.node import compose_node_env from node_cli.utils.decorators import check_not_inited from node_cli.utils.exit_codes import CLIExitCodes from node_cli.utils.helper import error_exit +from node_cli.utils.node_type import NodeType from node_cli.utils.texts import Texts from node_cli.operations import restore_mirage_op diff --git a/node_cli/core/nginx.py b/node_cli/core/nginx.py index 0e6d0b7d..84ba8f62 100644 --- a/node_cli/core/nginx.py +++ b/node_cli/core/nginx.py @@ -22,7 +22,7 @@ from node_cli.cli.info import TYPE from node_cli.configs import NODE_CERTS_PATH, NGINX_TEMPLATE_FILEPATH, NGINX_CONFIG_FILEPATH -from node_cli.core.node import NodeType +from node_cli.utils.node_type import NodeType from node_cli.utils.docker_utils import restart_nginx_container, docker_client from node_cli.utils.helper import process_template diff --git a/node_cli/core/node.py b/node_cli/core/node.py index f91e33cc..b492c439 100644 --- a/node_cli/core/node.py +++ b/node_cli/core/node.py @@ -80,6 +80,7 @@ BASE_SYNC_COMPOSE_SERVICES, BASE_MIRAGE_COMPOSE_SERVICES, ) +from node_cli.utils.node_type import NodeType from node_cli.migrations.focal_to_jammy import migrate as 
migrate_2_6 @@ -101,12 +102,6 @@ class NodeStatuses(Enum): NOT_CREATED = 5 -class NodeType(Enum): - REGULAR = 0 - SYNC = 1 - MIRAGE = 2 - - def is_update_safe(node_type: NodeType = NodeType.REGULAR) -> bool: if not is_admin_running(node_type): if node_type == NodeType.SYNC: diff --git a/node_cli/main.py b/node_cli/main.py index 7d0c0af4..d1d152ec 100644 --- a/node_cli/main.py +++ b/node_cli/main.py @@ -41,7 +41,7 @@ from node_cli.cli.mirage_boot import mirage_boot_cli from node_cli.cli.mirage_node import mirage_node_cli from node_cli.core.host import init_logs_dir -from node_cli.core.node import NodeType +from node_cli.utils.node_type import NodeType from node_cli.configs import LONG_LINE from node_cli.utils.helper import safe_load_texts, init_default_logger from node_cli.utils.helper import error_exit diff --git a/node_cli/operations/base.py b/node_cli/operations/base.py index 131f8346..ce4c92c3 100644 --- a/node_cli/operations/base.py +++ b/node_cli/operations/base.py @@ -39,7 +39,6 @@ ) from node_cli.core.nftables import configure_nftables from node_cli.core.nginx import generate_nginx_config -from node_cli.core.node import NodeType from node_cli.core.node_options import NodeOptions from node_cli.core.resources import update_resource_allocation, init_shared_space_volume from node_cli.core.schains import ( @@ -67,6 +66,7 @@ ) from node_cli.utils.helper import str_to_bool, rm_dir from node_cli.utils.meta import get_meta_info, update_meta +from node_cli.utils.node_type import NodeType from node_cli.utils.print_formatters import print_failed_requirements_checks diff --git a/node_cli/utils/node_type.py b/node_cli/utils/node_type.py new file mode 100644 index 00000000..341479f4 --- /dev/null +++ b/node_cli/utils/node_type.py @@ -0,0 +1,26 @@ +# -*- coding: utf-8 -*- +# +# This file is part of node-cli +# +# Copyright (C) 2025-Present SKALE Labs +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General 
Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Affero General Public License for more details. +# +# You should have received a copy of the GNU Affero General Public License +# along with this program. If not, see . + +from enum import Enum + + +class NodeType(Enum): + REGULAR = 0 + SYNC = 1 + MIRAGE = 2 diff --git a/scripts/generate_info.sh b/scripts/generate_info.sh index f2d2cb2a..f5933f91 100755 --- a/scripts/generate_info.sh +++ b/scripts/generate_info.sh @@ -50,7 +50,7 @@ esac rm -f "$DIST_INFO_FILEPATH" touch "$DIST_INFO_FILEPATH" -echo "from node_cli.core.node import NodeType" >> "$DIST_INFO_FILEPATH" +echo "from node_cli.utils.node_type import NodeType" >> "$DIST_INFO_FILEPATH" echo "" >> "$DIST_INFO_FILEPATH" echo "BUILD_DATETIME = '$CURRENT_DATETIME'" >> "$DIST_INFO_FILEPATH" diff --git a/tests/core/core_node_test.py b/tests/core/core_node_test.py index 1fdc57fd..50b4ec43 100644 --- a/tests/core/core_node_test.py +++ b/tests/core/core_node_test.py @@ -11,9 +11,10 @@ from node_cli.configs import NODE_DATA_PATH from node_cli.configs.resource_allocation import RESOURCE_ALLOCATION_FILEPATH -from node_cli.core.node import NodeType, get_base_containers_amount, is_base_containers_alive +from node_cli.core.node import get_base_containers_amount, is_base_containers_alive from node_cli.core.node import init, pack_dir, update, is_update_safe from node_cli.utils.meta import CliMeta +from node_cli.utils.node_type import NodeType from tests.helper import response_mock, safe_update_api_response, subprocess_run_mock from tests.resources_test import BIG_DISK_SIZE From 2d896ae9506a029b486d59f5a72c6500b6514101 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20Gon=C3=A7alves?= 
Date: Mon, 21 Apr 2025 17:43:41 +0100 Subject: [PATCH 032/332] Fixes incorrect NodeType import in `docker_utils.py` --- node_cli/utils/docker_utils.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/node_cli/utils/docker_utils.py b/node_cli/utils/docker_utils.py index a80d0289..56dc1c22 100644 --- a/node_cli/utils/docker_utils.py +++ b/node_cli/utils/docker_utils.py @@ -36,7 +36,7 @@ SGX_CERTIFICATES_DIR_NAME, NGINX_CONTAINER_NAME, ) -from node_cli.core.node import NodeType +from node_cli.utils.node_type import NodeType logger = logging.getLogger(__name__) From 826522f490cab87331eb56af1d4bae295b2915f6 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20Gon=C3=A7alves?= Date: Tue, 22 Apr 2025 13:04:12 +0100 Subject: [PATCH 033/332] Replaced sync_node flag with node_type in build_env_params. --- tests/configs/configs_env_validate_test.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/tests/configs/configs_env_validate_test.py b/tests/configs/configs_env_validate_test.py index ed778d40..897473fe 100644 --- a/tests/configs/configs_env_validate_test.py +++ b/tests/configs/configs_env_validate_test.py @@ -22,6 +22,7 @@ ContractType, ) from node_cli.utils.exit_codes import CLIExitCodes +from node_cli.utils.node_type import NodeType ENDPOINT = 'http://localhost:8545' @@ -66,7 +67,7 @@ def test_load_env_file_not_readable(tmp_path): @pytest.mark.parametrize('sync_node,has_schain_name', [(True, True), (False, False)]) def test_build_env_params_sync_and_non_sync(sync_node, has_schain_name): - params = build_env_params(sync_node=sync_node) + params = build_env_params(node_type=NodeType.SYNC if sync_node else NodeType.REGULAR) assert ('SCHAIN_NAME' in params) == has_schain_name From b435e83558d3704f3f53d49634e2dedf292a9a6f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20Gon=C3=A7alves?= Date: Tue, 22 Apr 2025 13:26:58 +0100 Subject: [PATCH 034/332] Updated helper-scripts --- helper-scripts | 2 +- 1 file changed, 1 insertion(+), 1 
deletion(-) diff --git a/helper-scripts b/helper-scripts index 84b57271..7c6ccee7 160000 --- a/helper-scripts +++ b/helper-scripts @@ -1 +1 @@ -Subproject commit 84b572717eef72feb3c901ea8817f9dcddbca8bb +Subproject commit 7c6ccee7599f30ddec3cf0d7747dd031fd57cb27 From 512eee8cdaf7828d53bbdfb2a8e283ac2f3df9e2 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20Gon=C3=A7alves?= Date: Tue, 22 Apr 2025 13:44:44 +0100 Subject: [PATCH 035/332] Updated README Development section with more complete install instructions and fixed pre-commit hook --- README.md | 36 +++++++++++++++++++++++++++++++----- 1 file changed, 31 insertions(+), 5 deletions(-) diff --git a/README.md b/README.md index 2b0ab262..11e06eae 100644 --- a/README.md +++ b/README.md @@ -853,10 +853,36 @@ Exit codes conventions for SKALE CLI tools ### Setup repo +#### Dependencies + +- Python 3.11 +- Git + +#### Clone the repository + +Clone with HTTPS: + +```shell +git clone https://github.com/skalenetwork/node-cli.git +``` + +Or with SSH: + +```shell +git clone git@github.com:skalenetwork/node-cli.git +``` + +#### Create and source virtual environment + +```shell +python3.11 -m venv venv +source venv/bin/activate +``` + #### Install development dependencies ```shell -pip install -e .[dev] +pip install -e ".[dev]" ``` #### Generate info.py locally @@ -880,15 +906,15 @@ In file `.git/hooks/pre-commit` add: ```shell #!/bin/sh -ruff check . && ruff format . +./venv/bin/ruff check . ``` -### Debugging +> **Note:** This hook assumes your virtual environment is named 'venv' and is located at the root of the repository. 
-Run commands in dev mode: +Make the hook executable: ```shell -ENV=dev python main.py YOUR_COMMAND +chmod +x .git/hooks/pre-commit ``` ## Contributing From 7dbab516d535bd02c5f40a163c73ae9ae860c050 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20Gon=C3=A7alves?= Date: Tue, 22 Apr 2025 15:11:02 +0100 Subject: [PATCH 036/332] Updated static params handling by Checkers to ignore requirements that have value equal to 'disabled' --- node_cli/core/checks.py | 2 +- tests/.skale/config/static_params.yaml | 57 ++++++++++++++++++++++++++ tests/conftest.py | 40 ++++++++++++++++++ 3 files changed, 98 insertions(+), 1 deletion(-) diff --git a/node_cli/core/checks.py b/node_cli/core/checks.py index 7ffe3a81..c4afee68 100644 --- a/node_cli/core/checks.py +++ b/node_cli/core/checks.py @@ -173,7 +173,7 @@ def get_checks(self, check_type: CheckType = CheckType.ALL) -> FuncList: type(self), predicate=lambda m: inspect.isfunction(m) and getattr(m, '_check_type', None) in allowed_types - and self.requirements.get(m.__name__, None) is not None, + and self.requirements.get(m.__name__, None) != 'disabled', ) return [functools.partial(m[1], self) for m in methods] diff --git a/tests/.skale/config/static_params.yaml b/tests/.skale/config/static_params.yaml index a0ea143f..09b51ae6 100644 --- a/tests/.skale/config/static_params.yaml +++ b/tests/.skale/config/static_params.yaml @@ -16,6 +16,63 @@ common: db_storage: 0.2 # leveldb may use x2 storage, so 0.4 divided by 2, actually using 0.4 shared_space_coefficient: 1 envs: + mainnet-mirage: + server: + cpu_total: 8 + cpu_physical: 1 + memory: 32000000000 + swap: 16000000000 + disk: 500000000000 + + package: + iptables-persistent: 1.0.4 + btrfs-progs: 4.15.1 + lsof: "4.89" + psmisc: 23.1-1 + lvm2: disabled + + docker: + docker-api: 1.41.0 + docker-engine: 20.10.7 + docker-compose: 1.27.4 + + schain: + snapshotIntervalSec: 86400 + emptyBlockIntervalMs: 10000 + snapshotDownloadTimeout: 18000 + snapshotDownloadInactiveTimeout: 120 + + 
schain_cmd: ["-v 2", "--aa no"] + + devnet-mirage: + server: + cpu_total: 1 + cpu_physical: 1 + memory: 2000000000 + swap: 2000000000 + disk: 80000000000 + + package: + iptables-persistent: 1.0.4 + btrfs-progs: 4.15.1 + lsof: "4.89" + psmisc: 23.1-1 + lvm2: disabled + + docker: + docker-api: 1.41.0 + docker-engine: 20.10.7 + docker-compose: 1.27.4 + + schain: + snapshotIntervalSec: 86400 + emptyBlockIntervalMs: 10000 + snapshotDownloadTimeout: 18000 + snapshotDownloadInactiveTimeout: 120 + + schain_cmd: + ["-v 3", "--web3-trace", "--enable-debug-behavior-apis", "--aa no"] + mainnet: server: cpu_total: 8 diff --git a/tests/conftest.py b/tests/conftest.py index 88691472..03adb3c1 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -49,6 +49,46 @@ TEST_ENV_PARAMS = """ +mainnet-mirage: + server: + cpu_total: 8 + cpu_physical: 1 + memory: 32000000000 + swap: 16000000000 + disk: 500000000000 + + package: + iptables-persistent: 1.0.4 + btrfs-progs: 4.15.1 + lsof: "4.89" + psmisc: 23.1-1 + lvm2: disabled + + docker: + docker-api: 1.41.0 + docker-engine: 20.10.7 + docker-compose: 1.27.4 + + devnet-mirage: + server: + cpu_total: 1 + cpu_physical: 1 + memory: 2000000000 + swap: 2000000000 + disk: 80000000000 + + package: + iptables-persistent: 1.0.4 + btrfs-progs: 4.15.1 + lsof: "4.89" + psmisc: 23.1-1 + lvm2: disabled + + docker: + docker-api: 1.41.0 + docker-engine: 20.10.7 + docker-compose: 1.27.4 + mainnet: server: cpu_total: 4 From bcc3a7fb964fbbfb7899a6b11692b177e57c118e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20Gon=C3=A7alves?= Date: Tue, 22 Apr 2025 16:33:34 +0100 Subject: [PATCH 037/332] Refactored `is_update_safe` tests with improved coverage. 
--- tests/core/core_node_test.py | 93 +++++++++++++++++++++++++----------- 1 file changed, 66 insertions(+), 27 deletions(-) diff --git a/tests/core/core_node_test.py b/tests/core/core_node_test.py index 50b4ec43..8d345039 100644 --- a/tests/core/core_node_test.py +++ b/tests/core/core_node_test.py @@ -186,30 +186,69 @@ def test_update_node(mocked_g_config, resource_file): assert result is None -def test_is_update_safe(): - assert is_update_safe() - assert is_update_safe(node_type=NodeType.SYNC) - - with mock.patch('node_cli.core.node.is_admin_running', return_value=True): - with mock.patch('node_cli.core.node.is_api_running', return_value=True): - assert not is_update_safe() - assert is_update_safe(node_type=NodeType.SYNC) - - with mock.patch('node_cli.core.node.is_admin_running', return_value=True): - assert is_update_safe() - assert not is_update_safe(node_type=NodeType.SYNC) - - with mock.patch('node_cli.utils.docker_utils.is_container_running', return_value=True): - with mock.patch( - 'node_cli.utils.helper.requests.get', return_value=safe_update_api_response() - ): - assert is_update_safe() - - with mock.patch('node_cli.utils.helper.requests.get', return_value=safe_update_api_response()): - assert is_update_safe() - - with mock.patch('node_cli.utils.docker_utils.is_container_running', return_value=True): - with mock.patch( - 'node_cli.utils.helper.requests.get', return_value=safe_update_api_response(safe=False) - ): - assert not is_update_safe() +@pytest.mark.parametrize('node_type', [NodeType.REGULAR, NodeType.SYNC, NodeType.MIRAGE]) +@mock.patch('node_cli.utils.docker_utils.is_admin_running', return_value=False) +@mock.patch('node_cli.utils.docker_utils.is_api_running', return_value=False) +@mock.patch('node_cli.utils.helper.requests.get') +def test_is_update_safe_when_admin_and_api_not_running( + mock_requests_get, mock_is_api_running, mock_is_admin_running, node_type +): + assert is_update_safe(node_type=node_type) is True + 
mock_requests_get.assert_not_called() + + +@mock.patch('node_cli.utils.docker_utils.is_admin_running', return_value=False) +@mock.patch('node_cli.utils.docker_utils.is_api_running', return_value=True) +@mock.patch('node_cli.utils.helper.requests.get') +def test_is_update_safe_when_admin_not_running_for_sync( + mock_requests_get, mock_is_api_running, mock_is_admin_running +): + assert is_update_safe(node_type=NodeType.SYNC) is True + mock_requests_get.assert_not_called() + + +@pytest.mark.parametrize('node_type', [NodeType.REGULAR, NodeType.SYNC, NodeType.MIRAGE]) +@pytest.mark.parametrize( + 'api_is_safe, expected_result', + [(True, True), (False, False)], + ids=['api_safe', 'api_unsafe'], +) +@mock.patch('node_cli.utils.docker_utils.is_admin_running', return_value=True) +@mock.patch('node_cli.utils.helper.requests.get') +def test_is_update_safe_when_admin_running( + mock_requests_get, mock_is_admin_running, api_is_safe, expected_result, node_type +): + mock_requests_get.return_value = safe_update_api_response(safe=api_is_safe) + assert is_update_safe(node_type=node_type) is expected_result + mock_requests_get.assert_called_once() + + +@pytest.mark.parametrize('node_type', [NodeType.REGULAR, NodeType.MIRAGE]) +@pytest.mark.parametrize( + 'api_is_safe, expected_result', + [(True, True), (False, False)], + ids=['api_safe', 'api_unsafe'], +) +@mock.patch('node_cli.utils.docker_utils.is_admin_running', return_value=False) +@mock.patch('node_cli.utils.docker_utils.is_api_running', return_value=True) +@mock.patch('node_cli.utils.helper.requests.get') +def test_is_update_safe_when_only_api_running_for_regular( + mock_requests_get, + mock_is_api_running, + mock_is_admin_running, + api_is_safe, + expected_result, + node_type, +): + mock_requests_get.return_value = safe_update_api_response(safe=api_is_safe) + assert is_update_safe(node_type=node_type) is expected_result + mock_requests_get.assert_called_once() + + +@pytest.mark.parametrize('node_type', [NodeType.REGULAR, 
NodeType.SYNC, NodeType.MIRAGE]) +@mock.patch('node_cli.utils.docker_utils.is_admin_running', return_value=True) +@mock.patch('node_cli.utils.helper.requests.get') +def test_is_update_safe_when_api_call_fails(mock_requests_get, mock_is_admin_running, node_type): + mock_requests_get.side_effect = requests.exceptions.ConnectionError('Test connection error') + assert is_update_safe(node_type=node_type) is False + mock_requests_get.assert_called_once() From 09b58907bff8bf14b9744ac8c2e3531fd277bff1 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20Gon=C3=A7alves?= Date: Tue, 22 Apr 2025 16:58:51 +0100 Subject: [PATCH 038/332] Fixed `is_update_safe` tests mock path --- tests/core/core_node_test.py | 26 ++++++++++++++++---------- 1 file changed, 16 insertions(+), 10 deletions(-) diff --git a/tests/core/core_node_test.py b/tests/core/core_node_test.py index 8d345039..4f67139f 100644 --- a/tests/core/core_node_test.py +++ b/tests/core/core_node_test.py @@ -11,8 +11,14 @@ from node_cli.configs import NODE_DATA_PATH from node_cli.configs.resource_allocation import RESOURCE_ALLOCATION_FILEPATH -from node_cli.core.node import get_base_containers_amount, is_base_containers_alive -from node_cli.core.node import init, pack_dir, update, is_update_safe +from node_cli.core.node import ( + get_base_containers_amount, + is_base_containers_alive, + init, + pack_dir, + update, + is_update_safe, +) from node_cli.utils.meta import CliMeta from node_cli.utils.node_type import NodeType @@ -187,8 +193,8 @@ def test_update_node(mocked_g_config, resource_file): @pytest.mark.parametrize('node_type', [NodeType.REGULAR, NodeType.SYNC, NodeType.MIRAGE]) -@mock.patch('node_cli.utils.docker_utils.is_admin_running', return_value=False) -@mock.patch('node_cli.utils.docker_utils.is_api_running', return_value=False) +@mock.patch('node_cli.core.node.is_admin_running', return_value=False) +@mock.patch('node_cli.core.node.is_api_running', return_value=False) 
@mock.patch('node_cli.utils.helper.requests.get') def test_is_update_safe_when_admin_and_api_not_running( mock_requests_get, mock_is_api_running, mock_is_admin_running, node_type @@ -197,8 +203,8 @@ def test_is_update_safe_when_admin_and_api_not_running( mock_requests_get.assert_not_called() -@mock.patch('node_cli.utils.docker_utils.is_admin_running', return_value=False) -@mock.patch('node_cli.utils.docker_utils.is_api_running', return_value=True) +@mock.patch('node_cli.core.node.is_admin_running', return_value=False) +@mock.patch('node_cli.core.node.is_api_running', return_value=True) @mock.patch('node_cli.utils.helper.requests.get') def test_is_update_safe_when_admin_not_running_for_sync( mock_requests_get, mock_is_api_running, mock_is_admin_running @@ -213,7 +219,7 @@ def test_is_update_safe_when_admin_not_running_for_sync( [(True, True), (False, False)], ids=['api_safe', 'api_unsafe'], ) -@mock.patch('node_cli.utils.docker_utils.is_admin_running', return_value=True) +@mock.patch('node_cli.core.node.is_admin_running', return_value=True) @mock.patch('node_cli.utils.helper.requests.get') def test_is_update_safe_when_admin_running( mock_requests_get, mock_is_admin_running, api_is_safe, expected_result, node_type @@ -229,8 +235,8 @@ def test_is_update_safe_when_admin_running( [(True, True), (False, False)], ids=['api_safe', 'api_unsafe'], ) -@mock.patch('node_cli.utils.docker_utils.is_admin_running', return_value=False) -@mock.patch('node_cli.utils.docker_utils.is_api_running', return_value=True) +@mock.patch('node_cli.core.node.is_admin_running', return_value=False) +@mock.patch('node_cli.core.node.is_api_running', return_value=True) @mock.patch('node_cli.utils.helper.requests.get') def test_is_update_safe_when_only_api_running_for_regular( mock_requests_get, @@ -246,7 +252,7 @@ def test_is_update_safe_when_only_api_running_for_regular( @pytest.mark.parametrize('node_type', [NodeType.REGULAR, NodeType.SYNC, NodeType.MIRAGE]) 
-@mock.patch('node_cli.utils.docker_utils.is_admin_running', return_value=True) +@mock.patch('node_cli.core.node.is_admin_running', return_value=True) @mock.patch('node_cli.utils.helper.requests.get') def test_is_update_safe_when_api_call_fails(mock_requests_get, mock_is_admin_running, node_type): mock_requests_get.side_effect = requests.exceptions.ConnectionError('Test connection error') From f9f09c4c85e035abe3def7e5940be7bbfd19f3b9 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20Gon=C3=A7alves?= Date: Tue, 22 Apr 2025 17:53:11 +0100 Subject: [PATCH 039/332] Changed `is_base_containers_alive` to check both `mirage_` and `skale_` prefixes for mirage node --- node_cli/core/node.py | 13 +++++++++---- 1 file changed, 9 insertions(+), 4 deletions(-) diff --git a/node_cli/core/node.py b/node_cli/core/node.py index b492c439..179634f3 100644 --- a/node_cli/core/node.py +++ b/node_cli/core/node.py @@ -422,15 +422,20 @@ def get_base_containers_amount(node_type: NodeType = NodeType.REGULAR): def is_base_containers_alive(node_type: NodeType = NodeType.REGULAR) -> bool: if node_type == NodeType.MIRAGE: - prefix = 'mirage_' + prefixes = ['mirage_', 'skale_'] else: - prefix = 'skale_' + prefixes = ['skale_'] dclient = docker.from_env() containers = dclient.containers.list() - skale_containers = list(filter(lambda c: c.name.startswith(prefix), containers)) + + alive_containers = [] + for prefix in prefixes: + prefix_containers = list(filter(lambda c: c.name.startswith(prefix), containers)) + alive_containers.extend(prefix_containers) + containers_amount = get_base_containers_amount(node_type) - return len(skale_containers) >= containers_amount + return len(alive_containers) >= containers_amount def get_node_info_plain(): From cc1150a316f72462b6f216f33d77418164482740 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20Gon=C3=A7alves?= Date: Wed, 23 Apr 2025 17:23:33 +0100 Subject: [PATCH 040/332] Reworked env.py tests; Introduced explicit node type parameters to related 
functions. --- node_cli/cli/mirage_boot.py | 2 +- node_cli/cli/node.py | 21 +- node_cli/cli/resources_allocation.py | 3 +- node_cli/cli/schains.py | 3 +- node_cli/configs/env.py | 29 +- node_cli/core/node.py | 24 +- node_cli/core/resources.py | 9 +- node_cli/core/schains.py | 9 +- node_cli/operations/base.py | 2 +- tests/configs/configs_env_validate_test.py | 375 ++++++++++++--------- tests/core/core_node_test.py | 5 +- 11 files changed, 291 insertions(+), 191 deletions(-) diff --git a/node_cli/cli/mirage_boot.py b/node_cli/cli/mirage_boot.py index a7a61c63..8130c253 100644 --- a/node_cli/cli/mirage_boot.py +++ b/node_cli/cli/mirage_boot.py @@ -77,7 +77,7 @@ def signature_boot(validator_id): is_flag=True, callback=abort_if_false, expose_value=False, - prompt='Are you sure you want to update SKALE node software?', + prompt='Are you sure you want to mirage node from Mirage Boot Phase to Mirage Main Phase?', ) @click.option('--pull-config', 'pull_config_for_schain', hidden=True, type=str) @click.argument('env_file') diff --git a/node_cli/cli/node.py b/node_cli/cli/node.py index ebddc5c8..10bbd17e 100644 --- a/node_cli/cli/node.py +++ b/node_cli/cli/node.py @@ -19,8 +19,9 @@ import click -from node_cli.core.node import configure_firewall_rules +from node_cli.cli.info import TYPE from node_cli.core.node import ( + configure_firewall_rules, get_node_signature, init, restore, @@ -36,11 +37,12 @@ run_checks, ) from node_cli.configs import DEFAULT_NODE_BASE_PORT -from node_cli.configs.env import ALLOWED_ENV_TYPES +from node_cli.configs.env import ALLOWED_SKALE_ENV_TYPES from node_cli.utils.decorators import check_inited from node_cli.utils.helper import abort_if_false, safe_load_texts, streamed_cmd, IP_TYPE from node_cli.utils.meta import get_meta_info from node_cli.utils.print_formatters import print_meta_info +from node_cli.utils.node_type import NodeType TEXTS = safe_load_texts() @@ -99,7 +101,12 @@ def init_node(env_file): @click.argument('env_file') @streamed_cmd def 
update_node(env_file, pull_config_for_schain, unsafe_ok): - update(env_file, pull_config_for_schain, unsafe_ok) + update( + env_filepath=env_file, + pull_config_for_schain=pull_config_for_schain, + node_type=NodeType.REGULAR, + unsafe_ok=unsafe_ok, + ) @node.command('signature', help='Get node signature for given validator id') @@ -166,7 +173,7 @@ def remove_node_from_maintenance(): @click.option('--unsafe', 'unsafe_ok', help='Allow unsafe turn-off', hidden=True, is_flag=True) @streamed_cmd def _turn_off(maintenance_on, unsafe_ok): - turn_off(maintenance_on, unsafe_ok) + turn_off(node_type=TYPE, maintenance_on=maintenance_on, unsafe_ok=unsafe_ok) @node.command('turn-on', help='Turn on the node') @@ -206,16 +213,16 @@ def _set_domain_name(domain): set_domain_name(domain) -@node.command(help='Check if node meet network requirements') +@node.command(help='Check if node meets network requirements') @click.option( '--network', '-n', - type=click.Choice(ALLOWED_ENV_TYPES), + type=click.Choice(ALLOWED_SKALE_ENV_TYPES), default='mainnet', help='Network to check', ) def check(network): - run_checks(network) + run_checks(node_type=NodeType.REGULAR, network=network) @node.command(help='Reconfigure nftables rules') diff --git a/node_cli/cli/resources_allocation.py b/node_cli/cli/resources_allocation.py index 768690ed..9a518a2d 100644 --- a/node_cli/cli/resources_allocation.py +++ b/node_cli/cli/resources_allocation.py @@ -25,6 +25,7 @@ generate_resource_allocation_config, ) from node_cli.utils.helper import abort_if_false, safe_load_texts +from node_cli.utils.node_type import NodeType TEXTS = safe_load_texts() @@ -59,4 +60,4 @@ def show(): ) @click.option('--force', '-f', is_flag=True, help='Rewrite if already exists') def generate(env_file, force): - generate_resource_allocation_config(env_file=env_file, force=force) + generate_resource_allocation_config(node_type=NodeType.REGULAR, env_file=env_file, force=force) diff --git a/node_cli/cli/schains.py b/node_cli/cli/schains.py 
index 188b39fe..803ea754 100644 --- a/node_cli/cli/schains.py +++ b/node_cli/cli/schains.py @@ -32,6 +32,7 @@ show_schains, toggle_schain_repair_mode, ) +from node_cli.cli.info import TYPE @click.group() @@ -107,4 +108,4 @@ def info_(schain_name: str, json_format: bool) -> None: def restore( schain_name: str, snapshot_path: str, schain_type: str, env_type: Optional[str] ) -> None: - restore_schain_from_snapshot(schain_name, snapshot_path) + restore_schain_from_snapshot(schain_name, snapshot_path, node_type=TYPE) diff --git a/node_cli/configs/env.py b/node_cli/configs/env.py index 8f3fab32..05b8ba5a 100644 --- a/node_cli/configs/env.py +++ b/node_cli/configs/env.py @@ -89,14 +89,14 @@ def absent_required_params(params: Dict[str, str]) -> List[str]: def get_validated_env_config( + node_type: NodeType, env_filepath: str = SKALE_DIR_ENV_FILEPATH, - node_type: NodeType = NodeType.REGULAR, is_mirage_boot: bool = False, ) -> Dict[str, str]: load_env_file(env_filepath) params = build_env_params(node_type=node_type, is_mirage_boot=is_mirage_boot) populate_env_params(params) - validate_env_params(params) + validate_env_params(node_type=node_type, params=params) return params @@ -106,9 +106,9 @@ def load_env_file(env_filepath: str) -> None: def build_env_params( - node_type: NodeType = NodeType.REGULAR, is_mirage_boot: bool = False + node_type: NodeType, + is_mirage_boot: bool = False, ) -> Dict[str, str]: - """Return environment variables dictionary with keys based on node type.""" if node_type == NodeType.MIRAGE and is_mirage_boot: params = REQUIRED_PARAMS_MIRAGE_BOOT.copy() elif node_type == NodeType.MIRAGE: @@ -129,16 +129,27 @@ def populate_env_params(params: Dict[str, str]) -> None: params[key] = str(env_value) -def validate_env_params(params: Dict[str, str]) -> None: +def validate_env_params( + node_type: NodeType, + params: Dict[str, str], +) -> None: missing = absent_required_params(params) if missing: error_exit(f'Missing required parameters: {missing}') - 
validate_env_type(params['ENV_TYPE']) + validate_env_type(node_type=node_type, env_type=params['ENV_TYPE']) endpoint = params['ENDPOINT'] validate_env_alias_or_address(params['IMA_CONTRACTS'], ContractType.IMA, endpoint) validate_env_alias_or_address(params['MANAGER_CONTRACTS'], ContractType.MANAGER, endpoint) -def validate_env_type(env_type: str) -> None: - if env_type not in ALLOWED_ENV_TYPES: - error_exit(f'Allowed ENV_TYPE values are {ALLOWED_ENV_TYPES}. Actual: "{env_type}"') +def validate_env_type(node_type: NodeType, env_type: str) -> None: + allowed_env_types_for_node_type = list() + if node_type == NodeType.MIRAGE: + allowed_env_types_for_node_type = ALLOWED_MIRAGE_ENV_TYPES + else: + allowed_env_types_for_node_type = ALLOWED_SKALE_ENV_TYPES + + if env_type not in allowed_env_types_for_node_type: + error_exit( + f'Allowed ENV_TYPE values are {allowed_env_types_for_node_type}. Actual: "{env_type}"' + ) diff --git a/node_cli/core/node.py b/node_cli/core/node.py index 179634f3..2436c674 100644 --- a/node_cli/core/node.py +++ b/node_cli/core/node.py @@ -232,11 +232,11 @@ def compose_node_env( is_mirage_boot: bool = False, ) -> dict[str, str]: if env_filepath is not None: - env_params = get_validated_env_config(env_filepath, node_type=node_type) + env_params = get_validated_env_config(node_type=node_type, env_filepath=env_filepath) if save: save_env_params(env_filepath) else: - env_params = get_validated_env_config(INIT_ENV_FILEPATH, node_type=node_type) + env_params = get_validated_env_config(node_type=node_type, env_filepath=INIT_ENV_FILEPATH) if node_type == NodeType.SYNC or node_type == NodeType.MIRAGE: mnt_dir = SCHAINS_MNT_DIR_SINGLE_CHAIN @@ -265,8 +265,13 @@ def compose_node_env( @check_inited @check_user -def update(env_filepath: str, pull_config_for_schain: str, unsafe_ok: bool = False) -> None: - if not unsafe_ok and not is_update_safe(): +def update( + env_filepath: str, + pull_config_for_schain: Optional[str], + node_type: NodeType, + unsafe_ok: 
bool = False, +) -> None: + if not unsafe_ok and not is_update_safe(node_type=node_type): error_msg = 'Cannot update safely' error_exit(error_msg, exit_code=CLIExitCodes.UNSAFE_UPDATE) @@ -386,14 +391,14 @@ def set_maintenance_mode_off(): @check_inited @check_user -def turn_off(maintenance_on: bool = False, unsafe_ok: bool = False) -> None: +def turn_off(node_type: NodeType, maintenance_on: bool = False, unsafe_ok: bool = False) -> None: if not unsafe_ok and not is_update_safe(): error_msg = 'Cannot turn off safely' error_exit(error_msg, exit_code=CLIExitCodes.UNSAFE_UPDATE) if maintenance_on: set_maintenance_mode_on() env = compose_node_env(SKALE_DIR_ENV_FILEPATH, save=False) - turn_off_op(env=env) + turn_off_op(node_type=node_type, env=env) @check_inited @@ -476,6 +481,7 @@ def set_domain_name(domain_name): def run_checks( + node_type: NodeType, network: str = 'mainnet', container_config_path: str = CONTAINER_CONFIG_PATH, disk: Optional[str] = None, @@ -485,13 +491,13 @@ def run_checks( return if disk is None: - env = get_validated_env_config() + env = get_validated_env_config(node_type=node_type) disk = env['DISK_MOUNTPOINT'] failed_checks = run_host_checks(disk, network, container_config_path) if not failed_checks: - print('Requirements checking succesfully finished!') + print('Requirements checking successfully finished!') else: - print('Node is not fully meet the requirements!') + print('Node does not fully meet the requirements!') print_failed_requirements_checks(failed_checks) diff --git a/node_cli/core/resources.py b/node_cli/core/resources.py index 5762ebad..6dd799bc 100644 --- a/node_cli/core/resources.py +++ b/node_cli/core/resources.py @@ -28,6 +28,7 @@ from node_cli.utils.docker_utils import ensure_volume from node_cli.utils.schain_types import SchainTypes from node_cli.utils.helper import write_json, read_json, run_cmd, safe_load_yml +from node_cli.utils.node_type import NodeType from node_cli.configs import ALLOCATION_FILEPATH, 
STATIC_PARAMS_FILEPATH, SNAPSHOTS_SHARED_VOLUME from node_cli.configs.resource_allocation import ( RESOURCE_ALLOCATION_FILEPATH, @@ -91,13 +92,17 @@ def compose_resource_allocation_config(env_type: str, params_by_env_type: Dict = } -def generate_resource_allocation_config(env_file, force=False) -> None: +def generate_resource_allocation_config( + env_file, + force=False, + node_type: NodeType = NodeType.REGULAR, +) -> None: if not force and os.path.isfile(RESOURCE_ALLOCATION_FILEPATH): msg = 'Resource allocation file already exists' logger.debug(msg) print(msg) return - env_params = get_validated_env_config(env_file) + env_params = get_validated_env_config(node_type=node_type, env_filepath=env_file) if env_params is None: return logger.info('Generating resource allocation file ...') diff --git a/node_cli/core/schains.py b/node_cli/core/schains.py index 34f05cea..957e02df 100644 --- a/node_cli/core/schains.py +++ b/node_cli/core/schains.py @@ -27,6 +27,7 @@ ) from node_cli.utils.docker_utils import ensure_volume, is_volume_exists from node_cli.utils.helper import read_json, run_cmd, save_json +from node_cli.utils.node_type import NodeType from lvmpy.src.core import mount, volume_mountpoint @@ -182,10 +183,14 @@ def fillin_snapshot_folder(src_path: str, block_number: int) -> None: def restore_schain_from_snapshot( - schain: str, snapshot_path: str, env_type: Optional[str] = None, schain_type: str = 'medium' + schain: str, + snapshot_path: str, + node_type: NodeType, + env_type: Optional[str] = None, + schain_type: str = 'medium', ) -> None: if env_type is None: - env_config = get_validated_env_config() + env_config = get_validated_env_config(node_type=node_type) env_type = env_config['ENV_TYPE'] ensure_schain_volume(schain, schain_type, env_type) block_number = get_block_number_from_path(snapshot_path) diff --git a/node_cli/operations/base.py b/node_cli/operations/base.py index ce4c92c3..9c19224d 100644 --- a/node_cli/operations/base.py +++ 
b/node_cli/operations/base.py @@ -337,7 +337,7 @@ def update_sync(env_filepath: str, env: Dict) -> bool: return True -def turn_off(env: dict, node_type: NodeType = NodeType.REGULAR) -> None: +def turn_off(env: dict, node_type: NodeType) -> None: logger.info('Turning off the node...') compose_rm(env=env, node_type=node_type) remove_dynamic_containers() diff --git a/tests/configs/configs_env_validate_test.py b/tests/configs/configs_env_validate_test.py index 897473fe..d6fa6afc 100644 --- a/tests/configs/configs_env_validate_test.py +++ b/tests/configs/configs_env_validate_test.py @@ -2,6 +2,7 @@ from typing import Optional import pytest import requests +import mock from node_cli.configs.env import ( absent_required_params, @@ -12,6 +13,11 @@ validate_env_params, validate_env_type, ALLOWED_ENV_TYPES, + ALLOWED_MIRAGE_ENV_TYPES, + REQUIRED_PARAMS_SKALE, + REQUIRED_PARAMS_MIRAGE_BOOT, + REQUIRED_PARAMS_MIRAGE, + OPTIONAL_PARAMS, ) from node_cli.configs.alias_address_validation import ( validate_env_alias_or_address, @@ -55,22 +61,6 @@ def test_load_env_file_nonexistent(): assert excinfo.value.code == CLIExitCodes.FAILURE.value -def test_load_env_file_not_readable(tmp_path): - # Create a temporary file and remove read permissions - env_file = tmp_path / 'test.env' - env_file.write_text('KEY=value') - os.chmod(env_file, 0o000) - with pytest.raises(PermissionError): - load_env_file(str(env_file)) - os.chmod(env_file, 0o644) # reset permissions - - -@pytest.mark.parametrize('sync_node,has_schain_name', [(True, True), (False, False)]) -def test_build_env_params_sync_and_non_sync(sync_node, has_schain_name): - params = build_env_params(node_type=NodeType.SYNC if sync_node else NodeType.REGULAR) - assert ('SCHAIN_NAME' in params) == has_schain_name - - def test_populate_env_params_updates_from_environ(monkeypatch): params = {'FOO': ''} monkeypatch.setenv('FOO', 'bar') @@ -78,14 +68,56 @@ def test_populate_env_params_updates_from_environ(monkeypatch): assert params['FOO'] == 
'bar' -@pytest.mark.parametrize('env_type', ['mainnet', 'testnet', 'qanet', 'devnet']) +@pytest.mark.parametrize( + 'node_type, is_mirage_boot, expected_keys, unexpected_keys', + [ + ( + NodeType.REGULAR, + False, + REQUIRED_PARAMS_SKALE.keys(), + {'SCHAIN_NAME'}, + ), + ( + NodeType.SYNC, + False, + {'SCHAIN_NAME', 'DOCKER_LVMPY_STREAM'} | REQUIRED_PARAMS_SKALE.keys(), + set(), + ), + ( + NodeType.MIRAGE, + True, + REQUIRED_PARAMS_MIRAGE_BOOT.keys(), + {'DOCKER_LVMPY_STREAM', 'SCHAIN_NAME'}, + ), + ( + NodeType.MIRAGE, + False, + REQUIRED_PARAMS_MIRAGE.keys(), + {'IMA_CONTRACTS', 'DOCKER_LVMPY_STREAM', 'SCHAIN_NAME'}, + ), + ], + ids=['regular', 'sync', 'mirage_boot', 'mirage_regular'], +) +def test_build_env_params_keys(node_type, is_mirage_boot, expected_keys, unexpected_keys): + params = build_env_params(node_type=node_type, is_mirage_boot=is_mirage_boot) + param_keys = set(params.keys()) + + all_expected = set(expected_keys) | set(OPTIONAL_PARAMS.keys()) + missing_expected = all_expected - param_keys + assert not missing_expected, f'Missing expected keys: {missing_expected}' + + found_unexpected = set(unexpected_keys) & param_keys + assert not found_unexpected, f'Found unexpected keys: {found_unexpected}' + + +@pytest.mark.parametrize('env_type', ALLOWED_ENV_TYPES) def test_valid_env_types(env_type): - validate_env_type(env_type) + validate_env_type(node_type=NodeType.REGULAR, env_type=env_type) def test_invalid_env_type(): with pytest.raises(SystemExit) as excinfo: - validate_env_type('invalid') + validate_env_type(node_type=NodeType.REGULAR, env_type='invalid') assert excinfo.value.code == CLIExitCodes.FAILURE.value @@ -96,8 +128,7 @@ def fake_post(url, json): return fake_response monkeypatch.setattr(requests, 'post', fake_post) - chain_id = get_chain_id('http://localhost:8545') - assert chain_id == 1 + assert get_chain_id(ENDPOINT) == 1 def test_get_chain_id_failure(monkeypatch): @@ -107,71 +138,64 @@ def fake_post(url, json): return fake_response 
monkeypatch.setattr(requests, 'post', fake_post) - with pytest.raises(SystemExit) as excinfo: - get_chain_id('http://localhost:8545') - assert excinfo.value.code == CLIExitCodes.FAILURE.value + with pytest.raises(SystemExit): + get_chain_id(ENDPOINT) -def test_get_network_metadata_success(requests_mock): - metadata = {'networks': [{'chainId': 1, 'path': 'mainnet'}]} - metadata_url = ( - 'https://raw.githubusercontent.com/skalenetwork/skale-contracts/' - 'refs/heads/deployments/metadata.json' - ) - requests_mock.get(metadata_url, json=metadata, status_code=200) - result = get_network_metadata() - assert result == metadata - - -def test_get_network_metadata_failure(requests_mock): - metadata_url = ( - 'https://raw.githubusercontent.com/skalenetwork/skale-contracts/' - 'refs/heads/deployments/metadata.json' - ) - requests_mock.get(metadata_url, status_code=404) - with pytest.raises(SystemExit) as excinfo: - get_network_metadata() - assert excinfo.value.code == CLIExitCodes.FAILURE.value - - -def test_validate_contract_address_success(requests_mock): - requests_mock.post(ENDPOINT, json={'result': '0x123'}) - validate_contract_address('0x' + 'a' * 40, ENDPOINT) - - -def test_validate_contract_address_no_code(requests_mock): - requests_mock.post(ENDPOINT, json={'result': '0x'}) - with pytest.raises(SystemExit) as excinfo: - validate_contract_address('0x' + 'a' * 40, ENDPOINT) - assert excinfo.value.code == CLIExitCodes.FAILURE.value - - -def test_validate_contract_alias_success(requests_mock): +@pytest.mark.parametrize( + 'metadata,status_code,should_raise', + [ + ({'networks': [{'chainId': 1, 'path': 'mainnet'}]}, 200, False), + (None, 404, True), + ], +) +def test_get_network_metadata(requests_mock, metadata, status_code, should_raise): + metadata_url = 'https://raw.githubusercontent.com/skalenetwork/skale-contracts/refs/heads/deployments/metadata.json' + requests_mock.get(metadata_url, json=metadata, status_code=status_code) + + if should_raise: + with 
pytest.raises(SystemExit): + get_network_metadata() + else: + assert get_network_metadata() == metadata + + +@pytest.mark.parametrize( + 'code,should_raise', + [ + ('0x123', False), + ('0x', True), + ], +) +def test_validate_contract_address(requests_mock, code, should_raise): + requests_mock.post(ENDPOINT, json={'result': code}) + addr = '0x' + 'a' * 40 + if should_raise: + with pytest.raises(SystemExit): + validate_contract_address(addr, ENDPOINT) + else: + validate_contract_address(addr, ENDPOINT) + + +@pytest.mark.parametrize( + 'networks,should_raise', + [ + ([{'chainId': 1, 'path': 'mainnet'}], False), + ([], True), + ], +) +def test_validate_contract_alias(requests_mock, networks, should_raise): requests_mock.post(ENDPOINT, json={'result': '0x1'}) - metadata_url = ( - 'https://raw.githubusercontent.com/skalenetwork/skale-contracts/' - 'refs/heads/deployments/metadata.json' - ) - metadata = {'networks': [{'chainId': 1, 'path': 'mainnet'}]} - requests_mock.get(metadata_url, json=metadata, status_code=200) - alias_url = ( - 'https://raw.githubusercontent.com/skalenetwork/skale-contracts/' - 'refs/heads/deployments/mainnet/skale-manager/test-alias.json' - ) - requests_mock.get(alias_url, status_code=200) - validate_contract_alias('test-alias', ContractType.MANAGER, ENDPOINT) + metadata_url = 'https://raw.githubusercontent.com/skalenetwork/skale-contracts/refs/heads/deployments/metadata.json' + requests_mock.get(metadata_url, json={'networks': networks}, status_code=200) - -def test_validate_contract_alias_network_missing(requests_mock): - requests_mock.post(ENDPOINT, json={'result': '0x1'}) - metadata_url = ( - 'https://raw.githubusercontent.com/skalenetwork/skale-contracts/' - 'refs/heads/deployments/metadata.json' - ) - requests_mock.get(metadata_url, json={'networks': []}, status_code=200) - with pytest.raises(SystemExit) as excinfo: + if not should_raise: + alias_url = 
'https://raw.githubusercontent.com/skalenetwork/skale-contracts/refs/heads/deployments/mainnet/skale-manager/test-alias.json' + requests_mock.get(alias_url, status_code=200) validate_contract_alias('test-alias', ContractType.MANAGER, ENDPOINT) - assert excinfo.value.code == CLIExitCodes.FAILURE.value + else: + with pytest.raises(SystemExit): + validate_contract_alias('test-alias', ContractType.MANAGER, ENDPOINT) def test_validate_env_alias_or_address_with_address(requests_mock): @@ -182,90 +206,129 @@ def test_validate_env_alias_or_address_with_address(requests_mock): def test_validate_env_alias_or_address_with_alias(requests_mock): requests_mock.post(ENDPOINT, json={'result': '0x1'}) - metadata_url = ( - 'https://raw.githubusercontent.com/skalenetwork/skale-contracts/' - 'refs/heads/deployments/metadata.json' - ) + metadata_url = 'https://raw.githubusercontent.com/skalenetwork/skale-contracts/refs/heads/deployments/metadata.json' metadata = {'networks': [{'chainId': 1, 'path': 'mainnet'}]} requests_mock.get(metadata_url, json=metadata, status_code=200) - alias_url = ( - 'https://raw.githubusercontent.com/skalenetwork/skale-contracts/' - 'refs/heads/deployments/mainnet/mainnet-ima/test-alias.json' - ) + alias_url = 'https://raw.githubusercontent.com/skalenetwork/skale-contracts/refs/heads/deployments/mainnet/mainnet-ima/test-alias.json' requests_mock.get(alias_url, status_code=200) validate_env_alias_or_address('test-alias', ContractType.IMA, ENDPOINT) -def test_validate_env_params_missing_key(): - populated_params = { - 'CONTAINER_CONFIGS_STREAM': 'value', - 'ENDPOINT': 'http://localhost:8545', - 'MANAGER_CONTRACTS': '', - 'FILEBEAT_HOST': '127.0.0.1:3010', - 'DISK_MOUNTPOINT': '/dev/sss', - 'SGX_SERVER_URL': 'http://127.0.0.1', - 'DOCKER_LVMPY_STREAM': 'value', - 'ENV_TYPE': 'mainnet', - } - with pytest.raises(SystemExit) as excinfo: - validate_env_params(populated_params) - assert excinfo.value.code == CLIExitCodes.FAILURE.value 
+@pytest.mark.parametrize('env_type', ALLOWED_MIRAGE_ENV_TYPES) +@pytest.mark.parametrize( + 'required_params, key_to_remove, should_fail', + [ + (REQUIRED_PARAMS_MIRAGE_BOOT, None, False), + (REQUIRED_PARAMS_MIRAGE, None, False), + (REQUIRED_PARAMS_MIRAGE_BOOT, 'IMA_CONTRACTS', True), + (REQUIRED_PARAMS_MIRAGE_BOOT, 'FILEBEAT_HOST', True), + (REQUIRED_PARAMS_MIRAGE, 'FILEBEAT_HOST', True), + ], + ids=[ + 'mirage_boot', + 'mirage_regular', + 'mirage_boot_missing_ima', + 'mirage_boot_missing_filebeat', + 'mirage_regular_missing_filebeat', + ], +) +@mock.patch('node_cli.configs.env.validate_env_alias_or_address') +@mock.patch('node_cli.configs.env.validate_env_type') +def test_validate_env_params_mirage( + mock_validate_type, + mock_validate_alias, + required_params, + key_to_remove, + should_fail, + env_type, +): + params = {k: f'{k}_val' for k in required_params} + params['ENV_TYPE'] = env_type + + if key_to_remove: + del params[key_to_remove] + + if should_fail: + with pytest.raises(SystemExit): + validate_env_params(node_type=NodeType.MIRAGE, params=params) + else: + validate_env_params(node_type=NodeType.MIRAGE, params=params) + + +@pytest.mark.parametrize( + 'node_type, is_boot, required_keys_dict', + [ + (NodeType.MIRAGE, True, REQUIRED_PARAMS_MIRAGE_BOOT), + (NodeType.MIRAGE, False, REQUIRED_PARAMS_MIRAGE), + ], + ids=['mirage_boot', 'mirage_regular'], +) +@mock.patch('node_cli.configs.env.validate_env_alias_or_address') +@mock.patch('node_cli.configs.env.get_chain_id', return_value=1) +@mock.patch( + 'node_cli.configs.env.get_network_metadata', + return_value={'networks': [{'chainId': 1, 'path': 'mainnet'}]}, +) +def test_get_validated_env_config_mirage_success( + mock_meta, + mock_chain, + mock_validate_alias, + tmp_path, + monkeypatch, + node_type, + is_boot, + required_keys_dict, +): + env_file = tmp_path / 'mirage.env' + env_content = '' + expected_config = {} + for key in {**required_keys_dict, **OPTIONAL_PARAMS}: + env_value = f'{key}_value' + if key 
== 'ENDPOINT': + env_value = ENDPOINT + if key == 'ENV_TYPE': + env_value = 'devnet-mirage' + if key == 'MANAGER_CONTRACTS': + env_value = '0x' + '1' * 40 + if key == 'IMA_CONTRACTS': + env_value = '0x' + '2' * 40 -def test_validate_env_params_success(valid_env_params, requests_mock): - endpoint = valid_env_params['ENDPOINT'] - requests_mock.post(endpoint, json={'result': '0x1'}) - metadata_url = ( - 'https://raw.githubusercontent.com/skalenetwork/skale-contracts/' - 'refs/heads/deployments/metadata.json' - ) - metadata = {'networks': [{'chainId': 1, 'path': 'mainnet'}]} - requests_mock.get(metadata_url, json=metadata, status_code=200) - ima_alias_url = ( - 'https://raw.githubusercontent.com/skalenetwork/skale-contracts/' - 'refs/heads/deployments/mainnet/mainnet-ima/test-ima.json' - ) - requests_mock.get(ima_alias_url, status_code=200) - manager_alias_url = ( - 'https://raw.githubusercontent.com/skalenetwork/skale-contracts/' - 'refs/heads/deployments/mainnet/skale-manager/test-manager.json' - ) - requests_mock.get(manager_alias_url, status_code=200) - validate_env_params(valid_env_params) - - -def test_get_validated_env_config_success( - valid_env_file, mock_chain_response, mock_networks_metadata, requests_mock -): - requests_mock.post(ENDPOINT, json=mock_chain_response) - metadata_url = ( - 'https://raw.githubusercontent.com/skalenetwork/skale-contracts/' - 'refs/heads/deployments/metadata.json' - ) - requests_mock.get(metadata_url, json=mock_networks_metadata, status_code=200) - ima_alias_url = ( - 'https://raw.githubusercontent.com/skalenetwork/skale-contracts/' - 'refs/heads/deployments/mainnet/mainnet-ima/test-ima.json' - ) - requests_mock.get(ima_alias_url, status_code=200) - manager_alias_url = ( - 'https://raw.githubusercontent.com/skalenetwork/skale-contracts/' - 'refs/heads/deployments/mainnet/skale-manager/test-manager.json' - ) - requests_mock.get(manager_alias_url, status_code=200) - config = get_validated_env_config(valid_env_file) - assert 
config['ENDPOINT'] == 'http://localhost:8545' - assert config['ENV_TYPE'] in ALLOWED_ENV_TYPES + if key in required_keys_dict: + env_content += f'{key}={env_value}\n' + monkeypatch.setenv(key, env_value) + expected_config[key] = env_value + env_file.write_text(env_content) -def test_get_validated_env_config_missing_file(): - with pytest.raises(SystemExit) as excinfo: - get_validated_env_config('nonexistent.env') - assert excinfo.value.code == CLIExitCodes.FAILURE.value + with mock.patch('node_cli.configs.alias_address_validation.requests.post') as mock_post: + mock_post.return_value = FakeResponse(200, {'result': '0x123'}) + + config = get_validated_env_config( + node_type=node_type, env_filepath=str(env_file), is_mirage_boot=is_boot + ) + assert config is not None + assert set(config.keys()) == set(expected_config.keys()) + for key in expected_config: + assert config[key] == expected_config[key] -def test_get_validated_env_config_unreadable_file(valid_env_file): - os.chmod(valid_env_file, 0o000) - with pytest.raises(PermissionError): - get_validated_env_config(valid_env_file) - os.chmod(valid_env_file, 0o644) + for key in {**required_keys_dict, **OPTIONAL_PARAMS}: + monkeypatch.delenv(key, raising=False) + + +def test_get_validated_env_config_missing_file(): + with pytest.raises(SystemExit): + get_validated_env_config(env_filepath='nonexistent.env', node_type=NodeType.REGULAR) + + +def test_get_validated_env_config_unreadable_file(tmp_path): + env_file = tmp_path / 'unreadable.env' + env_file.touch() + original_mode = env_file.stat().st_mode + try: + os.chmod(env_file, 0o000) + with pytest.raises(PermissionError): + get_validated_env_config(env_filepath=str(env_file), node_type=NodeType.REGULAR) + finally: + os.chmod(env_file, original_mode) diff --git a/tests/core/core_node_test.py b/tests/core/core_node_test.py index 4f67139f..eb0b76b4 100644 --- a/tests/core/core_node_test.py +++ b/tests/core/core_node_test.py @@ -164,7 +164,8 @@ def 
test_init_node(no_resource_file): # todo: write new init node test assert os.path.isfile(RESOURCE_ALLOCATION_FILEPATH) -def test_update_node(mocked_g_config, resource_file): +@pytest.mark.parametrize('node_type', [NodeType.REGULAR, NodeType.SYNC, NodeType.MIRAGE]) +def test_update_node(node_type, mocked_g_config, resource_file): env_filepath = './tests/test-env' resp_mock = response_mock(requests.codes.created) os.makedirs(NODE_DATA_PATH, exist_ok=True) @@ -188,7 +189,7 @@ def test_update_node(mocked_g_config, resource_file): with mock.patch( 'node_cli.utils.helper.requests.get', return_value=safe_update_api_response() ): # noqa - result = update(env_filepath, pull_config_for_schain=None) + result = update(env_filepath, pull_config_for_schain=None, node_type=node_type) assert result is None From 813c06f0720cec637f14ae6004c7653cfb3120e8 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20Gon=C3=A7alves?= Date: Wed, 23 Apr 2025 18:14:27 +0100 Subject: [PATCH 041/332] Refactored functions to explicitly require the NodeType parameter, removing default values. 
--- node_cli/cli/node.py | 4 +-- node_cli/core/mirage_boot.py | 2 +- node_cli/core/node.py | 57 ++++++++++++++++++++-------------- node_cli/core/resources.py | 2 +- node_cli/operations/base.py | 32 +++++++++---------- node_cli/utils/docker_utils.py | 20 +++++------- tests/core/core_node_test.py | 14 ++++----- 7 files changed, 68 insertions(+), 63 deletions(-) diff --git a/node_cli/cli/node.py b/node_cli/cli/node.py index 10bbd17e..036bd3ee 100644 --- a/node_cli/cli/node.py +++ b/node_cli/cli/node.py @@ -85,7 +85,7 @@ def register_node(name, ip, port, domain): @click.argument('env_file') @streamed_cmd def init_node(env_file): - init(env_file) + init(env_filepath=env_file, node_type=TYPE) @node.command('update', help='Update node from .env file') @@ -196,7 +196,7 @@ def _turn_off(maintenance_on, unsafe_ok): @click.argument('env_file') @streamed_cmd def _turn_on(maintenance_off, sync_schains, env_file): - turn_on(maintenance_off, sync_schains, env_file) + turn_on(maintenance_off, sync_schains, env_file, node_type=TYPE) @node.command('set-domain', help='Set node domain name') diff --git a/node_cli/core/mirage_boot.py b/node_cli/core/mirage_boot.py index e98b1ae7..0e000dfd 100644 --- a/node_cli/core/mirage_boot.py +++ b/node_cli/core/mirage_boot.py @@ -48,7 +48,7 @@ def init(env_filepath: str) -> None: init_mirage_boot_op(env_filepath, env) logger.info('Waiting for mirage containers initialization') time.sleep(TM_INIT_TIMEOUT) - if not is_base_containers_alive(NodeType.MIRAGE): + if not is_base_containers_alive(node_type=NodeType.MIRAGE): error_exit('Containers are not running', exit_code=CLIExitCodes.OPERATION_EXECUTION_ERROR) logger.info('Generating mirage resource allocation file ...') update_resource_allocation(env['ENV_TYPE']) diff --git a/node_cli/core/node.py b/node_cli/core/node.py index 2436c674..2cf00733 100644 --- a/node_cli/core/node.py +++ b/node_cli/core/node.py @@ -102,7 +102,7 @@ class NodeStatuses(Enum): NOT_CREATED = 5 -def is_update_safe(node_type: 
NodeType = NodeType.REGULAR) -> bool: +def is_update_safe(node_type: NodeType) -> bool: if not is_admin_running(node_type): if node_type == NodeType.SYNC: return True @@ -144,13 +144,13 @@ def register_node(name, p2p_ip, public_ip, port, domain_name): @check_not_inited -def init(env_filepath): - env = compose_node_env(env_filepath) +def init(env_filepath: str, node_type: NodeType) -> None: + env = compose_node_env(env_filepath=env_filepath, node_type=node_type) - init_op(env_filepath, env) + init_op(env_filepath=env_filepath, env=env, node_type=node_type) logger.info('Waiting for containers initialization') time.sleep(TM_INIT_TIMEOUT) - if not is_base_containers_alive(): + if not is_base_containers_alive(node_type=node_type): error_exit('Containers are not running', exit_code=CLIExitCodes.OPERATION_EXECUTION_ERROR) logger.info('Generating resource allocation file ...') update_resource_allocation(env['ENV_TYPE']) @@ -158,8 +158,8 @@ def init(env_filepath): @check_not_inited -def restore(backup_path, env_filepath, no_snapshot=False, config_only=False): - env = compose_node_env(env_filepath) +def restore(backup_path, env_filepath, node_type: NodeType, no_snapshot=False, config_only=False): + env = compose_node_env(env_filepath=env_filepath, node_type=node_type) if env is None: return save_env_params(env_filepath) @@ -169,7 +169,7 @@ def restore(backup_path, env_filepath, no_snapshot=False, config_only=False): logger.info('Adding BACKUP_RUN to env ...') env['BACKUP_RUN'] = 'True' # should be str - restored_ok = restore_op(env, backup_path, config_only=config_only) + restored_ok = restore_op(env, backup_path, node_type=node_type, config_only=config_only) if not restored_ok: error_exit('Restore operation failed', exit_code=CLIExitCodes.OPERATION_EXECUTION_ERROR) time.sleep(RESTORE_SLEEP_TIMEOUT) @@ -188,7 +188,7 @@ def init_sync( init_sync_op(env_filepath, env, indexer, archive, snapshot, snapshot_from) logger.info('Waiting for containers initialization') 
time.sleep(TM_INIT_TIMEOUT) - if not is_base_containers_alive(NodeType.SYNC): + if not is_base_containers_alive(node_type=NodeType.SYNC): error_exit('Containers are not running', exit_code=CLIExitCodes.OPERATION_EXECUTION_ERROR) logger.info('Sync node initialized successfully') @@ -205,7 +205,7 @@ def update_sync(env_filepath: str, unsafe_ok: bool = False) -> None: if update_ok: logger.info('Waiting for containers initialization') time.sleep(TM_INIT_TIMEOUT) - alive = is_base_containers_alive(NodeType.SYNC) + alive = is_base_containers_alive(node_type=NodeType.SYNC) if not update_ok or not alive: print_node_cmd_error() return @@ -224,19 +224,27 @@ def cleanup_sync() -> None: def compose_node_env( env_filepath: str, + node_type: NodeType, inited_node: bool = False, sync_schains: Optional[bool] = None, pull_config_for_schain: Optional[str] = None, - node_type: NodeType = NodeType.REGULAR, save: bool = True, is_mirage_boot: bool = False, ) -> dict[str, str]: if env_filepath is not None: - env_params = get_validated_env_config(node_type=node_type, env_filepath=env_filepath) + env_params = get_validated_env_config( + node_type=node_type, + env_filepath=env_filepath, + is_mirage_boot=is_mirage_boot, + ) if save: save_env_params(env_filepath) else: - env_params = get_validated_env_config(node_type=node_type, env_filepath=INIT_ENV_FILEPATH) + env_params = get_validated_env_config( + node_type=node_type, + env_filepath=INIT_ENV_FILEPATH, + is_mirage_boot=is_mirage_boot, + ) if node_type == NodeType.SYNC or node_type == NodeType.MIRAGE: mnt_dir = SCHAINS_MNT_DIR_SINGLE_CHAIN @@ -284,12 +292,13 @@ def update( inited_node=True, sync_schains=False, pull_config_for_schain=pull_config_for_schain, + node_type=node_type, ) - update_ok = update_op(env_filepath, env) + update_ok = update_op(env_filepath, env, node_type=node_type) if update_ok: logger.info('Waiting for containers initialization') time.sleep(TM_INIT_TIMEOUT) - alive = is_base_containers_alive() + alive = 
is_base_containers_alive(node_type=node_type) if not update_ok or not alive: print_node_cmd_error() return @@ -392,23 +401,25 @@ def set_maintenance_mode_off(): @check_inited @check_user def turn_off(node_type: NodeType, maintenance_on: bool = False, unsafe_ok: bool = False) -> None: - if not unsafe_ok and not is_update_safe(): + if not unsafe_ok and not is_update_safe(node_type=node_type): error_msg = 'Cannot turn off safely' error_exit(error_msg, exit_code=CLIExitCodes.UNSAFE_UPDATE) if maintenance_on: set_maintenance_mode_on() - env = compose_node_env(SKALE_DIR_ENV_FILEPATH, save=False) + env = compose_node_env(SKALE_DIR_ENV_FILEPATH, save=False, node_type=node_type) turn_off_op(node_type=node_type, env=env) @check_inited @check_user -def turn_on(maintenance_off, sync_schains, env_file): - env = compose_node_env(env_file, inited_node=True, sync_schains=sync_schains) - turn_on_op(env) +def turn_on(maintenance_off, sync_schains, env_file, node_type: NodeType) -> None: + env = compose_node_env( + env_file, inited_node=True, sync_schains=sync_schains, node_type=node_type + ) + turn_on_op(env=env, node_type=node_type) logger.info('Waiting for containers initialization') time.sleep(TM_INIT_TIMEOUT) - if not is_base_containers_alive(): + if not is_base_containers_alive(node_type=node_type): print_node_cmd_error() return logger.info('Node turned on') @@ -416,7 +427,7 @@ def turn_on(maintenance_off, sync_schains, env_file): set_maintenance_mode_off() -def get_base_containers_amount(node_type: NodeType = NodeType.REGULAR): +def get_base_containers_amount(node_type: NodeType): if node_type == NodeType.SYNC: return len(BASE_SYNC_COMPOSE_SERVICES) elif node_type == NodeType.MIRAGE: @@ -425,7 +436,7 @@ def get_base_containers_amount(node_type: NodeType = NodeType.REGULAR): return len(BASE_SKALE_COMPOSE_SERVICES) -def is_base_containers_alive(node_type: NodeType = NodeType.REGULAR) -> bool: +def is_base_containers_alive(node_type: NodeType) -> bool: if node_type == 
NodeType.MIRAGE: prefixes = ['mirage_', 'skale_'] else: diff --git a/node_cli/core/resources.py b/node_cli/core/resources.py index 6dd799bc..669f8d0f 100644 --- a/node_cli/core/resources.py +++ b/node_cli/core/resources.py @@ -94,8 +94,8 @@ def compose_resource_allocation_config(env_type: str, params_by_env_type: Dict = def generate_resource_allocation_config( env_file, + node_type: NodeType, force=False, - node_type: NodeType = NodeType.REGULAR, ) -> None: if not force and os.path.isfile(RESOURCE_ALLOCATION_FILEPATH): msg = 'Resource allocation file already exists' diff --git a/node_cli/operations/base.py b/node_cli/operations/base.py index 9c19224d..ea78cff0 100644 --- a/node_cli/operations/base.py +++ b/node_cli/operations/base.py @@ -106,8 +106,8 @@ def wrapper(env_filepath: str, env: Dict, *args, **kwargs): @checked_host -def update(env_filepath: str, env: Dict) -> bool: - compose_rm(env) +def update(env_filepath: str, env: Dict, node_type: NodeType) -> bool: + compose_rm(node_type=node_type, env=env) remove_dynamic_containers() sync_skale_node() @@ -143,13 +143,13 @@ def update(env_filepath: str, env: Dict) -> bool: distro.version(), ) update_images(env=env) - compose_up(env) + compose_up(env=env, node_type=node_type) return True @checked_host def migrate_mirage_boot(env_filepath: str, env: Dict) -> bool: - compose_rm(env, node_type=NodeType.MIRAGE) + compose_rm(node_type=NodeType.MIRAGE, env=env) sync_skale_node() ensure_btrfs_kernel_module_autoloaded() @@ -182,12 +182,12 @@ def migrate_mirage_boot(env_filepath: str, env: Dict) -> bool: distro.version(), ) update_images(env=env) - compose_up(env, node_type=NodeType.MIRAGE) + compose_up(env=env, node_type=NodeType.MIRAGE) return True @checked_host -def init(env_filepath: str, env: dict) -> None: +def init(env_filepath: str, env: dict, node_type: NodeType) -> None: sync_skale_node() ensure_btrfs_kernel_module_autoloaded() @@ -217,7 +217,7 @@ def init(env_filepath: str, env: dict) -> None: 
update_resource_allocation(env_type=env['ENV_TYPE']) update_images(env=env) - compose_up(env) + compose_up(env=env, node_type=node_type) @checked_host @@ -248,7 +248,7 @@ def init_mirage_boot(env_filepath: str, env: dict) -> None: update_resource_allocation(env_type=env['ENV_TYPE']) update_images(env=env) - compose_up(env, node_type=NodeType.MIRAGE, is_mirage_boot=True) + compose_up(env=env, node_type=NodeType.MIRAGE, is_mirage_boot=True) def init_sync( @@ -301,11 +301,11 @@ def init_sync( update_images(env=env, sync_node=True) - compose_up(env, node_type=NodeType.SYNC) + compose_up(env=env, node_type=NodeType.SYNC) def update_sync(env_filepath: str, env: Dict) -> bool: - compose_rm(env, node_type=NodeType.SYNC) + compose_rm(env=env, node_type=NodeType.SYNC) remove_dynamic_containers() cleanup_volume_artifacts(env['DISK_MOUNTPOINT']) download_skale_node(env['CONTAINER_CONFIGS_STREAM'], env.get('CONTAINER_CONFIGS_DIR')) @@ -333,7 +333,7 @@ def update_sync(env_filepath: str, env: Dict) -> bool: ) update_images(env=env, sync_node=True) - compose_up(env, node_type=NodeType.SYNC) + compose_up(env=env, node_type=NodeType.SYNC) return True @@ -344,7 +344,7 @@ def turn_off(env: dict, node_type: NodeType) -> None: logger.info('Node was successfully turned off') -def turn_on(env: dict) -> None: +def turn_on(env: dict, node_type: NodeType) -> None: logger.info('Turning on the node...') update_meta( VERSION, @@ -360,10 +360,10 @@ def turn_on(env: dict) -> None: configure_nftables(enable_monitoring=enable_monitoring) logger.info('Launching containers on the node...') - compose_up(env) + compose_up(env=env, node_type=node_type) -def restore(env, backup_path, config_only=False): +def restore(env, backup_path, node_type: NodeType, config_only=False): unpack_backup_archive(backup_path) failed_checks = run_host_checks( env['DISK_MOUNTPOINT'], @@ -397,7 +397,7 @@ def restore(env, backup_path, config_only=False): update_resource_allocation(env_type=env['ENV_TYPE']) if not config_only: 
- compose_up(env) + compose_up(env=env, node_type=node_type) failed_checks = run_host_checks( env['DISK_MOUNTPOINT'], @@ -442,7 +442,7 @@ def restore_mirage(env, backup_path, config_only=False): ) if not config_only: - compose_up(env, node_type=NodeType.MIRAGE) + compose_up(env=env, node_type=NodeType.MIRAGE) failed_checks = run_host_checks( env['DISK_MOUNTPOINT'], diff --git a/node_cli/utils/docker_utils.py b/node_cli/utils/docker_utils.py index 56dc1c22..c652a270 100644 --- a/node_cli/utils/docker_utils.py +++ b/node_cli/utils/docker_utils.py @@ -238,7 +238,7 @@ def is_volume_exists(name: str, dutils=None): return True -def compose_rm(env={}, node_type: NodeType = NodeType.REGULAR): +def compose_rm(node_type: NodeType, env={}): logger.info('Removing compose containers') compose_path = get_compose_path(node_type) run_cmd( @@ -268,7 +268,7 @@ def compose_build(env: dict, sync_node: bool = False): run_cmd(cmd=('docker', 'compose', '-f', compose_path, 'build'), env=env) -def get_compose_path(node_type: NodeType = NodeType.REGULAR) -> str: +def get_compose_path(node_type: NodeType) -> str: if node_type == NodeType.SYNC: return SYNC_COMPOSE_PATH elif node_type == NodeType.MIRAGE: @@ -277,7 +277,7 @@ def get_compose_path(node_type: NodeType = NodeType.REGULAR) -> str: return COMPOSE_PATH -def get_compose_services(node_type: NodeType = NodeType.REGULAR) -> tuple: +def get_compose_services(node_type: NodeType) -> tuple: if node_type == NodeType.SYNC: return BASE_SYNC_COMPOSE_SERVICES elif node_type == NodeType.MIRAGE: @@ -286,9 +286,7 @@ def get_compose_services(node_type: NodeType = NodeType.REGULAR) -> tuple: return BASE_SKALE_COMPOSE_SERVICES -def get_up_compose_cmd( - node_type: NodeType = NodeType.REGULAR, services: Optional[tuple] = None -) -> tuple: +def get_up_compose_cmd(node_type: NodeType, services: Optional[tuple] = None) -> tuple: compose_path = get_compose_path(node_type) if services is None: @@ -297,7 +295,7 @@ def get_up_compose_cmd( return ('docker', 
'compose', '-f', compose_path, 'up', '-d', *services) -def compose_up(env, node_type: NodeType = NodeType.REGULAR, is_mirage_boot: bool = False): +def compose_up(env, node_type: NodeType, is_mirage_boot: bool = False): if node_type == NodeType.SYNC: logger.info('Running containers for sync node') run_cmd(cmd=get_up_compose_cmd(node_type=NodeType.SYNC), env=env) @@ -377,18 +375,14 @@ def is_container_running(name: str, dclient: Optional[DockerClient] = None) -> b return False -def is_api_running( - node_type: NodeType = NodeType.REGULAR, dclient: Optional[DockerClient] = None -) -> bool: +def is_api_running(node_type: NodeType, dclient: Optional[DockerClient] = None) -> bool: if node_type == NodeType.MIRAGE: return is_container_running(name='mirage_api', dclient=dclient) else: return is_container_running(name='skale_api', dclient=dclient) -def is_admin_running( - node_type: NodeType = NodeType.REGULAR, client: Optional[DockerClient] = None -) -> bool: +def is_admin_running(node_type: NodeType, client: Optional[DockerClient] = None) -> bool: if node_type == NodeType.MIRAGE: return is_container_running(name='mirage_admin', dclient=client) elif node_type == NodeType.SYNC: diff --git a/tests/core/core_node_test.py b/tests/core/core_node_test.py index eb0b76b4..c8fc0f70 100644 --- a/tests/core/core_node_test.py +++ b/tests/core/core_node_test.py @@ -36,7 +36,7 @@ def skale_base_containers(): containers = [ dclient.containers.run(ALPINE_IMAGE_NAME, detach=True, name=f'skale_test{i}', command=CMD) - for i in range(get_base_containers_amount()) + for i in range(get_base_containers_amount(node_type=NodeType.REGULAR)) ] yield containers for c in containers: @@ -47,7 +47,7 @@ def skale_base_containers(): def skale_base_containers_without_one(): containers = [ dclient.containers.run(ALPINE_IMAGE_NAME, detach=True, name=f'skale_test{i}', command=CMD) - for i in range(get_base_containers_amount() - 1) + for i in range(get_base_containers_amount(node_type=NodeType.REGULAR) - 1) ] 
yield containers for c in containers: @@ -58,7 +58,7 @@ def skale_base_containers_without_one(): def skale_base_containers_exited(): containers = [ dclient.containers.run(HELLO_WORLD_IMAGE_NAME, detach=True, name=f'skale_test{i}') - for i in range(get_base_containers_amount()) + for i in range(get_base_containers_amount(node_type=NodeType.REGULAR)) ] time.sleep(10) yield containers @@ -111,19 +111,19 @@ def test_pack_dir(tmp_dir): def test_is_base_containers_alive(skale_base_containers): cont = skale_base_containers print([c.name for c in cont]) - assert is_base_containers_alive() + assert is_base_containers_alive(node_type=NodeType.REGULAR) def test_is_base_containers_alive_one_failed(skale_base_containers_without_one): - assert not is_base_containers_alive() + assert not is_base_containers_alive(node_type=NodeType.REGULAR) def test_is_base_containers_alive_exited(skale_base_containers_exited): - assert not is_base_containers_alive() + assert not is_base_containers_alive(node_type=NodeType.REGULAR) def test_is_base_containers_alive_empty(): - assert not is_base_containers_alive() + assert not is_base_containers_alive(node_type=NodeType.REGULAR) @pytest.fixture From 420e9e5eca9bdde705745b8881a94d00a96c18a1 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20Gon=C3=A7alves?= Date: Wed, 23 Apr 2025 18:55:09 +0100 Subject: [PATCH 042/332] Started fixing env validation tests --- node_cli/cli/node.py | 8 ++++- tests/configs/configs_env_validate_test.py | 41 +++++++++++++++++++--- 2 files changed, 43 insertions(+), 6 deletions(-) diff --git a/node_cli/cli/node.py b/node_cli/cli/node.py index 036bd3ee..b0c6c696 100644 --- a/node_cli/cli/node.py +++ b/node_cli/cli/node.py @@ -137,7 +137,13 @@ def backup_node(backup_folder_path): ) @streamed_cmd def restore_node(backup_path, env_file, no_snapshot, config_only): - restore(backup_path, env_file, no_snapshot, config_only) + restore( + backup_path=backup_path, + env_filepath=env_file, + no_snapshot=no_snapshot, + 
config_only=config_only, + node_type=TYPE, + ) @node.command('maintenance-on', help='Set SKALE node into maintenance mode') diff --git a/tests/configs/configs_env_validate_test.py b/tests/configs/configs_env_validate_test.py index d6fa6afc..b42c48a8 100644 --- a/tests/configs/configs_env_validate_test.py +++ b/tests/configs/configs_env_validate_test.py @@ -12,9 +12,11 @@ get_validated_env_config, validate_env_params, validate_env_type, - ALLOWED_ENV_TYPES, + ALLOWED_SKALE_ENV_TYPES, ALLOWED_MIRAGE_ENV_TYPES, + ALLOWED_ENV_TYPES, REQUIRED_PARAMS_SKALE, + REQUIRED_PARAMS_SYNC, REQUIRED_PARAMS_MIRAGE_BOOT, REQUIRED_PARAMS_MIRAGE, OPTIONAL_PARAMS, @@ -80,7 +82,7 @@ def test_populate_env_params_updates_from_environ(monkeypatch): ( NodeType.SYNC, False, - {'SCHAIN_NAME', 'DOCKER_LVMPY_STREAM'} | REQUIRED_PARAMS_SKALE.keys(), + REQUIRED_PARAMS_SYNC.keys(), set(), ), ( @@ -110,9 +112,38 @@ def test_build_env_params_keys(node_type, is_mirage_boot, expected_keys, unexpec assert not found_unexpected, f'Found unexpected keys: {found_unexpected}' -@pytest.mark.parametrize('env_type', ALLOWED_ENV_TYPES) -def test_valid_env_types(env_type): - validate_env_type(node_type=NodeType.REGULAR, env_type=env_type) +@pytest.mark.parametrize( + 'node_type, env_types, should_fail', + [ + (NodeType.REGULAR, ALLOWED_SKALE_ENV_TYPES, False), + (NodeType.REGULAR, ALLOWED_MIRAGE_ENV_TYPES, True), + (NodeType.SYNC, ALLOWED_SKALE_ENV_TYPES, False), + (NodeType.SYNC, ALLOWED_MIRAGE_ENV_TYPES, True), + (NodeType.MIRAGE, ALLOWED_MIRAGE_ENV_TYPES, False), + (NodeType.MIRAGE, ALLOWED_SKALE_ENV_TYPES, True), + (NodeType.REGULAR, ['invalid'], True), + (NodeType.SYNC, ['invalid'], True), + (NodeType.MIRAGE, ['invalid'], True), + ], + ids=[ + 'correct_env_regular', + 'incorrect_env_regular', + 'correct_env_sync', + 'incorrect_env_sync', + 'correct_env_mirage', + 'incorrect_env_mirage', + 'invalid_regular', + 'invalid_sync', + 'invalid_mirage', + ], +) +def test_valid_env_types(node_type, env_types, 
should_fail): + for env_type in env_types: + if should_fail: + with pytest.raises(SystemExit): + validate_env_type(node_type=node_type, env_type=env_type) + else: + validate_env_type(node_type=node_type, env_type=env_type) def test_invalid_env_type(): From 5ff0c2f60410509d06e6fe439f34bd4ce0a8d7c5 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20Gon=C3=A7alves?= Date: Wed, 23 Apr 2025 18:56:02 +0100 Subject: [PATCH 043/332] Removed unused import in env tests --- tests/configs/configs_env_validate_test.py | 1 - 1 file changed, 1 deletion(-) diff --git a/tests/configs/configs_env_validate_test.py b/tests/configs/configs_env_validate_test.py index b42c48a8..bd80d9e1 100644 --- a/tests/configs/configs_env_validate_test.py +++ b/tests/configs/configs_env_validate_test.py @@ -14,7 +14,6 @@ validate_env_type, ALLOWED_SKALE_ENV_TYPES, ALLOWED_MIRAGE_ENV_TYPES, - ALLOWED_ENV_TYPES, REQUIRED_PARAMS_SKALE, REQUIRED_PARAMS_SYNC, REQUIRED_PARAMS_MIRAGE_BOOT, From 1cc3a6d8aa840ef387e60dd4b7ae6b016c57a559 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20Gon=C3=A7alves?= Date: Thu, 24 Apr 2025 12:53:49 +0100 Subject: [PATCH 044/332] Fixed broken tests; Adapted `validate_env_params` to work with params required by new node types --- node_cli/configs/env.py | 4 +++- tests/cli/node_test.py | 8 ++++---- tests/cli/resources_allocation_test.py | 4 ++-- tests/cli/sync_node_test.py | 6 +++--- tests/configs/configs_env_validate_test.py | 8 ++++---- tests/core/core_node_test.py | 6 +++--- tests/core/host/docker_config_test.py | 2 +- 7 files changed, 20 insertions(+), 18 deletions(-) diff --git a/node_cli/configs/env.py b/node_cli/configs/env.py index 05b8ba5a..755014b6 100644 --- a/node_cli/configs/env.py +++ b/node_cli/configs/env.py @@ -138,9 +138,11 @@ def validate_env_params( error_exit(f'Missing required parameters: {missing}') validate_env_type(node_type=node_type, env_type=params['ENV_TYPE']) endpoint = params['ENDPOINT'] - 
validate_env_alias_or_address(params['IMA_CONTRACTS'], ContractType.IMA, endpoint) validate_env_alias_or_address(params['MANAGER_CONTRACTS'], ContractType.MANAGER, endpoint) + if 'IMA_CONTRACTS' in params.keys(): + validate_env_alias_or_address(params['IMA_CONTRACTS'], ContractType.IMA, endpoint) + def validate_env_type(node_type: NodeType, env_type: str) -> None: allowed_env_types_for_node_type = list() diff --git a/tests/cli/node_test.py b/tests/cli/node_test.py index a94e4157..c337e5cf 100644 --- a/tests/cli/node_test.py +++ b/tests/cli/node_test.py @@ -338,7 +338,7 @@ def test_restore(mocked_g_config): return_value=CliMeta(version='2.4.0', config_stream='3.0.2'), ), patch('node_cli.operations.base.configure_nftables'), - patch('node_cli.configs.env.validate_env_params', lambda params: None), + patch('node_cli.configs.env.validate_env_params'), ): result = run_command(restore_node, [backup_path, './tests/test-env']) assert result.exit_code == 0 @@ -364,7 +364,7 @@ def test_restore_no_snapshot(mocked_g_config): return_value=CliMeta(version='2.4.0', config_stream='3.0.2'), ), patch('node_cli.operations.base.configure_nftables'), - patch('node_cli.configs.env.validate_env_params', lambda params: None), + patch('node_cli.configs.env.validate_env_params'), ): result = run_command(restore_node, [backup_path, './tests/test-env', '--no-snapshot']) assert result.exit_code == 0 @@ -403,7 +403,7 @@ def test_turn_off_maintenance_on(mocked_g_config): mock.patch('subprocess.run', new=subprocess_run_mock), mock.patch('node_cli.core.node.turn_off_op'), mock.patch('node_cli.utils.decorators.is_node_inited', return_value=True), - patch('node_cli.configs.env.validate_env_params', lambda params: None), + patch('node_cli.configs.env.validate_env_params'), ): result = run_command_mock( 'node_cli.utils.helper.requests.post', @@ -435,7 +435,7 @@ def test_turn_on_maintenance_off(mocked_g_config): mock.patch('node_cli.core.node.turn_on_op'), 
mock.patch('node_cli.core.node.is_base_containers_alive'), mock.patch('node_cli.utils.decorators.is_node_inited', return_value=True), - patch('node_cli.configs.env.validate_env_params', lambda params: None), + patch('node_cli.configs.env.validate_env_params'), ): result = run_command_mock( 'node_cli.utils.helper.requests.post', diff --git a/tests/cli/resources_allocation_test.py b/tests/cli/resources_allocation_test.py index a169d7ac..b317aad6 100644 --- a/tests/cli/resources_allocation_test.py +++ b/tests/cli/resources_allocation_test.py @@ -56,7 +56,7 @@ def test_generate(): resp_mock = response_mock(requests.codes.created) with ( mock.patch('node_cli.core.resources.get_disk_size', return_value=BIG_DISK_SIZE), - mock.patch('node_cli.configs.env.validate_env_params', lambda params: None), + mock.patch('node_cli.configs.env.validate_env_params'), ): result = run_command_mock( 'node_cli.utils.helper.post_request', resp_mock, generate, ['./tests/test-env', '--yes'] @@ -71,7 +71,7 @@ def test_generate_already_exists(resource_alloc_config): resp_mock = response_mock(requests.codes.created) with ( mock.patch('node_cli.core.resources.get_disk_size', return_value=BIG_DISK_SIZE), - mock.patch('node_cli.configs.env.validate_env_params', lambda params: None), + mock.patch('node_cli.configs.env.validate_env_params'), ): result = run_command_mock( 'node_cli.utils.helper.post_request', resp_mock, generate, ['./tests/test-env', '--yes'] diff --git a/tests/cli/sync_node_test.py b/tests/cli/sync_node_test.py index db24799f..9206245a 100644 --- a/tests/cli/sync_node_test.py +++ b/tests/cli/sync_node_test.py @@ -45,7 +45,7 @@ def test_init_sync(mocked_g_config, clean_node_options): mock.patch('node_cli.core.resources.get_disk_size', return_value=BIG_DISK_SIZE), mock.patch('node_cli.operations.base.configure_nftables'), mock.patch('node_cli.utils.decorators.is_node_inited', return_value=False), - mock.patch('node_cli.configs.env.validate_env_params', lambda params: None), + 
mock.patch('node_cli.configs.env.validate_env_params'), ): result = run_command(_init_sync, ['./tests/test-env']) @@ -78,7 +78,7 @@ def test_init_sync_archive(mocked_g_config, clean_node_options): mock.patch('node_cli.core.resources.get_disk_size', return_value=BIG_DISK_SIZE), mock.patch('node_cli.operations.base.configure_nftables'), mock.patch('node_cli.utils.decorators.is_node_inited', return_value=False), - mock.patch('node_cli.configs.env.validate_env_params', lambda params: None), + mock.patch('node_cli.configs.env.validate_env_params'), ): result = run_command(_init_sync, ['./tests/test-env', '--archive']) node_options = NodeOptions() @@ -121,7 +121,7 @@ def test_update_sync(mocked_g_config): 'node_cli.core.node.get_meta_info', return_value=CliMeta(version='2.6.0', config_stream='3.0.2'), ), - mock.patch('node_cli.configs.env.validate_env_params', lambda params: None), + mock.patch('node_cli.configs.env.validate_env_params'), ): result = run_command(_update_sync, ['./tests/test-env', '--yes']) assert result.exit_code == 0 diff --git a/tests/configs/configs_env_validate_test.py b/tests/configs/configs_env_validate_test.py index bd80d9e1..e4e6df02 100644 --- a/tests/configs/configs_env_validate_test.py +++ b/tests/configs/configs_env_validate_test.py @@ -276,7 +276,7 @@ def test_validate_env_params_mirage( params['ENV_TYPE'] = env_type if key_to_remove: - del params[key_to_remove] + params[key_to_remove] = '' if should_fail: with pytest.raises(SystemExit): @@ -293,10 +293,10 @@ def test_validate_env_params_mirage( ], ids=['mirage_boot', 'mirage_regular'], ) -@mock.patch('node_cli.configs.env.validate_env_alias_or_address') -@mock.patch('node_cli.configs.env.get_chain_id', return_value=1) +@mock.patch('node_cli.configs.alias_address_validation.validate_env_alias_or_address') +@mock.patch('node_cli.configs.alias_address_validation.get_chain_id', return_value=1) @mock.patch( - 'node_cli.configs.env.get_network_metadata', + 
'node_cli.configs.alias_address_validation.get_network_metadata', return_value={'networks': [{'chainId': 1, 'path': 'mainnet'}]}, ) def test_get_validated_env_config_mirage_success( diff --git a/tests/core/core_node_test.py b/tests/core/core_node_test.py index c8fc0f70..a604e95d 100644 --- a/tests/core/core_node_test.py +++ b/tests/core/core_node_test.py @@ -158,9 +158,9 @@ def test_init_node(no_resource_file): # todo: write new init node test mock.patch('node_cli.core.node.init_op'), mock.patch('node_cli.core.node.is_base_containers_alive', return_value=True), mock.patch('node_cli.utils.helper.post_request', resp_mock), - mock.patch('node_cli.configs.env.validate_env_params', lambda params: None), + mock.patch('node_cli.configs.env.validate_env_params'), ): - init(env_filepath) + init(env_filepath=env_filepath, node_type=NodeType.REGULAR) assert os.path.isfile(RESOURCE_ALLOCATION_FILEPATH) @@ -184,7 +184,7 @@ def test_update_node(node_type, mocked_g_config, resource_file): 'node_cli.core.node.get_meta_info', return_value=CliMeta(version='2.6.0', config_stream='3.0.2'), ), - mock.patch('node_cli.configs.env.validate_env_params', lambda params: None), + mock.patch('node_cli.configs.env.validate_env_params'), ): with mock.patch( 'node_cli.utils.helper.requests.get', return_value=safe_update_api_response() diff --git a/tests/core/host/docker_config_test.py b/tests/core/host/docker_config_test.py index 87eb391c..4d87cb54 100644 --- a/tests/core/host/docker_config_test.py +++ b/tests/core/host/docker_config_test.py @@ -130,7 +130,7 @@ def container(dclient): c.remove(force=True) -def test_assert_no_contaners(): +def test_assert_no_containers(): assert_no_containers(ignore=('ganache',)) From 21f57c2a6b25775df892094f000738049d563af9 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20Gon=C3=A7alves?= Date: Thu, 24 Apr 2025 12:55:33 +0100 Subject: [PATCH 045/332] Added mirage build and verification steps to github actions test workflow. 
--- .github/workflows/test.yml | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 7ca8a514..95087baa 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -57,6 +57,16 @@ jobs: - name: Check build - sync run: sudo /home/ubuntu/dist/skale-test-Linux-x86_64-sync + - name: Build binary - mirage + run: | + mkdir -p ./dist + docker build . -t node-cli-builder + docker run -v /home/ubuntu/dist:/app/dist node-cli-builder scripts/build.sh test test mirage + docker rm -f $(docker ps -aq) + + - name: Check build - mirage + run: sudo /home/ubuntu/dist/skale-test-Linux-x86_64-mirage + - name: Run prepare test build run: | scripts/build.sh test test normal From a5ad6899a45dc5afdde4ac1f51d44a4f815d96f3 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20Gon=C3=A7alves?= Date: Thu, 24 Apr 2025 17:48:56 +0100 Subject: [PATCH 046/332] Initial implementation of new core_node tests --- node_cli/core/mirage_boot.py | 4 +- node_cli/core/node.py | 36 +++--- node_cli/utils/docker_utils.py | 80 +++++++----- tests/conftest.py | 1 - tests/core/core_node_test.py | 225 ++++++++++++++++++++++++++------- 5 files changed, 242 insertions(+), 104 deletions(-) diff --git a/node_cli/core/mirage_boot.py b/node_cli/core/mirage_boot.py index 0e000dfd..c8a02f40 100644 --- a/node_cli/core/mirage_boot.py +++ b/node_cli/core/mirage_boot.py @@ -48,7 +48,7 @@ def init(env_filepath: str) -> None: init_mirage_boot_op(env_filepath, env) logger.info('Waiting for mirage containers initialization') time.sleep(TM_INIT_TIMEOUT) - if not is_base_containers_alive(node_type=NodeType.MIRAGE): + if not is_base_containers_alive(node_type=NodeType.MIRAGE, is_mirage_boot=True): error_exit('Containers are not running', exit_code=CLIExitCodes.OPERATION_EXECUTION_ERROR) logger.info('Generating mirage resource allocation file ...') update_resource_allocation(env['ENV_TYPE']) @@ -70,7 +70,7 @@ def migrate(env_filepath: str, 
pull_config_for_schain: str) -> None: if migrate_ok: logger.info('Waiting for containers initialization') time.sleep(TM_INIT_TIMEOUT) - alive = is_base_containers_alive(node_type=NodeType.MIRAGE) + alive = is_base_containers_alive(node_type=NodeType.MIRAGE, is_mirage_boot=True) if not migrate_ok or not alive: print_node_cmd_error() return diff --git a/node_cli/core/node.py b/node_cli/core/node.py index 2cf00733..ca90bb69 100644 --- a/node_cli/core/node.py +++ b/node_cli/core/node.py @@ -79,6 +79,7 @@ BASE_SKALE_COMPOSE_SERVICES, BASE_SYNC_COMPOSE_SERVICES, BASE_MIRAGE_COMPOSE_SERVICES, + BASE_MIRAGE_BOOT_COMPOSE_SERVICES, ) from node_cli.utils.node_type import NodeType from node_cli.migrations.focal_to_jammy import migrate as migrate_2_6 @@ -427,31 +428,30 @@ def turn_on(maintenance_off, sync_schains, env_file, node_type: NodeType) -> Non set_maintenance_mode_off() -def get_base_containers_amount(node_type: NodeType): - if node_type == NodeType.SYNC: - return len(BASE_SYNC_COMPOSE_SERVICES) - elif node_type == NodeType.MIRAGE: - return len(BASE_MIRAGE_COMPOSE_SERVICES) +def get_expected_container_names(node_type: NodeType, is_mirage_boot: bool) -> list[str]: + if node_type == NodeType.MIRAGE and is_mirage_boot: + return list(BASE_MIRAGE_BOOT_COMPOSE_SERVICES.values()) + elif node_type == NodeType.MIRAGE and not is_mirage_boot: + return list(BASE_MIRAGE_COMPOSE_SERVICES.values()) + elif node_type == NodeType.SYNC: + return list(BASE_SYNC_COMPOSE_SERVICES.values()) else: - return len(BASE_SKALE_COMPOSE_SERVICES) + return list(BASE_SKALE_COMPOSE_SERVICES.values()) -def is_base_containers_alive(node_type: NodeType) -> bool: - if node_type == NodeType.MIRAGE: - prefixes = ['mirage_', 'skale_'] - else: - prefixes = ['skale_'] +def is_base_containers_alive(node_type: NodeType, is_mirage_boot: bool = False) -> bool: + base_container_names = get_expected_container_names(node_type, is_mirage_boot) dclient = docker.from_env() - containers = dclient.containers.list() + 
running_container_names = [container.name for container in dclient.containers.list()] - alive_containers = [] - for prefix in prefixes: - prefix_containers = list(filter(lambda c: c.name.startswith(prefix), containers)) - alive_containers.extend(prefix_containers) + for base_container in base_container_names: + if base_container in running_container_names: + continue + else: + return False - containers_amount = get_base_containers_amount(node_type) - return len(alive_containers) >= containers_amount + return True def get_node_info_plain(): diff --git a/node_cli/utils/docker_utils.py b/node_cli/utils/docker_utils.py index c652a270..cd027df2 100644 --- a/node_cli/utils/docker_utils.py +++ b/node_cli/utils/docker_utils.py @@ -45,32 +45,44 @@ IMA_REMOVE_TIMEOUT = 20 TELEGRAF_REMOVE_TIMEOUT = 20 -BASE_SKALE_COMPOSE_SERVICES = ( - 'transaction-manager', - 'skale-admin', - 'skale-api', - 'bounty', - 'nginx', - 'redis', - 'watchdog', - 'filebeat', -) -BASE_SYNC_COMPOSE_SERVICES = ('skale-sync-admin', 'nginx') -CORE_MIRAGE_COMPOSE_SERVICES = ( - 'transaction-manager', - 'skale-api', - 'redis', - 'watchdog', - 'nginx', - 'filebeat', -) -BASE_MIRAGE_COMPOSE_SERVICES = (*CORE_MIRAGE_COMPOSE_SERVICES, 'mirage-admin') -BASE_MIRAGE_BOOT_COMPOSE_SERVICES = (*CORE_MIRAGE_COMPOSE_SERVICES, 'mirage-boot') - -MONITORING_COMPOSE_SERVICES = ( - 'node-exporter', - 'advisor', -) +# Services have format : +CORE_COMMON_COMPOSE_SERVICES = { + 'transaction-manager': 'skale_transaction-manager', + 'redis': 'skale_redis', + 'watchdog': 'skale_watchdog', + 'nginx': 'skale_nginx', + 'filebeat': 'skale_filebeat', +} + +BASE_SKALE_COMPOSE_SERVICES = { + **CORE_COMMON_COMPOSE_SERVICES, + 'skale-admin': 'skale_admin', + 'skale-api': 'skale_api', + 'bounty': 'skale_bounty', +} + +CORE_MIRAGE_COMPOSE_SERVICES = { + **CORE_COMMON_COMPOSE_SERVICES, + 'mirage-api': 'mirage_api', +} +BASE_MIRAGE_COMPOSE_SERVICES = { + **CORE_MIRAGE_COMPOSE_SERVICES, + 'mirage-admin': 'mirage_admin', +} 
+BASE_MIRAGE_BOOT_COMPOSE_SERVICES = { + **CORE_MIRAGE_COMPOSE_SERVICES, + 'mirage-boot': 'mirage_boot_admin', +} + +BASE_SYNC_COMPOSE_SERVICES = { + 'skale-sync-admin': 'skale_sync_admin', + 'nginx': 'skale_nginx', +} + +MONITORING_COMPOSE_SERVICES = { + 'node-exporter': 'monitor_node_exporter', + 'advisor': 'monitor_cadvisor', +} TELEGRAF_SERVICES = ('telegraf',) NOTIFICATION_COMPOSE_SERVICES = ('celery',) COMPOSE_TIMEOUT = 10 @@ -277,16 +289,16 @@ def get_compose_path(node_type: NodeType) -> str: return COMPOSE_PATH -def get_compose_services(node_type: NodeType) -> tuple: +def get_compose_services(node_type: NodeType) -> list[str]: if node_type == NodeType.SYNC: - return BASE_SYNC_COMPOSE_SERVICES + return list(BASE_SYNC_COMPOSE_SERVICES) elif node_type == NodeType.MIRAGE: - return BASE_MIRAGE_COMPOSE_SERVICES + return list(BASE_MIRAGE_COMPOSE_SERVICES) else: - return BASE_SKALE_COMPOSE_SERVICES + return list(BASE_SKALE_COMPOSE_SERVICES) -def get_up_compose_cmd(node_type: NodeType, services: Optional[tuple] = None) -> tuple: +def get_up_compose_cmd(node_type: NodeType, services: Optional[list[str]] = None) -> tuple: compose_path = get_compose_path(node_type) if services is None: @@ -313,7 +325,7 @@ def compose_up(env, node_type: NodeType, is_mirage_boot: bool = False): logger.debug('Launching mirage boot containers with env %s', env) run_cmd( cmd=get_up_compose_cmd( - node_type=NodeType.MIRAGE, services=BASE_MIRAGE_BOOT_COMPOSE_SERVICES + node_type=NodeType.MIRAGE, services=list(BASE_MIRAGE_BOOT_COMPOSE_SERVICES) ), env=env, ) @@ -326,7 +338,7 @@ def compose_up(env, node_type: NodeType, is_mirage_boot: bool = False): logger.info('Running containers for Telegram notifications') run_cmd( cmd=get_up_compose_cmd( - node_type=NodeType.REGULAR, services=NOTIFICATION_COMPOSE_SERVICES + node_type=NodeType.REGULAR, services=list(NOTIFICATION_COMPOSE_SERVICES) ), env=env, ) @@ -335,7 +347,7 @@ def compose_up(env, node_type: NodeType, is_mirage_boot: bool = False): 
logger.info('Running monitoring containers') run_cmd( cmd=get_up_compose_cmd( - node_type=NodeType.REGULAR, services=MONITORING_COMPOSE_SERVICES + node_type=NodeType.REGULAR, services=list(MONITORING_COMPOSE_SERVICES) ), env=env, ) diff --git a/tests/conftest.py b/tests/conftest.py index 03adb3c1..65cb19ec 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -356,7 +356,6 @@ def valid_env_params(): @pytest.fixture def valid_env_file(valid_env_params): - """Create a temporary .env file whose contents mimic test-env.""" file_name = None try: with tempfile.NamedTemporaryFile(mode='w', delete=False) as f: diff --git a/tests/core/core_node_test.py b/tests/core/core_node_test.py index a604e95d..6f3c3963 100644 --- a/tests/core/core_node_test.py +++ b/tests/core/core_node_test.py @@ -9,15 +9,16 @@ import pytest import requests -from node_cli.configs import NODE_DATA_PATH +from node_cli.configs import NODE_DATA_PATH, SCHAINS_MNT_DIR_REGULAR, SCHAINS_MNT_DIR_SINGLE_CHAIN from node_cli.configs.resource_allocation import RESOURCE_ALLOCATION_FILEPATH from node_cli.core.node import ( - get_base_containers_amount, + get_expected_container_names, is_base_containers_alive, init, pack_dir, update, is_update_safe, + compose_node_env, ) from node_cli.utils.meta import CliMeta from node_cli.utils.node_type import NodeType @@ -28,42 +29,186 @@ dclient = docker.from_env() ALPINE_IMAGE_NAME = 'alpine:3.12' -HELLO_WORLD_IMAGE_NAME = 'hello-world' -CMD = 'sleep 10' +CMD = 'sleep 60' + +WRONG_CONTAINERS = [ + 'WRONG_CONTAINER_1', + 'WRONG_CONTAINER_2', + 'WRONG_CONTAINER_3', + 'skale_WRONG_CONTAINER_4', + 'skale_WRONG_CONTAINER_5', + 'mirage_WRONG_CONTAINER_6', + 'mirage_WRONG_CONTAINER_7', + 'sync_WRONG_CONTAINER_8', + 'sync_WRONG_CONTAINER_9', +] @pytest.fixture -def skale_base_containers(): - containers = [ - dclient.containers.run(ALPINE_IMAGE_NAME, detach=True, name=f'skale_test{i}', command=CMD) - for i in range(get_base_containers_amount(node_type=NodeType.REGULAR)) - ] - yield 
containers - for c in containers: - c.remove(force=True) +def manage_node_containers(request): + container_names_to_create = request.param + created_containers = [] + try: + for name in container_names_to_create: + container = dclient.containers.run( + ALPINE_IMAGE_NAME, detach=True, name=name, command=CMD + ) + created_containers.append(container) + if created_containers: + time.sleep(2) + yield created_containers + finally: + all_containers = dclient.containers.list(all=True) + for created_name in container_names_to_create: + for container in all_containers: + if container.name == created_name: + try: + container.remove(force=True) + except docker.errors.NotFound: + pass -@pytest.fixture -def skale_base_containers_without_one(): - containers = [ - dclient.containers.run(ALPINE_IMAGE_NAME, detach=True, name=f'skale_test{i}', command=CMD) - for i in range(get_base_containers_amount(node_type=NodeType.REGULAR) - 1) - ] - yield containers - for c in containers: - c.remove(force=True) +@pytest.mark.parametrize( + 'node_type, is_boot', + [ + (NodeType.REGULAR, False), + (NodeType.SYNC, False), + (NodeType.MIRAGE, True), + (NodeType.MIRAGE, False), + ], +) +@pytest.mark.parametrize('manage_node_containers', [[]], indirect=True) +def test_is_base_containers_alive(manage_node_containers, node_type, is_boot, request): + expected_names = get_expected_container_names(node_type, is_boot) + request.node.callspec.params['manage_node_containers'] = expected_names + request.getfixturevalue('manage_node_containers') # Trigger fixture -@pytest.fixture -def skale_base_containers_exited(): - containers = [ - dclient.containers.run(HELLO_WORLD_IMAGE_NAME, detach=True, name=f'skale_test{i}') - for i in range(get_base_containers_amount(node_type=NodeType.REGULAR)) - ] - time.sleep(10) - yield containers - for c in containers: - c.remove(force=True) + dclient = docker.from_env() + running_container_names = [container.name for container in dclient.containers.list()] + print(f'Running 
containers: {running_container_names}') + + assert is_base_containers_alive(node_type=node_type, is_mirage_boot=is_boot) + + +@pytest.mark.parametrize( + 'node_type, is_boot', + [ + (NodeType.REGULAR, False), + (NodeType.SYNC, False), + (NodeType.MIRAGE, True), + (NodeType.MIRAGE, False), + ], +) +@pytest.mark.parametrize('manage_node_containers', [[]], indirect=True) +def test_is_base_containers_alive_wrong(manage_node_containers, node_type, is_boot, request): + request.node.callspec.params['manage_node_containers'] = WRONG_CONTAINERS + request.getfixturevalue('manage_node_containers') # Trigger fixture + + assert is_base_containers_alive(node_type=node_type, is_mirage_boot=is_boot) is False + + +@pytest.mark.parametrize( + 'node_type, is_boot', + [ + (NodeType.REGULAR, False), + (NodeType.SYNC, False), + (NodeType.MIRAGE, True), + (NodeType.MIRAGE, False), + ], +) +@pytest.mark.parametrize('manage_node_containers', [[]], indirect=True) +def test_is_base_containers_alive_missing(manage_node_containers, node_type, is_boot, request): + expected_names = get_expected_container_names(node_type, is_boot) + + containers_to_create = expected_names[1:] + request.node.callspec.params['manage_node_containers'] = containers_to_create + request.getfixturevalue('manage_node_containers') # Trigger fixture + + assert is_base_containers_alive(node_type=node_type, is_mirage_boot=is_boot) is False + + +@pytest.mark.parametrize( + 'node_type, is_boot', + [ + (NodeType.REGULAR, False), + (NodeType.SYNC, False), + (NodeType.MIRAGE, True), + (NodeType.MIRAGE, False), + ], +) +@pytest.mark.parametrize('manage_node_containers', [[]], indirect=True) +def test_is_base_containers_alive_empty(manage_node_containers, node_type, is_boot, request): + assert is_base_containers_alive(node_type=node_type, is_mirage_boot=is_boot) is False + + +@pytest.mark.parametrize( + ( + 'node_type, is_boot, inited_node, sync_schains, expected_mnt_dir, ' + 'expect_flask_key, expect_backup_run' + ), + [ + 
(NodeType.REGULAR, False, True, False, SCHAINS_MNT_DIR_REGULAR, True, False), + (NodeType.REGULAR, False, True, True, SCHAINS_MNT_DIR_REGULAR, True, True), + (NodeType.SYNC, False, False, False, SCHAINS_MNT_DIR_SINGLE_CHAIN, False, False), + (NodeType.MIRAGE, True, True, False, SCHAINS_MNT_DIR_SINGLE_CHAIN, True, False), + (NodeType.MIRAGE, False, True, False, SCHAINS_MNT_DIR_SINGLE_CHAIN, True, False), + ], + ids=[ + 'regular', + 'regular_sync_flag', + 'sync', + 'mirage_boot', + 'mirage_regular', + ], +) +@mock.patch('node_cli.core.node.get_validated_env_config') +@mock.patch('node_cli.core.node.save_env_params') +@mock.patch('node_cli.core.node.get_flask_secret_key', return_value='mock_secret') +def test_compose_node_env( + mock_get_secret, + mock_save_params, + mock_get_validated, + node_type, + is_boot, + inited_node, + sync_schains, + expected_mnt_dir, + expect_flask_key, + expect_backup_run, + valid_env_file, + valid_env_params, +): + mock_get_validated.return_value = valid_env_params.copy() + if node_type == NodeType.SYNC: + mock_get_validated.return_value['ENV_TYPE'] = 'devnet' + elif node_type == NodeType.MIRAGE: + mock_get_validated.return_value['ENV_TYPE'] = 'mainnet-mirage' + else: + mock_get_validated.return_value['ENV_TYPE'] = 'mainnet' + + result_env = compose_node_env( + env_filepath=valid_env_file, + inited_node=inited_node, + sync_schains=sync_schains, + node_type=node_type, + is_mirage_boot=is_boot, + save=True, + ) + + mock_save_params.assert_called_once_with(valid_env_file) + mock_get_validated.assert_called_once_with( + valid_env_file, node_type=node_type, is_mirage_boot=is_boot + ) + assert result_env['SCHAINS_MNT_DIR'] == expected_mnt_dir + assert ( + 'FLASK_SECRET_KEY' in result_env and result_env['FLASK_SECRET_KEY'] is not None + ) == expect_flask_key + if expect_flask_key: + assert result_env['FLASK_SECRET_KEY'] == 'mock_secret' + should_have_backup = sync_schains and node_type != NodeType.SYNC + assert ('BACKUP_RUN' in result_env and 
result_env['BACKUP_RUN'] == 'True') == should_have_backup + assert result_env['ENDPOINT'] == valid_env_params['ENDPOINT'] @pytest.fixture @@ -108,24 +253,6 @@ def test_pack_dir(tmp_dir): pack_dir(backup_dir, cleaned_archive_path, exclude=('trash_data',)) -def test_is_base_containers_alive(skale_base_containers): - cont = skale_base_containers - print([c.name for c in cont]) - assert is_base_containers_alive(node_type=NodeType.REGULAR) - - -def test_is_base_containers_alive_one_failed(skale_base_containers_without_one): - assert not is_base_containers_alive(node_type=NodeType.REGULAR) - - -def test_is_base_containers_alive_exited(skale_base_containers_exited): - assert not is_base_containers_alive(node_type=NodeType.REGULAR) - - -def test_is_base_containers_alive_empty(): - assert not is_base_containers_alive(node_type=NodeType.REGULAR) - - @pytest.fixture def no_resource_file(): try: From 0abf62d0ab718c477c0d2944823a8cd67afb72b6 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20Gon=C3=A7alves?= Date: Thu, 24 Apr 2025 18:17:32 +0100 Subject: [PATCH 047/332] Fixed updated `core_node_test.py` --- tests/core/core_node_test.py | 149 ++++++++++++++++++----------------- 1 file changed, 77 insertions(+), 72 deletions(-) diff --git a/tests/core/core_node_test.py b/tests/core/core_node_test.py index 6f3c3963..8cb5c7f6 100644 --- a/tests/core/core_node_test.py +++ b/tests/core/core_node_test.py @@ -33,16 +33,51 @@ WRONG_CONTAINERS = [ 'WRONG_CONTAINER_1', - 'WRONG_CONTAINER_2', - 'WRONG_CONTAINER_3', 'skale_WRONG_CONTAINER_4', - 'skale_WRONG_CONTAINER_5', 'mirage_WRONG_CONTAINER_6', - 'mirage_WRONG_CONTAINER_7', 'sync_WRONG_CONTAINER_8', - 'sync_WRONG_CONTAINER_9', ] +NODE_TYPE_BOOT_COMBINATIONS: list[tuple[NodeType, bool]] = [ + (NodeType.REGULAR, False), + (NodeType.SYNC, False), + (NodeType.MIRAGE, True), + (NodeType.MIRAGE, False), +] + +alive_test_params = [ + pytest.param( + node_type, + is_boot, + get_expected_container_names(node_type, is_boot), + 
id=f'{node_type.name}-boot_{is_boot}-correct_containers', + ) + for node_type, is_boot in NODE_TYPE_BOOT_COMBINATIONS +] + +wrong_test_params = [ + pytest.param( + node_type, + is_boot, + WRONG_CONTAINERS, + id=f'{node_type.name}-boot_{is_boot}-wrong_containers', + ) + for node_type, is_boot in NODE_TYPE_BOOT_COMBINATIONS +] + +missing_test_params = [] +for node_type, is_boot in NODE_TYPE_BOOT_COMBINATIONS: + expected_names = get_expected_container_names(node_type, is_boot) + containers_to_create = expected_names[1:] + missing_test_params.append( + pytest.param( + node_type, + is_boot, + containers_to_create, + id=f'{node_type.name}-boot_{is_boot}-missing_containers', + ) + ) + @pytest.fixture def manage_node_containers(request): @@ -50,95 +85,65 @@ def manage_node_containers(request): created_containers = [] try: for name in container_names_to_create: + try: + existing_container = dclient.containers.get(name) + existing_container.remove(force=True) + except docker.errors.NotFound: + pass container = dclient.containers.run( - ALPINE_IMAGE_NAME, detach=True, name=name, command=CMD + ALPINE_IMAGE_NAME, + detach=True, + name=name, + command=CMD, ) created_containers.append(container) + if created_containers: time.sleep(2) + yield created_containers + finally: - all_containers = dclient.containers.list(all=True) - for created_name in container_names_to_create: - for container in all_containers: - if container.name == created_name: - try: - container.remove(force=True) - except docker.errors.NotFound: - pass + all_containers_now = dclient.containers.list(all=True) + cleaned_count = 0 + for container_obj in all_containers_now: + if container_obj.name in container_names_to_create: + try: + container_obj.remove(force=True) + cleaned_count += 1 + except docker.errors.NotFound: + pass @pytest.mark.parametrize( - 'node_type, is_boot', - [ - (NodeType.REGULAR, False), - (NodeType.SYNC, False), - (NodeType.MIRAGE, True), - (NodeType.MIRAGE, False), - ], + 'node_type, is_boot, 
manage_node_containers', + alive_test_params, + indirect=['manage_node_containers'], ) -@pytest.mark.parametrize('manage_node_containers', [[]], indirect=True) -def test_is_base_containers_alive(manage_node_containers, node_type, is_boot, request): - expected_names = get_expected_container_names(node_type, is_boot) - - request.node.callspec.params['manage_node_containers'] = expected_names - request.getfixturevalue('manage_node_containers') # Trigger fixture - - dclient = docker.from_env() - running_container_names = [container.name for container in dclient.containers.list()] - print(f'Running containers: {running_container_names}') - - assert is_base_containers_alive(node_type=node_type, is_mirage_boot=is_boot) +def test_is_base_containers_alive(manage_node_containers, node_type, is_boot): + assert is_base_containers_alive(node_type=node_type, is_mirage_boot=is_boot) is True @pytest.mark.parametrize( - 'node_type, is_boot', - [ - (NodeType.REGULAR, False), - (NodeType.SYNC, False), - (NodeType.MIRAGE, True), - (NodeType.MIRAGE, False), - ], + 'node_type, is_boot, manage_node_containers', + wrong_test_params, + indirect=['manage_node_containers'], ) -@pytest.mark.parametrize('manage_node_containers', [[]], indirect=True) -def test_is_base_containers_alive_wrong(manage_node_containers, node_type, is_boot, request): - request.node.callspec.params['manage_node_containers'] = WRONG_CONTAINERS - request.getfixturevalue('manage_node_containers') # Trigger fixture - +def test_is_base_containers_alive_wrong(manage_node_containers, node_type, is_boot): assert is_base_containers_alive(node_type=node_type, is_mirage_boot=is_boot) is False @pytest.mark.parametrize( - 'node_type, is_boot', - [ - (NodeType.REGULAR, False), - (NodeType.SYNC, False), - (NodeType.MIRAGE, True), - (NodeType.MIRAGE, False), - ], + 'node_type, is_boot, manage_node_containers', + missing_test_params, + indirect=['manage_node_containers'], ) -@pytest.mark.parametrize('manage_node_containers', [[]], 
indirect=True) -def test_is_base_containers_alive_missing(manage_node_containers, node_type, is_boot, request): - expected_names = get_expected_container_names(node_type, is_boot) - - containers_to_create = expected_names[1:] - request.node.callspec.params['manage_node_containers'] = containers_to_create - request.getfixturevalue('manage_node_containers') # Trigger fixture - +def test_is_base_containers_alive_missing(manage_node_containers, node_type, is_boot): assert is_base_containers_alive(node_type=node_type, is_mirage_boot=is_boot) is False -@pytest.mark.parametrize( - 'node_type, is_boot', - [ - (NodeType.REGULAR, False), - (NodeType.SYNC, False), - (NodeType.MIRAGE, True), - (NodeType.MIRAGE, False), - ], -) -@pytest.mark.parametrize('manage_node_containers', [[]], indirect=True) -def test_is_base_containers_alive_empty(manage_node_containers, node_type, is_boot, request): +@pytest.mark.parametrize('node_type, is_boot', NODE_TYPE_BOOT_COMBINATIONS) +def test_is_base_containers_alive_empty(node_type, is_boot): assert is_base_containers_alive(node_type=node_type, is_mirage_boot=is_boot) is False @@ -198,7 +203,7 @@ def test_compose_node_env( mock_save_params.assert_called_once_with(valid_env_file) mock_get_validated.assert_called_once_with( - valid_env_file, node_type=node_type, is_mirage_boot=is_boot + env_filepath=valid_env_file, node_type=node_type, is_mirage_boot=is_boot ) assert result_env['SCHAINS_MNT_DIR'] == expected_mnt_dir assert ( From 55f50d1f3135602b14afa912cc8758b2ed064295 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20Gon=C3=A7alves?= Date: Thu, 24 Apr 2025 19:37:08 +0100 Subject: [PATCH 048/332] Updated docker-compose checks test; Added mirage disabled package requirements test --- tests/core/core_checks_test.py | 68 ++++++++++++++++++---------------- 1 file changed, 36 insertions(+), 32 deletions(-) diff --git a/tests/core/core_checks_test.py b/tests/core/core_checks_test.py index b490b1ee..d30b391c 100644 --- 
a/tests/core/core_checks_test.py +++ b/tests/core/core_checks_test.py @@ -1,7 +1,6 @@ import os import shutil -import time -from pip._internal import main as pipmain +import subprocess import mock import pytest @@ -38,6 +37,13 @@ def requirements_data(): } +@pytest.fixture +def mirage_requirements_data(requirements_data): + reqs = {k: v.copy() for k, v in requirements_data.items()} + reqs['package']['lvm2'] = 'disabled' + return reqs + + @pytest.fixture def server_req(requirements_data): return requirements_data['server'] @@ -195,41 +201,27 @@ def test_checks_docker_api(docker_req): assert r.info['expected_version'] == '111.111.111' -@pytest.fixture -def docker_compose_pkg_1_27_4(): - pipmain(['install', 'docker-compose==1.27.4']) - time.sleep(10) - yield - pipmain(['uninstall', 'docker-compose', '-y']) - - -@pytest.fixture -def docker_compose_pkg_1_24_1(): - pipmain(['install', 'docker-compose==1.24.1']) - time.sleep(10) - yield - pipmain(['uninstall', 'docker-compose', '-y']) - +@mock.patch('node_cli.utils.helper.subprocess.run') +@mock.patch('node_cli.core.checks.shutil.which', return_value='/usr/bin/docker') +def test_checks_docker_compose_version_mocked(mock_shutil_which, mock_subprocess_run, docker_req): + checker = DockerChecker(docker_req) + expected_version = docker_req['docker-compose'] -def test_checks_docker_compose_good_pkg(docker_req, docker_compose_pkg_1_27_4): - checker = DockerChecker(package_req) - r = checker.docker_compose() - r.name == 'docker-compose' - r.status == 'ok' + mock_output = f'Docker Compose version v{expected_version}, build somehash'.encode('utf-8') + mock_result = mock.Mock(spec=subprocess.CompletedProcess) + mock_result.stdout = mock_output + mock_result.stderr = None + mock_result.returncode = 0 + mock_subprocess_run.return_value = mock_result -def test_checks_docker_compose_no_pkg(docker_req): - checker = DockerChecker(package_req) r = checker.docker_compose() - r.name == 'docker-compose' - r.status == 'ok' - -def 
test_checks_docker_compose_invalid_version(docker_req, docker_compose_pkg_1_24_1): - checker = DockerChecker(docker_req) - r = checker.docker_compose() - r.name == 'docker-compose' - r.status == 'ok' + assert r.name == 'docker' + assert r.status == 'ok', f'Check failed: {r}' + assert isinstance(r.info, str) + assert f'expected docker compose version {expected_version}' in r.info.lower() + assert f'actual v{expected_version}' in r.info.lower() def test_checks_docker_config(docker_req): @@ -344,6 +336,18 @@ def test_get_checks(requirements_data): assert len(checks) == 2 +def test_get_checks_mirage(mirage_requirements_data): + disk = 'test-disk' + mirage_checkers = get_all_checkers(disk, mirage_requirements_data) + + mirage_all_checks = get_checks(mirage_checkers, CheckType.ALL) + mirage_all_names = {f.func.__name__ for f in mirage_all_checks} + assert 'network' in mirage_all_names + assert 'lvm2' not in mirage_all_names + assert 'cpu_total' in mirage_all_names + assert 'btrfs_progs' in mirage_all_names + + def test_get_save_report(tmp_dir_path): path = os.path.join(tmp_dir_path, 'checks.json') report = get_report(path) From d1a8ab84e83451ac97a0d22f1a475f93e75afebd Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20Gon=C3=A7alves?= Date: Thu, 24 Apr 2025 19:49:11 +0100 Subject: [PATCH 049/332] Added simple `info` command CLI test --- tests/cli/main_test.py | 14 +++++++++++++- 1 file changed, 13 insertions(+), 1 deletion(-) diff --git a/tests/cli/main_test.py b/tests/cli/main_test.py index 5ce570ad..2d0e74c1 100644 --- a/tests/cli/main_test.py +++ b/tests/cli/main_test.py @@ -18,7 +18,7 @@ # along with this program. If not, see . 
-from node_cli.main import version +from node_cli.main import version, info from tests.helper import run_command @@ -28,3 +28,15 @@ def test_version(): assert result.output == expected result = run_command(version, ['--short']) assert result.output == 'test\n' + + +def test_info_command(): + result = run_command(info, []) + + assert result.exit_code == 0 + + expected_line = 'Full version: test' + assert expected_line in result.output + + assert 'Version:' in result.output + assert 'Build time:' in result.output From 7a386c967bd16a897c056c4e2eadae2e98e8ac3b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20Gon=C3=A7alves?= Date: Tue, 29 Apr 2025 11:09:07 +0100 Subject: [PATCH 050/332] Added nginx template processing tests. --- node_cli/core/nginx.py | 8 +-- tests/core/nginx_test.py | 144 +++++++++++++++++++++++++++++++++++++++ 2 files changed, 148 insertions(+), 4 deletions(-) create mode 100644 tests/core/nginx_test.py diff --git a/node_cli/core/nginx.py b/node_cli/core/nginx.py index 84ba8f62..dfd63c4e 100644 --- a/node_cli/core/nginx.py +++ b/node_cli/core/nginx.py @@ -33,10 +33,6 @@ SSL_CRT_NAME = 'ssl_cert' -def is_regular_node_nginx() -> bool: - return TYPE != NodeType.MIRAGE - - def generate_nginx_config() -> None: ssl_on = check_ssl_certs() regular_node = is_regular_node_nginx() @@ -54,6 +50,10 @@ def check_ssl_certs(): return os.path.exists(crt_path) and os.path.exists(key_path) +def is_regular_node_nginx() -> bool: + return TYPE != NodeType.MIRAGE + + def reload_nginx() -> None: dutils = docker_client() generate_nginx_config() diff --git a/tests/core/nginx_test.py b/tests/core/nginx_test.py new file mode 100644 index 00000000..657f9432 --- /dev/null +++ b/tests/core/nginx_test.py @@ -0,0 +1,144 @@ +import os +from pathlib import Path + +import pytest +import mock + +from node_cli.core.nginx import ( + generate_nginx_config, + check_ssl_certs, + is_regular_node_nginx, + SSL_KEY_NAME, + SSL_CRT_NAME, +) +from node_cli.utils.node_type import NodeType 
+from node_cli.configs import NGINX_TEMPLATE_FILEPATH, NGINX_CONFIG_FILEPATH, NODE_CERTS_PATH + +TEST_NGINX_TEMPLATE = """ +server { + listen 3009; + {% if ssl %} + listen 311 ssl; + ssl_certificate /ssl/ssl_cert; + ssl_certificate_key /ssl/ssl_key; + {% endif %} +} + +{% if regular_node %} +server { + listen 80; + {% if ssl %} + listen 443 ssl; + ssl_certificate /ssl/ssl_cert; + ssl_certificate_key /ssl/ssl_key; + {% endif %} +} +{% endif %} +""" + +CORE_SSL_SNIPPET = 'listen 311 ssl;' +FILESTORAGE_SNIPPET = 'listen 80;' +FILESTORAGE_SSL_SNIPPET = 'listen 443 ssl;' + + +@pytest.fixture +def nginx_template(): + """Create a temporary nginx template file.""" + os.makedirs(os.path.dirname(NGINX_TEMPLATE_FILEPATH), exist_ok=True) + with open(NGINX_TEMPLATE_FILEPATH, 'w') as f: + f.write(TEST_NGINX_TEMPLATE) + yield + try: + os.remove(NGINX_TEMPLATE_FILEPATH) + os.remove(NGINX_CONFIG_FILEPATH) + except FileNotFoundError: + pass + + +@pytest.mark.parametrize( + 'node_type, ssl_exists, expected_regular_flag, expected_ssl_flag', + [ + (NodeType.REGULAR, True, True, True), + (NodeType.REGULAR, False, True, False), + (NodeType.SYNC, True, True, True), + (NodeType.SYNC, False, True, False), + (NodeType.MIRAGE, True, False, True), + (NodeType.MIRAGE, False, False, False), + ], + ids=[ + 'regular_ssl_on', + 'regular_ssl_off', + 'regular_ssl_on', + 'regular_ssl_off', + 'mirage_ssl_on', + 'mirage_ssl_off', + ], +) +@mock.patch('node_cli.core.nginx.check_ssl_certs') +@mock.patch('node_cli.core.nginx.TYPE') +def test_generate_nginx_config( + mock_type, + mock_check_ssl, + node_type, + ssl_exists, + expected_regular_flag, + expected_ssl_flag, + nginx_template, +): + mock_type.__eq__.side_effect = lambda other: node_type == other + mock_type.__ne__.side_effect = lambda other: node_type != other + mock_check_ssl.return_value = ssl_exists + + generate_nginx_config() + + assert os.path.exists(NGINX_CONFIG_FILEPATH) + with open(NGINX_CONFIG_FILEPATH) as f: + rendered_config = f.read() + 
+ rendered_config = rendered_config.strip() + + if expected_regular_flag: + assert FILESTORAGE_SNIPPET in rendered_config + else: + assert FILESTORAGE_SNIPPET not in rendered_config + + if expected_ssl_flag: + assert CORE_SSL_SNIPPET in rendered_config + else: + assert CORE_SSL_SNIPPET not in rendered_config + + if expected_regular_flag and expected_ssl_flag: + assert FILESTORAGE_SSL_SNIPPET in rendered_config + else: + assert FILESTORAGE_SSL_SNIPPET not in rendered_config + + +def test_check_ssl_certs_exist(ssl_folder): + Path(os.path.join(NODE_CERTS_PATH, SSL_CRT_NAME)).touch() + Path(os.path.join(NODE_CERTS_PATH, SSL_KEY_NAME)).touch() + assert check_ssl_certs() + + +def test_check_ssl_certs_missing_one(ssl_folder): + Path(os.path.join(NODE_CERTS_PATH, SSL_CRT_NAME)).touch() + assert check_ssl_certs() is False + + +def test_check_ssl_certs_missing_both(ssl_folder): + assert check_ssl_certs() is False + + +@pytest.mark.parametrize( + 'node_type, expected_result', + [ + (NodeType.REGULAR, True), + (NodeType.SYNC, True), + (NodeType.MIRAGE, False), + ], +) +@mock.patch('node_cli.core.nginx.TYPE') +def test_is_regular_node_nginx(mock_type, node_type, expected_result): + mock_type.__eq__.side_effect = lambda other: node_type == other + mock_type.__ne__.side_effect = lambda other: node_type != other + + assert is_regular_node_nginx() is expected_result From b76ec43644e5edb429e5a9bcb5f0065fc8caa9a5 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20Gon=C3=A7alves?= Date: Tue, 29 Apr 2025 11:48:33 +0100 Subject: [PATCH 051/332] Added mirage core tests --- tests/core/core_mirage_test.py | 96 ++++++++++++++++++++++++++++++++++ 1 file changed, 96 insertions(+) create mode 100644 tests/core/core_mirage_test.py diff --git a/tests/core/core_mirage_test.py b/tests/core/core_mirage_test.py new file mode 100644 index 00000000..2063ac8f --- /dev/null +++ b/tests/core/core_mirage_test.py @@ -0,0 +1,96 @@ +from unittest import mock + +from node_cli.configs import SKALE_DIR +from 
node_cli.core.mirage_boot import init as init_boot, migrate +from node_cli.core.mirage_node import restore_mirage +from node_cli.utils.node_type import NodeType + + +@mock.patch('node_cli.core.mirage_node.time.sleep') +@mock.patch('node_cli.core.mirage_node.restore_mirage_op') +@mock.patch('node_cli.core.mirage_node.save_env_params') +@mock.patch('node_cli.core.mirage_node.compose_node_env') +def test_restore_mirage( + mock_compose_env, + mock_save_env, + mock_restore_op, + mock_sleep, + valid_env_file, + ensure_meta_removed, +): + mock_env = {'ENV_TYPE': 'devnet-mirage'} + mock_compose_env.return_value = mock_env + mock_restore_op.return_value = True + backup_path = '/fake/backup' + + restore_mirage(backup_path, valid_env_file) + + mock_compose_env.assert_called_once_with(valid_env_file, node_type=NodeType.MIRAGE) + mock_save_env.assert_called_once_with(valid_env_file) + expected_env = {**mock_env, 'SKALE_DIR': SKALE_DIR} + mock_restore_op.assert_called_once_with(expected_env, backup_path, config_only=False) + mock_sleep.assert_called_once() + + +@mock.patch('node_cli.core.mirage_boot.update_resource_allocation') +@mock.patch('node_cli.core.mirage_boot.is_base_containers_alive', return_value=True) +@mock.patch('node_cli.core.mirage_boot.time.sleep') +@mock.patch('node_cli.core.mirage_boot.init_mirage_boot_op') +@mock.patch('node_cli.core.mirage_boot.compose_node_env') +def test_init_mirage_boot( + mock_compose_env, + mock_init_op, + mock_sleep, + mock_is_alive, + mock_update_alloc, + valid_env_file, + ensure_meta_removed, +): + mock_env = {'ENV_TYPE': 'devnet-mirage'} + mock_compose_env.return_value = mock_env + + init_boot(valid_env_file) + + mock_compose_env.assert_called_once_with( + valid_env_file, + node_type=NodeType.MIRAGE, + is_mirage_boot=True, + ) + mock_init_op.assert_called_once_with(valid_env_file, mock_env) + mock_sleep.assert_called_once() + mock_is_alive.assert_called_once_with(node_type=NodeType.MIRAGE, is_mirage_boot=True) + 
mock_update_alloc.assert_called_once_with(mock_env['ENV_TYPE']) + + +@mock.patch('node_cli.utils.decorators.is_user_valid', return_value=True) +@mock.patch('node_cli.core.mirage_boot.is_base_containers_alive', return_value=True) +@mock.patch('node_cli.core.mirage_boot.time.sleep') +@mock.patch('node_cli.core.mirage_boot.migrate_mirage_boot_op') +@mock.patch('node_cli.core.mirage_boot.compose_node_env') +def test_migrate_mirage_boot( + mock_compose_env, + mock_migrate_op, + mock_sleep, + mock_is_alive, + mock_is_user_valid, + valid_env_file, + resource_alloc, + meta_file_v3, +): + mock_env = {'ENV_TYPE': 'devnet-mirage'} + mock_compose_env.return_value = mock_env + mock_migrate_op.return_value = True + pull_config_for_schain = 'some_schain' + + migrate(valid_env_file, pull_config_for_schain) + + mock_compose_env.assert_called_once_with( + valid_env_file, + inited_node=True, + sync_schains=False, + pull_config_for_schain=pull_config_for_schain, + node_type=NodeType.MIRAGE, + ) + mock_migrate_op.assert_called_once_with(valid_env_file, mock_env) + mock_sleep.assert_called_once() + mock_is_alive.assert_called_once_with(node_type=NodeType.MIRAGE, is_mirage_boot=True) From 85e161fba981031a90da906f8ed9794820b6e0c8 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20Gon=C3=A7alves?= Date: Tue, 29 Apr 2025 12:13:53 +0100 Subject: [PATCH 052/332] Added mirage CLI tests --- tests/cli/mirage_cli_test.py | 172 +++++++++++++++++++++++++++++++++++ 1 file changed, 172 insertions(+) create mode 100644 tests/cli/mirage_cli_test.py diff --git a/tests/cli/mirage_cli_test.py b/tests/cli/mirage_cli_test.py new file mode 100644 index 00000000..70b1ff0c --- /dev/null +++ b/tests/cli/mirage_cli_test.py @@ -0,0 +1,172 @@ +import pytest +from click.testing import CliRunner +from unittest import mock +import pathlib + +from node_cli.cli.mirage_node import ( + restore_node, + backup_node, + signature_node, + init_node as init_node_placeholder, + register_node as register_node_placeholder, + 
update_node as update_node_placeholder, +) +from node_cli.cli.mirage_boot import ( + init_boot, + register_boot, + signature_boot, + migrate_boot, +) + + +@mock.patch('node_cli.cli.mirage_node.restore_mirage') +def test_mirage_node_restore(mock_restore_core, valid_env_file, tmp_path): + runner = CliRunner() + backup_file = tmp_path / 'backup.tar.gz' + backup_file.touch() + backup_path = str(backup_file) + + result = runner.invoke(restore_node, [backup_path, valid_env_file]) + + assert result.exit_code == 0, f'Output: {result.output}\nException: {result.exception}' + mock_restore_core.assert_called_once_with(backup_path, valid_env_file, False) + + +@mock.patch('node_cli.cli.mirage_node.restore_mirage') +def test_mirage_node_restore_config_only(mock_restore_core, valid_env_file, tmp_path): + runner = CliRunner() + backup_file = tmp_path / 'backup_config.tar.gz' + backup_file.touch() + backup_path = str(backup_file) + + result = runner.invoke(restore_node, [backup_path, valid_env_file, '--config-only']) + + assert result.exit_code == 0, f'Output: {result.output}\nException: {result.exception}' + mock_restore_core.assert_called_once_with(backup_path, valid_env_file, True) + + +@mock.patch('node_cli.cli.mirage_node.backup') +def test_mirage_node_backup(mock_backup_core, tmp_path): + runner = CliRunner() + backup_folder = str(tmp_path / 'backups') + pathlib.Path(backup_folder).mkdir(exist_ok=True) + + result = runner.invoke(backup_node, [backup_folder]) + + assert result.exit_code == 0, f'Output: {result.output}\nException: {result.exception}' + mock_backup_core.assert_called_once_with(backup_folder) + + +@mock.patch('node_cli.cli.mirage_node.get_node_signature') +def test_mirage_node_signature(mock_signature_core): + runner = CliRunner() + validator_id = '42' + signature_val = '0xabc123' + mock_signature_core.return_value = signature_val + + result = runner.invoke(signature_node, [validator_id]) + + assert result.exit_code == 0, f'Output: {result.output}\nException: 
{result.exception}' + mock_signature_core.assert_called_once_with(validator_id) + assert f'Signature: {signature_val}' in result.output + + +@mock.patch('node_cli.cli.mirage_node.get_node_signature') +def test_mirage_node_signature_error(mock_signature_core): + runner = CliRunner() + validator_id = '43' + error_msg = 'Core layer error' + mock_signature_core.return_value = {'error': True, 'message': error_msg} + + result = runner.invoke(signature_node, [validator_id]) + + assert result.exit_code != 0, f'Output: {result.output}\nException: {result.exception}' + mock_signature_core.assert_called_once_with(validator_id) + assert error_msg in result.output + + +@mock.patch('node_cli.utils.decorators.is_node_inited', return_value=True) +def test_mirage_node_init_placeholder(mock_is_inited, resource_alloc): + runner = CliRunner() + result = runner.invoke(init_node_placeholder, []) + + assert result.exit_code == 0, f'Output: {result.output}\nException: {result.exception}' + assert "Placeholder: Command 'mirage node init' is not yet implemented." in result.output + + +@mock.patch('node_cli.utils.decorators.is_node_inited', return_value=True) +def test_mirage_node_register_placeholder(mock_is_inited, resource_alloc): + runner = CliRunner() + result = runner.invoke(register_node_placeholder, []) + + assert result.exit_code == 0, f'Output: {result.output}\nException: {result.exception}' + assert "Placeholder: Command 'mirage node register' is not yet implemented." in result.output + + +@mock.patch('node_cli.utils.decorators.is_node_inited', return_value=True) +def test_mirage_node_update_placeholder(mock_is_inited, resource_alloc, valid_env_file): + runner = CliRunner() + result = runner.invoke(update_node_placeholder, ['--yes', valid_env_file]) + + assert result.exit_code == 0, f'Output: {result.output}\nException: {result.exception}' + assert "Placeholder: Command 'mirage node update' is not yet implemented." 
in result.output + + +@mock.patch('node_cli.cli.mirage_boot.register') +def test_mirage_boot_register(mock_register_core): + runner = CliRunner() + name = 'test-boot-node' + ip = '1.2.3.4' + port = 10001 + domain = 'boot.skale.test' + + result = runner.invoke( + register_boot, ['--name', name, '--ip', ip, '--port', str(port), '--domain', domain] + ) + + assert result.exit_code == 0, f'Output: {result.output}\nException: {result.exception}' + mock_register_core.assert_called_once_with( + name=name, p2p_ip=ip, public_ip=ip, port=port, domain_name=domain + ) + + +@mock.patch('node_cli.cli.mirage_boot.get_node_signature') +def test_mirage_boot_signature(mock_signature_core): + runner = CliRunner() + validator_id = '101' + signature_val = '0xdef456' + mock_signature_core.return_value = signature_val + + result = runner.invoke(signature_boot, [validator_id]) + + assert result.exit_code == 0, f'Output: {result.output}\nException: {result.exception}' + mock_signature_core.assert_called_once_with(validator_id) + assert f'Signature: {signature_val}' in result.output + + +@mock.patch('node_cli.cli.mirage_boot.init') +def test_mirage_boot_init(mock_init_core, valid_env_file): + runner = CliRunner() + result = runner.invoke(init_boot, [valid_env_file]) + + assert result.exit_code == 0, f'Output: {result.output}\nException: {result.exception}' + mock_init_core.assert_called_once_with(valid_env_file) + + +@mock.patch('node_cli.cli.mirage_boot.migrate') +def test_mirage_boot_migrate(mock_migrate_core, valid_env_file): + runner = CliRunner() + result = runner.invoke(migrate_boot, ['--yes', valid_env_file]) + + assert result.exit_code == 0, f'Output: {result.output}\nException: {result.exception}' + mock_migrate_core.assert_called_once_with(valid_env_file, None) + + +@mock.patch('node_cli.cli.mirage_boot.migrate') +def test_mirage_boot_migrate_pull_config(mock_migrate_core, valid_env_file): + runner = CliRunner() + schain_name = 'my-schain-config' + result = 
runner.invoke(migrate_boot, ['--yes', '--pull-config', schain_name, valid_env_file]) + + assert result.exit_code == 0, f'Output: {result.output}\nException: {result.exception}' + mock_migrate_core.assert_called_once_with(valid_env_file, schain_name) From 3cdf90f96311ef50194cb89d6a4961d433933539 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20Gon=C3=A7alves?= Date: Tue, 29 Apr 2025 12:14:08 +0100 Subject: [PATCH 053/332] Fixed unused import --- tests/cli/mirage_cli_test.py | 1 - 1 file changed, 1 deletion(-) diff --git a/tests/cli/mirage_cli_test.py b/tests/cli/mirage_cli_test.py index 70b1ff0c..b909b8d1 100644 --- a/tests/cli/mirage_cli_test.py +++ b/tests/cli/mirage_cli_test.py @@ -1,4 +1,3 @@ -import pytest from click.testing import CliRunner from unittest import mock import pathlib From 2c736e9ea049b332a52ada5769d0ac56164117cd Mon Sep 17 00:00:00 2001 From: Dmytro Date: Wed, 30 Apr 2025 19:05:13 +0100 Subject: [PATCH 054/332] Add mirage branch to publish pipeline --- .github/workflows/publish.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/publish.yml b/.github/workflows/publish.yml index abdf44a9..12f6d9b6 100644 --- a/.github/workflows/publish.yml +++ b/.github/workflows/publish.yml @@ -8,6 +8,7 @@ on: - beta - stable - 'v*.*.*' + - 'mirage' jobs: create_release: From 517623d3f3b6371cfd66f2e601ed297ad5cb57eb Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20Gon=C3=A7alves?= Date: Tue, 6 May 2025 18:12:19 +0100 Subject: [PATCH 055/332] Addressed some comments in PR #851 --- node_cli/core/nginx.py | 2 +- node_cli/core/node.py | 16 ++++++++-------- node_cli/utils/docker_utils.py | 16 ++++++++++------ tests/core/nginx_test.py | 11 ++++++----- 4 files changed, 25 insertions(+), 20 deletions(-) diff --git a/node_cli/core/nginx.py b/node_cli/core/nginx.py index dfd63c4e..97ea4844 100644 --- a/node_cli/core/nginx.py +++ b/node_cli/core/nginx.py @@ -51,7 +51,7 @@ def check_ssl_certs(): def is_regular_node_nginx() -> bool: - return TYPE 
!= NodeType.MIRAGE + return TYPE in [NodeType.REGULAR, NodeType.SYNC] def reload_nginx() -> None: diff --git a/node_cli/core/node.py b/node_cli/core/node.py index ca90bb69..76552fe4 100644 --- a/node_cli/core/node.py +++ b/node_cli/core/node.py @@ -430,25 +430,25 @@ def turn_on(maintenance_off, sync_schains, env_file, node_type: NodeType) -> Non def get_expected_container_names(node_type: NodeType, is_mirage_boot: bool) -> list[str]: if node_type == NodeType.MIRAGE and is_mirage_boot: - return list(BASE_MIRAGE_BOOT_COMPOSE_SERVICES.values()) + services = BASE_MIRAGE_BOOT_COMPOSE_SERVICES elif node_type == NodeType.MIRAGE and not is_mirage_boot: - return list(BASE_MIRAGE_COMPOSE_SERVICES.values()) + services = BASE_MIRAGE_COMPOSE_SERVICES elif node_type == NodeType.SYNC: - return list(BASE_SYNC_COMPOSE_SERVICES.values()) + services = BASE_SYNC_COMPOSE_SERVICES else: - return list(BASE_SKALE_COMPOSE_SERVICES.values()) + services = BASE_SKALE_COMPOSE_SERVICES + + return list(services.values()) def is_base_containers_alive(node_type: NodeType, is_mirage_boot: bool = False) -> bool: base_container_names = get_expected_container_names(node_type, is_mirage_boot) dclient = docker.from_env() - running_container_names = [container.name for container in dclient.containers.list()] + running_container_names = set(container.name for container in dclient.containers.list()) for base_container in base_container_names: - if base_container in running_container_names: - continue - else: + if base_container not in running_container_names: return False return True diff --git a/node_cli/utils/docker_utils.py b/node_cli/utils/docker_utils.py index cd027df2..60e869f0 100644 --- a/node_cli/utils/docker_utils.py +++ b/node_cli/utils/docker_utils.py @@ -291,11 +291,13 @@ def get_compose_path(node_type: NodeType) -> str: def get_compose_services(node_type: NodeType) -> list[str]: if node_type == NodeType.SYNC: - return list(BASE_SYNC_COMPOSE_SERVICES) + result = 
list(BASE_SYNC_COMPOSE_SERVICES) elif node_type == NodeType.MIRAGE: - return list(BASE_MIRAGE_COMPOSE_SERVICES) + result = list(BASE_MIRAGE_COMPOSE_SERVICES) else: - return list(BASE_SKALE_COMPOSE_SERVICES) + result = list(BASE_SKALE_COMPOSE_SERVICES) + + return result def get_up_compose_cmd(node_type: NodeType, services: Optional[list[str]] = None) -> tuple: @@ -396,11 +398,13 @@ def is_api_running(node_type: NodeType, dclient: Optional[DockerClient] = None) def is_admin_running(node_type: NodeType, client: Optional[DockerClient] = None) -> bool: if node_type == NodeType.MIRAGE: - return is_container_running(name='mirage_admin', dclient=client) + result = is_container_running(name='mirage_admin', dclient=client) elif node_type == NodeType.SYNC: - return is_container_running(name='skale_sync_admin', dclient=client) + result = is_container_running(name='skale_sync_admin', dclient=client) else: - return is_container_running(name='skale_admin', dclient=client) + result = is_container_running(name='skale_admin', dclient=client) + + return result def system_prune(): diff --git a/tests/core/nginx_test.py b/tests/core/nginx_test.py index 657f9432..b19a21ca 100644 --- a/tests/core/nginx_test.py +++ b/tests/core/nginx_test.py @@ -47,12 +47,13 @@ def nginx_template(): os.makedirs(os.path.dirname(NGINX_TEMPLATE_FILEPATH), exist_ok=True) with open(NGINX_TEMPLATE_FILEPATH, 'w') as f: f.write(TEST_NGINX_TEMPLATE) - yield try: - os.remove(NGINX_TEMPLATE_FILEPATH) - os.remove(NGINX_CONFIG_FILEPATH) - except FileNotFoundError: - pass + yield + finally: + if os.path.isfile(NGINX_TEMPLATE_FILEPATH): + os.remove(NGINX_TEMPLATE_FILEPATH) + if os.path.isfile(NGINX_CONFIG_FILEPATH): + os.remove(NGINX_CONFIG_FILEPATH) @pytest.mark.parametrize( From d7d46623d9de2e9baadc379771c4fa8d0a7b32a2 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20Gon=C3=A7alves?= Date: Tue, 6 May 2025 18:15:51 +0100 Subject: [PATCH 056/332] Continued addressing comments from PR # 851. 
Removed pass statements --- node_cli/cli/mirage_node.py | 3 --- 1 file changed, 3 deletions(-) diff --git a/node_cli/cli/mirage_node.py b/node_cli/cli/mirage_node.py index f4ac7a1b..e2b0163c 100644 --- a/node_cli/cli/mirage_node.py +++ b/node_cli/cli/mirage_node.py @@ -34,7 +34,6 @@ def mirage_node_cli(): @check_inited def init_node(): click.echo("Placeholder: Command 'mirage node init' is not yet implemented.") - pass @mirage_node_cli.command( @@ -43,7 +42,6 @@ def init_node(): @check_inited def register_node(): click.echo("Placeholder: Command 'mirage node register' is not yet implemented.") - pass @mirage_node_cli.command('update', help='Update Mirage.') @@ -60,7 +58,6 @@ def register_node(): @streamed_cmd def update_node(env_file, pull_config_for_schain, unsafe_ok): click.echo("Placeholder: Command 'mirage node update' is not yet implemented.") - pass @mirage_node_cli.command('signature', help='Get mirage node signature for a validator ID.') From 24922dddb2849c16c8202585eba98af5e01fc4d3 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20Gon=C3=A7alves?= Date: Thu, 8 May 2025 15:30:08 +0100 Subject: [PATCH 057/332] Continued addressing comments in PR # 851 --- .github/workflows/test.yml | 2 +- .gitignore | 1 + node_cli/cli/node.py | 5 ++-- node_cli/core/mirage_boot.py | 3 -- tests/.skale/config/nginx.conf.j2 | 47 ------------------------------- tests/core/core_mirage_test.py | 3 -- 6 files changed, 4 insertions(+), 57 deletions(-) delete mode 100644 tests/.skale/config/nginx.conf.j2 diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 95087baa..04bde884 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -28,7 +28,7 @@ jobs: - name: Install python dependencies run: | python -m pip install --upgrade pip - pip install -e .[dev] + pip install -e ".[dev]" - name: Generate info run: ./scripts/generate_info.sh 1.0.0 my-branch normal diff --git a/.gitignore b/.gitignore index 9ce8b81c..e183f4a7 100644 --- a/.gitignore 
+++ b/.gitignore @@ -122,3 +122,4 @@ test-env nginx.conf tests/.skale/node_data/docker.json tests/.skale/node_data/node_options.json +tests/.skale/config/nginx.conf.j2 \ No newline at end of file diff --git a/node_cli/cli/node.py b/node_cli/cli/node.py index b0c6c696..97b6ef19 100644 --- a/node_cli/cli/node.py +++ b/node_cli/cli/node.py @@ -42,7 +42,6 @@ from node_cli.utils.helper import abort_if_false, safe_load_texts, streamed_cmd, IP_TYPE from node_cli.utils.meta import get_meta_info from node_cli.utils.print_formatters import print_meta_info -from node_cli.utils.node_type import NodeType TEXTS = safe_load_texts() @@ -104,7 +103,7 @@ def update_node(env_file, pull_config_for_schain, unsafe_ok): update( env_filepath=env_file, pull_config_for_schain=pull_config_for_schain, - node_type=NodeType.REGULAR, + node_type=TYPE, unsafe_ok=unsafe_ok, ) @@ -228,7 +227,7 @@ def _set_domain_name(domain): help='Network to check', ) def check(network): - run_checks(node_type=NodeType.REGULAR, network=network) + run_checks(node_type=TYPE, network=network) @node.command(help='Reconfigure nftables rules') diff --git a/node_cli/core/mirage_boot.py b/node_cli/core/mirage_boot.py index c8a02f40..79d453b3 100644 --- a/node_cli/core/mirage_boot.py +++ b/node_cli/core/mirage_boot.py @@ -22,7 +22,6 @@ import time from node_cli.configs import TM_INIT_TIMEOUT -from node_cli.core.resources import update_resource_allocation from node_cli.core.node import compose_node_env, is_base_containers_alive from node_cli.operations import init_mirage_boot_op, migrate_mirage_boot_op from node_cli.utils.decorators import check_not_inited, check_inited, check_user @@ -50,8 +49,6 @@ def init(env_filepath: str) -> None: time.sleep(TM_INIT_TIMEOUT) if not is_base_containers_alive(node_type=NodeType.MIRAGE, is_mirage_boot=True): error_exit('Containers are not running', exit_code=CLIExitCodes.OPERATION_EXECUTION_ERROR) - logger.info('Generating mirage resource allocation file ...') - 
update_resource_allocation(env['ENV_TYPE']) logger.info('Init mirage procedure finished') diff --git a/tests/.skale/config/nginx.conf.j2 b/tests/.skale/config/nginx.conf.j2 deleted file mode 100644 index dc264362..00000000 --- a/tests/.skale/config/nginx.conf.j2 +++ /dev/null @@ -1,47 +0,0 @@ -limit_req_zone $binary_remote_addr zone=one:10m rate=7r/s; - -server { - listen 3009; - - {% if ssl %} - listen 311 ssl; - ssl_certificate /ssl/ssl_cert; - ssl_certificate_key /ssl/ssl_key; - {% endif %} - - proxy_read_timeout 500s; - proxy_connect_timeout 500s; - proxy_send_timeout 500s; - - error_log /var/log/nginx/error.log warn; - client_max_body_size 20m; - - server_name localhost; - limit_req zone=one burst=10; - - location / { - include uwsgi_params; - uwsgi_read_timeout 500s; - uwsgi_socket_keepalive on; - uwsgi_pass 127.0.0.1:3010; - } -} - -server { - listen 80; - - {% if ssl %} - listen 443 ssl; - ssl_certificate /ssl/ssl_cert; - ssl_certificate_key /ssl/ssl_key; - {% endif %} - - error_log /var/log/nginx/error.log warn; - client_max_body_size 20m; - server_name localhost; - limit_req zone=one burst=50; - - location / { - root /filestorage; - } -} \ No newline at end of file diff --git a/tests/core/core_mirage_test.py b/tests/core/core_mirage_test.py index 2063ac8f..38f2ec37 100644 --- a/tests/core/core_mirage_test.py +++ b/tests/core/core_mirage_test.py @@ -32,7 +32,6 @@ def test_restore_mirage( mock_sleep.assert_called_once() -@mock.patch('node_cli.core.mirage_boot.update_resource_allocation') @mock.patch('node_cli.core.mirage_boot.is_base_containers_alive', return_value=True) @mock.patch('node_cli.core.mirage_boot.time.sleep') @mock.patch('node_cli.core.mirage_boot.init_mirage_boot_op') @@ -42,7 +41,6 @@ def test_init_mirage_boot( mock_init_op, mock_sleep, mock_is_alive, - mock_update_alloc, valid_env_file, ensure_meta_removed, ): @@ -59,7 +57,6 @@ def test_init_mirage_boot( mock_init_op.assert_called_once_with(valid_env_file, mock_env) 
mock_sleep.assert_called_once() mock_is_alive.assert_called_once_with(node_type=NodeType.MIRAGE, is_mirage_boot=True) - mock_update_alloc.assert_called_once_with(mock_env['ENV_TYPE']) @mock.patch('node_cli.utils.decorators.is_user_valid', return_value=True) From 5f105cc52a9d875c92043d514b236813a89ce52c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20Gon=C3=A7alves?= Date: Thu, 8 May 2025 15:56:12 +0100 Subject: [PATCH 058/332] Refactored publish pipeline to avoid code duplication --- .github/workflows/publish.yml | 243 ++++++++++------------------------ 1 file changed, 71 insertions(+), 172 deletions(-) diff --git a/.github/workflows/publish.yml b/.github/workflows/publish.yml index a043862f..4365594a 100644 --- a/.github/workflows/publish.yml +++ b/.github/workflows/publish.yml @@ -20,12 +20,12 @@ jobs: branch: ${{ steps.export_outputs.outputs.branch }} steps: - name: Checkout code - uses: actions/checkout@v2 + uses: actions/checkout@v4 with: submodules: true - name: Checkout submodules - run: git submodule update --init + run: git submodule update --init --recursive - name: Install ubuntu dependencies run: | @@ -55,185 +55,84 @@ jobs: release_name: ${{ env.VERSION }} draft: false prerelease: ${{ env.PRERELEASE }} + - name: Export outputs id: export_outputs run: | echo "::set-output name=version::$VERSION" echo "::set-output name=branch::$BRANCH" - build_and_publish_normal: - if: github.event.pull_request.merged - needs: create_release - name: Build and publish for ${{ matrix.os }} - runs-on: ${{ matrix.os }} - strategy: - matrix: - include: - - os: ubuntu-22.04 - asset_name: skale-${{ needs.create_release.outputs.version }}-Linux-x86_64 - steps: - - uses: actions/checkout@v2 - - name: Set up Python 3.11 - uses: actions/setup-python@v1 - with: - python-version: 3.11 - - - name: Install ubuntu dependencies - if: matrix.os == 'ubuntu-22.04' - run: | - sudo apt-get update - - - name: Checkout submodules - run: git submodule update --init - - - name: Build 
normal binary - run: | - mkdir -p ./dist - docker build . -t node-cli-builder - docker run -v /home/ubuntu/dist:/app/dist node-cli-builder scripts/build.sh ${{ needs.create_release.outputs.version }} ${{ needs.create_release.outputs.branch }} normal - ls -altr /home/ubuntu/dist/ - docker rm -f $(docker ps -aq) - - - name: Save sha512sum - run: | - sudo sha512sum /home/ubuntu/dist/${{ matrix.asset_name }} | sudo tee > /dev/null /home/ubuntu/dist/sha512sum - - - name: Upload release binary - id: upload-release-asset - uses: actions/upload-release-asset@v1 - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - with: - upload_url: ${{ needs.create_release.outputs.upload_url }} - asset_path: /home/ubuntu/dist/${{ matrix.asset_name }} - asset_name: ${{ matrix.asset_name }} - asset_content_type: application/octet-stream - - - name: Upload release checksum - id: upload-release-checksum - uses: actions/upload-release-asset@v1 - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - with: - upload_url: ${{ needs.create_release.outputs.upload_url }} - asset_path: /home/ubuntu/dist/sha512sum - asset_name: ${{ matrix.asset_name }}.sha512 - asset_content_type: text/plain - - build_and_publish_sync: + build_and_publish: if: github.event.pull_request.merged needs: create_release - name: Build and publish for ${{ matrix.os }} + name: Build and publish ${{ matrix.build_type }} for ${{ matrix.os }} runs-on: ${{ matrix.os }} strategy: matrix: - include: - - os: ubuntu-22.04 - asset_name: skale-${{ needs.create_release.outputs.version }}-Linux-x86_64-sync + os: [ubuntu-22.04] + build_type: [normal, sync, mirage] steps: - - uses: actions/checkout@v2 - - name: Set up Python 3.11 - uses: actions/setup-python@v1 - with: - python-version: 3.11 - - - name: Install ubuntu dependencies - if: matrix.os == 'ubuntu-22.04' - run: | - sudo apt-get update - - - name: Checkout submodules - run: git submodule update --init - - - name: Build sync release binary - run: | - mkdir -p ./dist - docker build . 
-t node-cli-builder - docker run -v /home/ubuntu/dist:/app/dist node-cli-builder scripts/build.sh ${{ needs.create_release.outputs.version }} ${{ needs.create_release.outputs.branch }} sync - ls -altr /home/ubuntu/dist/ - docker rm -f $(docker ps -aq) - - - name: Save sha512sum - run: | - sudo sha512sum /home/ubuntu/dist/${{ matrix.asset_name }} | sudo tee > /dev/null /home/ubuntu/dist/sha512sum - - - name: Upload release sync CLI - id: upload-sync-release-asset - uses: actions/upload-release-asset@v1 - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - with: - upload_url: ${{ needs.create_release.outputs.upload_url }} - asset_path: /home/ubuntu/dist/${{ matrix.asset_name }} - asset_name: ${{ matrix.asset_name }} - asset_content_type: application/octet-stream - - - name: Upload release sync CLI checksum - id: upload-sync-release-checksum - uses: actions/upload-release-asset@v1 - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - with: - upload_url: ${{ needs.create_release.outputs.upload_url }} - asset_path: /home/ubuntu/dist/sha512sum - asset_name: ${{ matrix.asset_name }}.sha512 - asset_content_type: text/plain - - build_and_publish_mirage: - if: github.event.pull_request.merged - needs: create_release - name: Build and publish for ${{ matrix.os }} - runs-on: ${{ matrix.os }} - strategy: - matrix: - include: - - os: ubuntu-22.04 - asset_name: skale-${{ needs.create_release.outputs.version }}-Linux-x86_64-mirage - steps: - - uses: actions/checkout@v2 - - name: Set up Python 3.11 - uses: actions/setup-python@v1 - with: - python-version: 3.11 - - - name: Install ubuntu dependencies - if: matrix.os == 'ubuntu-22.04' - run: | - sudo apt-get update - - - name: Checkout submodules - run: git submodule update --init - - - name: Build mirage release binary - run: | - mkdir -p ./dist - docker build . 
-t node-cli-builder - docker run -v /home/ubuntu/dist:/app/dist node-cli-builder scripts/build.sh ${{ needs.create_release.outputs.version }} ${{ needs.create_release.outputs.branch }} mirage - ls -altr /home/ubuntu/dist/ - docker rm -f $(docker ps -aq) - - - name: Save sha512sum - run: | - sudo sha512sum /home/ubuntu/dist/${{ matrix.asset_name }} | sudo tee > /dev/null /home/ubuntu/dist/sha512sum - - - name: Upload release mirage CLI - id: upload-mirage-release-asset - uses: actions/upload-release-asset@v1 - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - with: - upload_url: ${{ needs.create_release.outputs.upload_url }} - asset_path: /home/ubuntu/dist/${{ matrix.asset_name }} - asset_name: ${{ matrix.asset_name }} - asset_content_type: application/octet-stream - - - name: Upload release mirage CLI checksum - id: upload-mirage-release-checksum - uses: actions/upload-release-asset@v1 - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - with: - upload_url: ${{ needs.create_release.outputs.upload_url }} - asset_path: /home/ubuntu/dist/sha512sum - asset_name: ${{ matrix.asset_name }}.sha512 - asset_content_type: text/plain + - name: Checkout code + uses: actions/checkout@v4 + with: + submodules: true + + - name: Set up Python 3.11 + uses: actions/setup-python@v5 + with: + python-version: 3.11 + + - name: Install ubuntu dependencies + if: matrix.os == 'ubuntu-22.04' + run: | + sudo apt-get update + + - name: Ensure submodules are updated + run: git submodule update --init --recursive + + - name: Define Asset Name + id: asset_details + run: | + ASSET_BASE_NAME="skale-${{ needs.create_release.outputs.version }}-Linux-x86_64" + if [[ "${{ matrix.build_type }}" == "normal" ]]; then + echo "FINAL_ASSET_NAME=${ASSET_BASE_NAME}" >> $GITHUB_OUTPUT + else + echo "FINAL_ASSET_NAME=${ASSET_BASE_NAME}-${{ matrix.build_type }}" >> $GITHUB_OUTPUT + fi + + - name: Build ${{ matrix.build_type }} release binary + run: | + mkdir -p ${{ github.workspace }}/dist + docker build . 
-t node-cli-builder + docker run --rm -v ${{ github.workspace }}/dist:/app/dist node-cli-builder \ + scripts/build.sh ${{ needs.create_release.outputs.version }} ${{ needs.create_release.outputs.branch }} ${{ matrix.build_type }} + echo "Contents of dist directory:" + ls -altr ${{ github.workspace }}/dist/ + docker rm -f $(docker ps -aq) + + - name: Save sha512sum for ${{ steps.asset_details.outputs.FINAL_ASSET_NAME }} + run: | + cd ${{ github.workspace }}/dist + sha512sum ${{ steps.asset_details.outputs.FINAL_ASSET_NAME }} > ${{ steps.asset_details.outputs.FINAL_ASSET_NAME }}.sha512sum + echo "Checksum file created: ${{ steps.asset_details.outputs.FINAL_ASSET_NAME }}.sha512sum" + cat ${{ steps.asset_details.outputs.FINAL_ASSET_NAME }}.sha512sum + + - name: Upload release binary (${{ matrix.build_type }}) + uses: actions/upload-release-asset@v1 + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + with: + upload_url: ${{ needs.create_release.outputs.upload_url }} + asset_path: ${{ github.workspace }}/dist/${{ steps.asset_details.outputs.FINAL_ASSET_NAME }} + asset_name: ${{ steps.asset_details.outputs.FINAL_ASSET_NAME }} + asset_content_type: application/octet-stream + + - name: Upload release checksum (${{ matrix.build_type }}) + uses: actions/upload-release-asset@v1 + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + with: + upload_url: ${{ needs.create_release.outputs.upload_url }} + asset_path: ${{ github.workspace }}/dist/${{ steps.asset_details.outputs.FINAL_ASSET_NAME }}.sha512sum + asset_name: ${{ steps.asset_details.outputs.FINAL_ASSET_NAME }}.sha512 + asset_content_type: text/plain \ No newline at end of file From 6997ef26a08412c2f7390d442d748c12885646d6 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20Gon=C3=A7alves?= Date: Thu, 8 May 2025 19:00:39 +0100 Subject: [PATCH 059/332] Added `update` command to mirage CLI --- node_cli/cli/mirage_boot.py | 23 ++++++++++++++++++- node_cli/core/mirage_boot.py | 30 ++++++++++++++++++++++--- 
node_cli/operations/__init__.py | 1 + node_cli/operations/base.py | 40 +++++++++++++++++++++++++++++++++ tests/core/core_mirage_test.py | 39 ++++++++++++++++++++++++++++++-- 5 files changed, 127 insertions(+), 6 deletions(-) diff --git a/node_cli/cli/mirage_boot.py b/node_cli/cli/mirage_boot.py index 8130c253..ae8c4999 100644 --- a/node_cli/cli/mirage_boot.py +++ b/node_cli/cli/mirage_boot.py @@ -20,7 +20,7 @@ import click from node_cli.core.node import get_node_signature, register_node as register -from node_cli.core.mirage_boot import init, migrate +from node_cli.core.mirage_boot import init, migrate, update from node_cli.configs import DEFAULT_NODE_BASE_PORT from node_cli.utils.helper import streamed_cmd, IP_TYPE, error_exit, abort_if_false @@ -84,3 +84,24 @@ def signature_boot(validator_id): @streamed_cmd def migrate_boot(env_file, pull_config_for_schain): migrate(env_file, pull_config_for_schain) + + +@mirage_boot_cli.command('update', help='Update Mirage node from .env file') +@click.option( + '--yes', + is_flag=True, + callback=abort_if_false, + expose_value=False, + prompt='Are you sure you want to update Mirage node software?', +) +@click.option('--pull-config', 'pull_config_for_schain', hidden=True, type=str) +@click.option('--unsafe', 'unsafe_ok', help='Allow unsafe update', hidden=True, is_flag=True) +@click.argument('env_file') +@streamed_cmd +def update_node(env_file, pull_config_for_schain, unsafe_ok): + update( + env_filepath=env_file, + pull_config_for_schain=pull_config_for_schain, + node_type=TYPE, + unsafe_ok=unsafe_ok, + ) diff --git a/node_cli/core/mirage_boot.py b/node_cli/core/mirage_boot.py index 79d453b3..a060882f 100644 --- a/node_cli/core/mirage_boot.py +++ b/node_cli/core/mirage_boot.py @@ -23,7 +23,7 @@ from node_cli.configs import TM_INIT_TIMEOUT from node_cli.core.node import compose_node_env, is_base_containers_alive -from node_cli.operations import init_mirage_boot_op, migrate_mirage_boot_op +from node_cli.operations import 
init_mirage_boot_op, migrate_mirage_boot_op, update_mirage_boot_op from node_cli.utils.decorators import check_not_inited, check_inited, check_user from node_cli.utils.exit_codes import CLIExitCodes from node_cli.utils.helper import error_exit @@ -55,7 +55,7 @@ def init(env_filepath: str) -> None: @check_inited @check_user def migrate(env_filepath: str, pull_config_for_schain: str) -> None: - logger.info('Node update started') + logger.info('Mirage node migration started') env = compose_node_env( env_filepath, inited_node=True, @@ -67,9 +67,33 @@ def migrate(env_filepath: str, pull_config_for_schain: str) -> None: if migrate_ok: logger.info('Waiting for containers initialization') time.sleep(TM_INIT_TIMEOUT) - alive = is_base_containers_alive(node_type=NodeType.MIRAGE, is_mirage_boot=True) + alive = is_base_containers_alive(node_type=NodeType.MIRAGE) if not migrate_ok or not alive: print_node_cmd_error() return else: logger.info('Node migration from Mirage Boot to Mirage Main finished successfully!') + + +@check_inited +@check_user +def update(env_filepath: str, pull_config_for_schain: str) -> None: + logger.info('Mirage boot node update started') + env = compose_node_env( + env_filepath, + inited_node=True, + sync_schains=False, + pull_config_for_schain=pull_config_for_schain, + node_type=NodeType.MIRAGE, + is_mirage_boot=True, + ) + migrate_ok = update_mirage_boot_op(env_filepath, env) + if migrate_ok: + logger.info('Waiting for containers initialization') + time.sleep(TM_INIT_TIMEOUT) + alive = is_base_containers_alive(node_type=NodeType.MIRAGE, is_mirage_boot=True) + if not migrate_ok or not alive: + print_node_cmd_error() + return + else: + logger.info('Mirage boot node update finished successfully!') diff --git a/node_cli/operations/__init__.py b/node_cli/operations/__init__.py index 17706533..159c0e16 100644 --- a/node_cli/operations/__init__.py +++ b/node_cli/operations/__init__.py @@ -23,6 +23,7 @@ init_sync as init_sync_op, init_mirage_boot as 
init_mirage_boot_op, migrate_mirage_boot as migrate_mirage_boot_op, + update_mirage_boot as update_mirage_boot_op, update_sync as update_sync_op, turn_off as turn_off_op, turn_on as turn_on_op, diff --git a/node_cli/operations/base.py b/node_cli/operations/base.py index ea78cff0..a2f80ff9 100644 --- a/node_cli/operations/base.py +++ b/node_cli/operations/base.py @@ -186,6 +186,46 @@ def migrate_mirage_boot(env_filepath: str, env: Dict) -> bool: return True +@checked_host +def update_mirage_boot(env_filepath: str, env: Dict) -> bool: + compose_rm(node_type=NodeType.MIRAGE, env=env) + remove_dynamic_containers() + + sync_skale_node() + ensure_btrfs_kernel_module_autoloaded() + + if env.get('SKIP_DOCKER_CONFIG') != 'True': + configure_docker() + + enable_monitoring = str_to_bool(env.get('MONITORING_CONTAINERS', 'False')) + configure_nftables(enable_monitoring=enable_monitoring) + + generate_nginx_config() + + prepare_host(env_filepath, env['ENV_TYPE']) + + current_stream = get_meta_info().config_stream + skip_cleanup = env.get('SKIP_DOCKER_CLEANUP') == 'True' + if not skip_cleanup and current_stream != env['CONTAINER_CONFIGS_STREAM']: + logger.info( + 'Stream version was changed from %s to %s', + current_stream, + env['CONTAINER_CONFIGS_STREAM'], + ) + docker_cleanup() + + update_meta( + VERSION, + env['CONTAINER_CONFIGS_STREAM'], + env['DOCKER_LVMPY_STREAM'], + distro.id(), + distro.version(), + ) + update_images(env=env) + compose_up(env=env, node_type=NodeType.MIRAGE) + return True + + @checked_host def init(env_filepath: str, env: dict, node_type: NodeType) -> None: sync_skale_node() diff --git a/tests/core/core_mirage_test.py b/tests/core/core_mirage_test.py index 38f2ec37..68ac1ab5 100644 --- a/tests/core/core_mirage_test.py +++ b/tests/core/core_mirage_test.py @@ -1,7 +1,7 @@ from unittest import mock from node_cli.configs import SKALE_DIR -from node_cli.core.mirage_boot import init as init_boot, migrate +from node_cli.core.mirage_boot import init as init_boot, 
migrate, update from node_cli.core.mirage_node import restore_mirage from node_cli.utils.node_type import NodeType @@ -77,7 +77,7 @@ def test_migrate_mirage_boot( mock_env = {'ENV_TYPE': 'devnet-mirage'} mock_compose_env.return_value = mock_env mock_migrate_op.return_value = True - pull_config_for_schain = 'some_schain' + pull_config_for_schain = 'mirage' migrate(valid_env_file, pull_config_for_schain) @@ -90,4 +90,39 @@ def test_migrate_mirage_boot( ) mock_migrate_op.assert_called_once_with(valid_env_file, mock_env) mock_sleep.assert_called_once() + mock_is_alive.assert_called_once_with(node_type=NodeType.MIRAGE) + + +@mock.patch('node_cli.utils.decorators.is_user_valid', return_value=True) +@mock.patch('node_cli.core.mirage_boot.is_base_containers_alive', return_value=True) +@mock.patch('node_cli.core.mirage_boot.time.sleep') +@mock.patch('node_cli.core.mirage_boot.update_mirage_boot_op') +@mock.patch('node_cli.core.mirage_boot.compose_node_env') +def test_update_mirage_boot( + mock_compose_env, + mock_update_op, + mock_sleep, + mock_is_alive, + mock_is_user_valid, + valid_env_file, + resource_alloc, + meta_file_v3, +): + mock_env = {'ENV_TYPE': 'devnet-mirage'} + mock_compose_env.return_value = mock_env + mock_update_op.return_value = True + pull_config_for_schain = 'mirage' + + update(valid_env_file, pull_config_for_schain) + + mock_compose_env.assert_called_once_with( + valid_env_file, + inited_node=True, + sync_schains=False, + pull_config_for_schain=pull_config_for_schain, + node_type=NodeType.MIRAGE, + is_mirage_boot=True, + ) + mock_update_op.assert_called_once_with(valid_env_file, mock_env) + mock_sleep.assert_called_once() mock_is_alive.assert_called_once_with(node_type=NodeType.MIRAGE, is_mirage_boot=True) From 2473c70aa82cd2134cb7dfac85e1cd759ab05bf7 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20Gon=C3=A7alves?= Date: Thu, 8 May 2025 19:00:53 +0100 Subject: [PATCH 060/332] Fixed unnecessary arguments --- node_cli/cli/mirage_boot.py | 5 +---- 1 
file changed, 1 insertion(+), 4 deletions(-) diff --git a/node_cli/cli/mirage_boot.py b/node_cli/cli/mirage_boot.py index ae8c4999..4419b92a 100644 --- a/node_cli/cli/mirage_boot.py +++ b/node_cli/cli/mirage_boot.py @@ -95,13 +95,10 @@ def migrate_boot(env_file, pull_config_for_schain): prompt='Are you sure you want to update Mirage node software?', ) @click.option('--pull-config', 'pull_config_for_schain', hidden=True, type=str) -@click.option('--unsafe', 'unsafe_ok', help='Allow unsafe update', hidden=True, is_flag=True) @click.argument('env_file') @streamed_cmd -def update_node(env_file, pull_config_for_schain, unsafe_ok): +def update_node(env_file, pull_config_for_schain): update( env_filepath=env_file, pull_config_for_schain=pull_config_for_schain, - node_type=TYPE, - unsafe_ok=unsafe_ok, ) From 12dfa782408516c66734857ca23d42c569e5143f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20Gon=C3=A7alves?= Date: Mon, 12 May 2025 17:03:52 +0100 Subject: [PATCH 061/332] Updated codebase to use mirage_static_params.yaml for Mirage nodes. - Updated static param loading to be NodeType-aware. - Normalized ENV_TYPEs for Mirage. - Bypassed resource allocation for Mirage init/restore. - Updated is_node_inited to use nginx.conf. 
--- node_cli/configs/__init__.py | 1 + node_cli/configs/env.py | 23 ++++++----------------- node_cli/core/checks.py | 20 ++++++++++++++++---- node_cli/core/host.py | 4 ++-- node_cli/core/node.py | 2 +- node_cli/operations/base.py | 9 +++++++-- 6 files changed, 33 insertions(+), 26 deletions(-) diff --git a/node_cli/configs/__init__.py b/node_cli/configs/__init__.py index 5056c58c..668e8ea1 100644 --- a/node_cli/configs/__init__.py +++ b/node_cli/configs/__init__.py @@ -58,6 +58,7 @@ SYNC_COMPOSE_PATH = os.path.join(CONTAINER_CONFIG_PATH, 'docker-compose-sync.yml') MIRAGE_COMPOSE_PATH = os.path.join(CONTAINER_CONFIG_PATH, 'docker-compose-mirage.yml') STATIC_PARAMS_FILEPATH = os.path.join(CONTAINER_CONFIG_PATH, 'static_params.yaml') +MIRAGE_STATIC_PARAMS_FILEPATH = os.path.join(CONTAINER_CONFIG_PATH, 'mirage_static_params.yaml') NGINX_TEMPLATE_FILEPATH = os.path.join(CONTAINER_CONFIG_PATH, 'nginx.conf.j2') NGINX_CONFIG_FILEPATH = os.path.join(NODE_DATA_PATH, 'nginx.conf') diff --git a/node_cli/configs/env.py b/node_cli/configs/env.py index 755014b6..9979beef 100644 --- a/node_cli/configs/env.py +++ b/node_cli/configs/env.py @@ -30,9 +30,7 @@ SKALE_DIR_ENV_FILEPATH = os.path.join(SKALE_DIR, '.env') CONFIGS_ENV_FILEPATH = os.path.join(CONTAINER_CONFIG_PATH, '.env') -ALLOWED_SKALE_ENV_TYPES = ['mainnet', 'testnet', 'qanet', 'devnet'] -ALLOWED_MIRAGE_ENV_TYPES = ['mainnet-mirage', 'devnet-mirage'] -ALLOWED_ENV_TYPES = [*ALLOWED_SKALE_ENV_TYPES, *ALLOWED_MIRAGE_ENV_TYPES] +ALLOWED_ENV_TYPES = ['mainnet', 'testnet', 'qanet', 'devnet'] CORE_REQUIRED_PARAMS: Dict[str, str] = { 'CONTAINER_CONFIGS_STREAM': '', @@ -96,7 +94,7 @@ def get_validated_env_config( load_env_file(env_filepath) params = build_env_params(node_type=node_type, is_mirage_boot=is_mirage_boot) populate_env_params(params) - validate_env_params(node_type=node_type, params=params) + validate_env_params(params=params) return params @@ -130,13 +128,12 @@ def populate_env_params(params: Dict[str, str]) -> None: def 
validate_env_params( - node_type: NodeType, params: Dict[str, str], ) -> None: missing = absent_required_params(params) if missing: error_exit(f'Missing required parameters: {missing}') - validate_env_type(node_type=node_type, env_type=params['ENV_TYPE']) + validate_env_type(env_type=params['ENV_TYPE']) endpoint = params['ENDPOINT'] validate_env_alias_or_address(params['MANAGER_CONTRACTS'], ContractType.MANAGER, endpoint) @@ -144,14 +141,6 @@ def validate_env_params( validate_env_alias_or_address(params['IMA_CONTRACTS'], ContractType.IMA, endpoint) -def validate_env_type(node_type: NodeType, env_type: str) -> None: - allowed_env_types_for_node_type = list() - if node_type == NodeType.MIRAGE: - allowed_env_types_for_node_type = ALLOWED_MIRAGE_ENV_TYPES - else: - allowed_env_types_for_node_type = ALLOWED_SKALE_ENV_TYPES - - if env_type not in allowed_env_types_for_node_type: - error_exit( - f'Allowed ENV_TYPE values are {allowed_env_types_for_node_type}. Actual: "{env_type}"' - ) +def validate_env_type(env_type: str) -> None: + if env_type not in ALLOWED_ENV_TYPES: + error_exit(f'Allowed ENV_TYPE values are {ALLOWED_ENV_TYPES}. 
Actual: "{env_type}"') diff --git a/node_cli/core/checks.py b/node_cli/core/checks.py index c4afee68..1428c440 100644 --- a/node_cli/core/checks.py +++ b/node_cli/core/checks.py @@ -56,9 +56,11 @@ DOCKER_DAEMON_HOSTS, REPORTS_PATH, STATIC_PARAMS_FILEPATH, + MIRAGE_STATIC_PARAMS_FILEPATH, ) from node_cli.core.host import is_ufw_ipv6_chain_exists, is_ufw_ipv6_option_enabled from node_cli.core.resources import get_disk_size +from node_cli.utils.docker_utils import NodeType from node_cli.utils.helper import run_cmd, safe_mkdir logger = logging.getLogger(__name__) @@ -76,9 +78,18 @@ FuncList = List[Func] -def get_static_params(env_type: str = 'mainnet', config_path: str = CONTAINER_CONFIG_PATH) -> Dict: - status_params_filename = os.path.basename(STATIC_PARAMS_FILEPATH) - static_params_filepath = os.path.join(config_path, status_params_filename) +def get_static_params( + node_type: NodeType, + env_type: str = 'mainnet', + config_path: str = CONTAINER_CONFIG_PATH, +) -> Dict: + if node_type == NodeType.MIRAGE: + static_params_base_filepath = MIRAGE_STATIC_PARAMS_FILEPATH + else: + static_params_base_filepath = STATIC_PARAMS_FILEPATH + + static_params_filename = os.path.basename(static_params_base_filepath) + static_params_filepath = os.path.join(config_path, static_params_filename) with open(static_params_filepath) as requirements_file: ydata = yaml.load(requirements_file, Loader=yaml.Loader) return ydata['envs'][env_type] @@ -475,12 +486,13 @@ def get_all_checkers(disk: str, requirements: Dict) -> List[BaseChecker]: def run_checks( disk: str, + node_type: NodeType, env_type: str = 'mainnet', config_path: str = CONTAINER_CONFIG_PATH, check_type: CheckType = CheckType.ALL, ) -> ResultList: logger.info('Executing checks. 
Type: %s', check_type) - requirements = get_static_params(env_type, config_path) + requirements = get_static_params(node_type, env_type, config_path) checkers = get_all_checkers(disk, requirements) checks = get_checks(checkers, check_type) results = [check() for check in checks] diff --git a/node_cli/core/host.py b/node_cli/core/host.py index f4d4e790..e3279b75 100644 --- a/node_cli/core/host.py +++ b/node_cli/core/host.py @@ -47,8 +47,8 @@ SKALE_TMP_DIR, UFW_CONFIG_PATH, UFW_IPV6_BEFORE_INPUT_CHAIN, + NGINX_CONFIG_FILEPATH, ) -from node_cli.configs.resource_allocation import RESOURCE_ALLOCATION_FILEPATH from node_cli.configs.cli_logger import LOG_DATA_PATH from node_cli.configs.env import SKALE_DIR_ENV_FILEPATH, CONFIGS_ENV_FILEPATH from node_cli.core.nftables import NFTablesManager @@ -103,7 +103,7 @@ def prepare_host(env_filepath: str, env_type: str, allocation: bool = False) -> def is_node_inited() -> bool: - return os.path.isfile(RESOURCE_ALLOCATION_FILEPATH) + return os.path.isfile(NGINX_CONFIG_FILEPATH) def make_dirs(): diff --git a/node_cli/core/node.py b/node_cli/core/node.py index 76552fe4..19f4a688 100644 --- a/node_cli/core/node.py +++ b/node_cli/core/node.py @@ -504,7 +504,7 @@ def run_checks( if disk is None: env = get_validated_env_config(node_type=node_type) disk = env['DISK_MOUNTPOINT'] - failed_checks = run_host_checks(disk, network, container_config_path) + failed_checks = run_host_checks(disk, node_type, network, container_config_path) if not failed_checks: print('Requirements checking successfully finished!') else: diff --git a/node_cli/operations/base.py b/node_cli/operations/base.py index a2f80ff9..c03f7f35 100644 --- a/node_cli/operations/base.py +++ b/node_cli/operations/base.py @@ -45,7 +45,7 @@ update_node_cli_schain_status, cleanup_sync_datadir, ) -from node_cli.cli.info import VERSION +from node_cli.cli.info import VERSION, TYPE from node_cli.operations.common import configure_filebeat, configure_flask, unpack_backup_archive from 
node_cli.operations.docker_lvmpy import lvmpy_install from node_cli.operations.skale_node import ( @@ -79,6 +79,7 @@ def wrapper(env_filepath: str, env: Dict, *args, **kwargs): download_skale_node(env.get('CONTAINER_CONFIGS_STREAM'), env.get('CONTAINER_CONFIGS_DIR')) failed_checks = run_host_checks( env['DISK_MOUNTPOINT'], + TYPE, env['ENV_TYPE'], CONTAINER_CONFIG_TMP_PATH, check_type=CheckType.PREINSTALL, @@ -93,6 +94,7 @@ def wrapper(env_filepath: str, env: Dict, *args, **kwargs): failed_checks = run_host_checks( env['DISK_MOUNTPOINT'], + TYPE, env['ENV_TYPE'], CONTAINER_CONFIG_PATH, check_type=CheckType.POSTINSTALL, @@ -285,7 +287,6 @@ def init_mirage_boot(env_filepath: str, env: dict) -> None: distro.id(), distro.version(), ) - update_resource_allocation(env_type=env['ENV_TYPE']) update_images(env=env) compose_up(env=env, node_type=NodeType.MIRAGE, is_mirage_boot=True) @@ -407,6 +408,7 @@ def restore(env, backup_path, node_type: NodeType, config_only=False): unpack_backup_archive(backup_path) failed_checks = run_host_checks( env['DISK_MOUNTPOINT'], + TYPE, env['ENV_TYPE'], CONTAINER_CONFIG_PATH, check_type=CheckType.PREINSTALL, @@ -441,6 +443,7 @@ def restore(env, backup_path, node_type: NodeType, config_only=False): failed_checks = run_host_checks( env['DISK_MOUNTPOINT'], + TYPE, env['ENV_TYPE'], CONTAINER_CONFIG_PATH, check_type=CheckType.POSTINSTALL, @@ -455,6 +458,7 @@ def restore_mirage(env, backup_path, config_only=False): unpack_backup_archive(backup_path) failed_checks = run_host_checks( env['DISK_MOUNTPOINT'], + TYPE, env['ENV_TYPE'], CONTAINER_CONFIG_PATH, check_type=CheckType.PREINSTALL, @@ -486,6 +490,7 @@ def restore_mirage(env, backup_path, config_only=False): failed_checks = run_host_checks( env['DISK_MOUNTPOINT'], + TYPE, env['ENV_TYPE'], CONTAINER_CONFIG_PATH, check_type=CheckType.POSTINSTALL, From 3bd4b73046e4692109d9610e672fef2e84fcddb9 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20Gon=C3=A7alves?= Date: Mon, 12 May 2025 17:15:30 
+0100 Subject: [PATCH 062/332] Removed unexistent import --- node_cli/cli/node.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/node_cli/cli/node.py b/node_cli/cli/node.py index 97b6ef19..4c9e40e3 100644 --- a/node_cli/cli/node.py +++ b/node_cli/cli/node.py @@ -37,7 +37,7 @@ run_checks, ) from node_cli.configs import DEFAULT_NODE_BASE_PORT -from node_cli.configs.env import ALLOWED_SKALE_ENV_TYPES +from node_cli.configs.env import ALLOWED_ENV_TYPES from node_cli.utils.decorators import check_inited from node_cli.utils.helper import abort_if_false, safe_load_texts, streamed_cmd, IP_TYPE from node_cli.utils.meta import get_meta_info @@ -222,7 +222,7 @@ def _set_domain_name(domain): @click.option( '--network', '-n', - type=click.Choice(ALLOWED_SKALE_ENV_TYPES), + type=click.Choice(ALLOWED_ENV_TYPES), default='mainnet', help='Network to check', ) From 0f0ab35c2e0e9c089902bc6b90f468044f725a1e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20Gon=C3=A7alves?= Date: Mon, 12 May 2025 17:28:33 +0100 Subject: [PATCH 063/332] Fixed `configs_env_validate_test.py` tests --- tests/configs/configs_env_validate_test.py | 45 ++++++---------------- 1 file changed, 12 insertions(+), 33 deletions(-) diff --git a/tests/configs/configs_env_validate_test.py b/tests/configs/configs_env_validate_test.py index e4e6df02..d8ef077f 100644 --- a/tests/configs/configs_env_validate_test.py +++ b/tests/configs/configs_env_validate_test.py @@ -12,8 +12,7 @@ get_validated_env_config, validate_env_params, validate_env_type, - ALLOWED_SKALE_ENV_TYPES, - ALLOWED_MIRAGE_ENV_TYPES, + ALLOWED_ENV_TYPES, REQUIRED_PARAMS_SKALE, REQUIRED_PARAMS_SYNC, REQUIRED_PARAMS_MIRAGE_BOOT, @@ -112,43 +111,23 @@ def test_build_env_params_keys(node_type, is_mirage_boot, expected_keys, unexpec @pytest.mark.parametrize( - 'node_type, env_types, should_fail', + 'env_types, should_fail', [ - (NodeType.REGULAR, ALLOWED_SKALE_ENV_TYPES, False), - (NodeType.REGULAR, ALLOWED_MIRAGE_ENV_TYPES, True), - 
(NodeType.SYNC, ALLOWED_SKALE_ENV_TYPES, False), - (NodeType.SYNC, ALLOWED_MIRAGE_ENV_TYPES, True), - (NodeType.MIRAGE, ALLOWED_MIRAGE_ENV_TYPES, False), - (NodeType.MIRAGE, ALLOWED_SKALE_ENV_TYPES, True), - (NodeType.REGULAR, ['invalid'], True), - (NodeType.SYNC, ['invalid'], True), - (NodeType.MIRAGE, ['invalid'], True), + (ALLOWED_ENV_TYPES, False), + (['invalid'], True), ], ids=[ - 'correct_env_regular', - 'incorrect_env_regular', - 'correct_env_sync', - 'incorrect_env_sync', - 'correct_env_mirage', - 'incorrect_env_mirage', - 'invalid_regular', - 'invalid_sync', - 'invalid_mirage', + 'correct_env', + 'invalid_env', ], ) -def test_valid_env_types(node_type, env_types, should_fail): +def test_env_types(env_types, should_fail): for env_type in env_types: if should_fail: with pytest.raises(SystemExit): - validate_env_type(node_type=node_type, env_type=env_type) + validate_env_type(env_type=env_type) else: - validate_env_type(node_type=node_type, env_type=env_type) - - -def test_invalid_env_type(): - with pytest.raises(SystemExit) as excinfo: - validate_env_type(node_type=NodeType.REGULAR, env_type='invalid') - assert excinfo.value.code == CLIExitCodes.FAILURE.value + validate_env_type(env_type=env_type) def test_get_chain_id_success(monkeypatch): @@ -244,7 +223,7 @@ def test_validate_env_alias_or_address_with_alias(requests_mock): validate_env_alias_or_address('test-alias', ContractType.IMA, ENDPOINT) -@pytest.mark.parametrize('env_type', ALLOWED_MIRAGE_ENV_TYPES) +@pytest.mark.parametrize('env_type', ALLOWED_ENV_TYPES) @pytest.mark.parametrize( 'required_params, key_to_remove, should_fail', [ @@ -280,9 +259,9 @@ def test_validate_env_params_mirage( if should_fail: with pytest.raises(SystemExit): - validate_env_params(node_type=NodeType.MIRAGE, params=params) + validate_env_params(params=params) else: - validate_env_params(node_type=NodeType.MIRAGE, params=params) + validate_env_params(params=params) @pytest.mark.parametrize( From 
0c33f0f863bb3ca0f936cc3db7fc557846aa4ad3 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20Gon=C3=A7alves?= Date: Mon, 12 May 2025 17:45:47 +0100 Subject: [PATCH 064/332] Continued work in fixing tests after mirage static params changes --- README.md | 2 +- tests/.skale/config/static_params.yaml | 57 ------ tests/conftest.py | 242 +++++++++++++++++++++---- tests/core/core_node_test.py | 2 +- 4 files changed, 204 insertions(+), 99 deletions(-) diff --git a/README.md b/README.md index 11e06eae..b4d0e691 100644 --- a/README.md +++ b/README.md @@ -700,7 +700,7 @@ Required environment variables in `ENV_FILE`: - `MANAGER_CONTRACTS` - SKALE Manager alias or address. - `IMA_CONTRACTS` - IMA alias or address (_Note: Required by boot service, may not be used by Mirage itself_). - `FILEBEAT_HOST` - URL/IP:Port of the Filebeat log server. -- `ENV_TYPE` - Environment type (e.g., 'mainnet-mirage', 'devnet-mirage'). +- `ENV_TYPE` - Environment type (e.g., 'mainnet', 'devnet'). Optional variables: diff --git a/tests/.skale/config/static_params.yaml b/tests/.skale/config/static_params.yaml index 09b51ae6..a0ea143f 100644 --- a/tests/.skale/config/static_params.yaml +++ b/tests/.skale/config/static_params.yaml @@ -16,63 +16,6 @@ common: db_storage: 0.2 # leveldb may use x2 storage, so 0.4 divided by 2, actually using 0.4 shared_space_coefficient: 1 envs: - mainnet-mirage: - server: - cpu_total: 8 - cpu_physical: 1 - memory: 32000000000 - swap: 16000000000 - disk: 500000000000 - - package: - iptables-persistent: 1.0.4 - btrfs-progs: 4.15.1 - lsof: "4.89" - psmisc: 23.1-1 - lvm2: disabled - - docker: - docker-api: 1.41.0 - docker-engine: 20.10.7 - docker-compose: 1.27.4 - - schain: - snapshotIntervalSec: 86400 - emptyBlockIntervalMs: 10000 - snapshotDownloadTimeout: 18000 - snapshotDownloadInactiveTimeout: 120 - - schain_cmd: ["-v 2", "--aa no"] - - devnet-mirage: - server: - cpu_total: 1 - cpu_physical: 1 - memory: 2000000000 - swap: 2000000000 - disk: 80000000000 - - package: - 
iptables-persistent: 1.0.4 - btrfs-progs: 4.15.1 - lsof: "4.89" - psmisc: 23.1-1 - lvm2: disabled - - docker: - docker-api: 1.41.0 - docker-engine: 20.10.7 - docker-compose: 1.27.4 - - schain: - snapshotIntervalSec: 86400 - emptyBlockIntervalMs: 10000 - snapshotDownloadTimeout: 18000 - snapshotDownloadInactiveTimeout: 120 - - schain_cmd: - ["-v 3", "--web3-trace", "--enable-debug-behavior-apis", "--aa no"] - mainnet: server: cpu_total: 8 diff --git a/tests/conftest.py b/tests/conftest.py index 65cb19ec..3bc4e0fb 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -37,6 +37,7 @@ NGINX_CONTAINER_NAME, REMOVED_CONTAINERS_FOLDER_PATH, STATIC_PARAMS_FILEPATH, + MIRAGE_STATIC_PARAMS_FILEPATH, SCHAIN_NODE_DATA_PATH, ) from node_cli.configs.node_options import NODE_OPTIONS_FILEPATH @@ -49,46 +50,6 @@ TEST_ENV_PARAMS = """ -mainnet-mirage: - server: - cpu_total: 8 - cpu_physical: 1 - memory: 32000000000 - swap: 16000000000 - disk: 500000000000 - - package: - iptables-persistent: 1.0.4 - btrfs-progs: 4.15.1 - lsof: "4.89" - psmisc: 23.1-1 - lvm2: disabled - - docker: - docker-api: 1.41.0 - docker-engine: 20.10.7 - docker-compose: 1.27.4 - - devnet-mirage: - server: - cpu_total: 1 - cpu_physical: 1 - memory: 2000000000 - swap: 2000000000 - disk: 80000000000 - - package: - iptables-persistent: 1.0.4 - btrfs-progs: 4.15.1 - lsof: "4.89" - psmisc: 23.1-1 - lvm2: disabled - - docker: - docker-api: 1.41.0 - docker-engine: 20.10.7 - docker-compose: 1.27.4 - mainnet: server: cpu_total: 4 @@ -149,6 +110,197 @@ docker-engine: 1.1.3 """ +MIRAGE_TEST_ENV_PARAMS = """ +common: + schain: + shared_space_coefficient: 1 +envs: + mainnet: + server: + cpu_total: 8 + cpu_physical: 1 + memory: 32000000000 + swap: 16000000000 + disk: 1900000000000 + + package: + iptables-persistent: 1.0.4 + lvm2: disabled + btrfs-progs: 4.15.1 + lsof: "4.89" + psmisc: 23.1-1 + + docker: + docker-api: 1.41.0 + docker-engine: 20.10.7 + docker-compose: 1.27.4 + + schain: + snapshotIntervalSec: 86400 + 
emptyBlockIntervalMs: 10000 + snapshotDownloadTimeout: 18000 + snapshotDownloadInactiveTimeout: 120 + contractStorageLimit: 1000000000000000000 + dbStorageLimit: 1000000000000000000 + maxConsensusStorageBytes: 1000000000000000000 + + skaled_cmd: ["-v 2", "--aa no"] + + node: + bindIP: "0.0.0.0" + logLevel: "info" + logLevelConfig: "info" + pg-threads: 10 + pg-threads-limit: 10 + minCacheSize: 8000000 + maxCacheSize: 16000000 + collectionQueueSize: 20 + collectionDuration: 60 + transactionQueueSize: 1000 + transactionQueueLimitBytes: 69206016 + futureTransactionQueueLimitBytes: 140509184 + maxOpenLeveldbFiles: 1000 + + testnet: + server: + cpu_total: 8 + cpu_physical: 1 + memory: 32000000000 + swap: 16000000000 + disk: 200000000000 + + package: + iptables-persistent: 1.0.4 + lvm2: disabled + btrfs-progs: 4.15.1 + lsof: "4.89" + psmisc: 23.1-1 + + docker: + docker-api: 1.41.0 + docker-engine: 20.10.7 + docker-compose: 1.27.4 + + schain: + snapshotIntervalSec: 86400 + emptyBlockIntervalMs: 10000 + snapshotDownloadTimeout: 18000 + snapshotDownloadInactiveTimeout: 120 + contractStorageLimit: 1000000000000000000 + dbStorageLimit: 1000000000000000000 + maxConsensusStorageBytes: 1000000000000000000 + + skaled_cmd: ["-v 2", "--aa no"] + + node: + bindIP: "0.0.0.0" + logLevel: "info" + logLevelConfig: "info" + pg-threads: 10 + pg-threads-limit: 10 + minCacheSize: 8000000 + maxCacheSize: 16000000 + collectionQueueSize: 20 + collectionDuration: 60 + transactionQueueSize: 1000 + transactionQueueLimitBytes: 69206016 + futureTransactionQueueLimitBytes: 140509184 + maxOpenLeveldbFiles: 1000 + + qanet: + server: + cpu_total: 8 + cpu_physical: 1 + memory: 32000000000 + swap: 16000000000 + disk: 200000000000 + + package: + iptables-persistent: 1.0.4 + lvm2: disabled + btrfs-progs: 4.15.1 + lsof: "4.89" + psmisc: 23.1-1 + + docker: + docker-api: 1.41.0 + docker-engine: 20.10.7 + docker-compose: 1.27.4 + + schain: + snapshotIntervalSec: 3600 + emptyBlockIntervalMs: 10000 + 
snapshotDownloadTimeout: 18000 + snapshotDownloadInactiveTimeout: 120 + contractStorageLimit: 1000000000000000000 + dbStorageLimit: 1000000000000000000 + maxConsensusStorageBytes: 1000000000000000000 + + skaled_cmd: ["-v 2", "--aa no"] + + node: + bindIP: "0.0.0.0" + logLevel: "info" + logLevelConfig: "info" + pg-threads: 10 + pg-threads-limit: 10 + minCacheSize: 8000000 + maxCacheSize: 16000000 + collectionQueueSize: 20 + collectionDuration: 60 + transactionQueueSize: 1000 + transactionQueueLimitBytes: 69206016 + futureTransactionQueueLimitBytes: 140509184 + maxOpenLeveldbFiles: 1000 + + devnet: + server: + cpu_total: 1 + cpu_physical: 1 + memory: 2000000000 + swap: 2000000000 + disk: 80000000000 + + package: + iptables-persistent: 1.0.4 + lvm2: disabled + btrfs-progs: 4.15.1 + lsof: "4.89" + psmisc: 23.1-1 + + docker: + docker-api: 1.41.0 + docker-engine: 20.10.7 + docker-compose: 1.27.4 + + schain: + snapshotIntervalSec: 3600 + emptyBlockIntervalMs: 10000 + snapshotDownloadTimeout: 18000 + snapshotDownloadInactiveTimeout: 120 + contractStorageLimit: 1000000000000000000 + dbStorageLimit: 1000000000000000000 + maxConsensusStorageBytes: 1000000000000000000 + + skaled_cmd: + ["-v 3", "--web3-trace", "--enable-debug-behavior-apis", "--aa no"] + + node: + bindIP: "0.0.0.0" + logLevel: "info" + logLevelConfig: "info" + pg-threads: 10 + pg-threads-limit: 10 + minCacheSize: 8000000 + maxCacheSize: 16000000 + collectionQueueSize: 20 + collectionDuration: 60 + transactionQueueSize: 1000 + transactionQueueLimitBytes: 69206016 + futureTransactionQueueLimitBytes: 140509184 + maxOpenLeveldbFiles: 1000 +""" + @pytest.fixture def net_params_file(): @@ -158,6 +310,16 @@ def net_params_file(): os.remove(STATIC_PARAMS_FILEPATH) +@pytest.fixture +def mirage_net_params_file(): + with open(MIRAGE_STATIC_PARAMS_FILEPATH, 'w') as f: + yaml.dump( + yaml.load(MIRAGE_TEST_ENV_PARAMS, Loader=yaml.Loader), stream=f, Dumper=yaml.Dumper + ) + yield MIRAGE_STATIC_PARAMS_FILEPATH + 
os.remove(MIRAGE_STATIC_PARAMS_FILEPATH) + + @pytest.fixture() def tmp_dir_path(): plain_path = 'tests/tmp/' diff --git a/tests/core/core_node_test.py b/tests/core/core_node_test.py index 8cb5c7f6..571595ce 100644 --- a/tests/core/core_node_test.py +++ b/tests/core/core_node_test.py @@ -188,7 +188,7 @@ def test_compose_node_env( if node_type == NodeType.SYNC: mock_get_validated.return_value['ENV_TYPE'] = 'devnet' elif node_type == NodeType.MIRAGE: - mock_get_validated.return_value['ENV_TYPE'] = 'mainnet-mirage' + mock_get_validated.return_value['ENV_TYPE'] = 'mainnet' else: mock_get_validated.return_value['ENV_TYPE'] = 'mainnet' From f65fb92d195c4079e922542264b2bb2014133c03 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20Gon=C3=A7alves?= Date: Mon, 12 May 2025 18:39:23 +0100 Subject: [PATCH 065/332] Fixed all tests that were broken due to mirage static params --- node_cli/cli/mirage_node.py | 3 --- tests/cli/mirage_cli_test.py | 9 +++------ tests/cli/node_test.py | 8 ++++---- tests/configs/configs_env_validate_test.py | 2 +- tests/conftest.py | 12 ++++++++++++ tests/core/core_checks_test.py | 6 ++++-- tests/core/core_mirage_test.py | 2 ++ tests/core/core_node_test.py | 2 +- 8 files changed, 27 insertions(+), 17 deletions(-) diff --git a/node_cli/cli/mirage_node.py b/node_cli/cli/mirage_node.py index e2b0163c..0fc32668 100644 --- a/node_cli/cli/mirage_node.py +++ b/node_cli/cli/mirage_node.py @@ -22,7 +22,6 @@ from node_cli.core.node import get_node_signature, backup from node_cli.core.mirage_node import restore_mirage from node_cli.utils.helper import error_exit, streamed_cmd, abort_if_false -from node_cli.utils.decorators import check_inited @click.group('node', help='Commands for regular Mirage Node operations.') @@ -31,7 +30,6 @@ def mirage_node_cli(): @mirage_node_cli.command('init', help='Initialize regular Mirage node operations (Placeholder).') -@check_inited def init_node(): click.echo("Placeholder: Command 'mirage node init' is not yet implemented.") 
@@ -39,7 +37,6 @@ def init_node(): @mirage_node_cli.command( 'register', help='Register Mirage node (Placeholder for regular operations).' ) -@check_inited def register_node(): click.echo("Placeholder: Command 'mirage node register' is not yet implemented.") diff --git a/tests/cli/mirage_cli_test.py b/tests/cli/mirage_cli_test.py index b909b8d1..1a96c22a 100644 --- a/tests/cli/mirage_cli_test.py +++ b/tests/cli/mirage_cli_test.py @@ -84,8 +84,7 @@ def test_mirage_node_signature_error(mock_signature_core): assert error_msg in result.output -@mock.patch('node_cli.utils.decorators.is_node_inited', return_value=True) -def test_mirage_node_init_placeholder(mock_is_inited, resource_alloc): +def test_mirage_node_init_placeholder(): runner = CliRunner() result = runner.invoke(init_node_placeholder, []) @@ -93,8 +92,7 @@ def test_mirage_node_init_placeholder(mock_is_inited, resource_alloc): assert "Placeholder: Command 'mirage node init' is not yet implemented." in result.output -@mock.patch('node_cli.utils.decorators.is_node_inited', return_value=True) -def test_mirage_node_register_placeholder(mock_is_inited, resource_alloc): +def test_mirage_node_register_placeholder(): runner = CliRunner() result = runner.invoke(register_node_placeholder, []) @@ -102,8 +100,7 @@ def test_mirage_node_register_placeholder(mock_is_inited, resource_alloc): assert "Placeholder: Command 'mirage node register' is not yet implemented." 
in result.output -@mock.patch('node_cli.utils.decorators.is_node_inited', return_value=True) -def test_mirage_node_update_placeholder(mock_is_inited, resource_alloc, valid_env_file): +def test_mirage_node_update_placeholder(valid_env_file): runner = CliRunner() result = runner.invoke(update_node_placeholder, ['--yes', valid_env_file]) diff --git a/tests/cli/node_test.py b/tests/cli/node_test.py index c337e5cf..d1dc82b7 100644 --- a/tests/cli/node_test.py +++ b/tests/cli/node_test.py @@ -54,7 +54,7 @@ init_default_logger() -def test_register_node(resource_alloc, mocked_g_config): +def test_register_node(inited_node, resource_alloc, mocked_g_config): resp_mock = response_mock(requests.codes.ok, {'status': 'ok', 'payload': None}) with mock.patch('node_cli.utils.decorators.is_node_inited', return_value=True): result = run_command_mock( @@ -70,7 +70,7 @@ def test_register_node(resource_alloc, mocked_g_config): ) # noqa -def test_register_node_with_error(resource_alloc, mocked_g_config): +def test_register_node_with_error(inited_node, resource_alloc, mocked_g_config): resp_mock = response_mock( requests.codes.ok, {'status': 'error', 'payload': ['Strange error']}, @@ -89,7 +89,7 @@ def test_register_node_with_error(resource_alloc, mocked_g_config): ) -def test_register_node_with_prompted_ip(resource_alloc, mocked_g_config): +def test_register_node_with_prompted_ip(inited_node, resource_alloc, mocked_g_config): resp_mock = response_mock(requests.codes.ok, {'status': 'ok', 'payload': None}) with mock.patch('node_cli.utils.decorators.is_node_inited', return_value=True): result = run_command_mock( @@ -106,7 +106,7 @@ def test_register_node_with_prompted_ip(resource_alloc, mocked_g_config): ) -def test_register_node_with_default_port(resource_alloc, mocked_g_config): +def test_register_node_with_default_port(inited_node, resource_alloc, mocked_g_config): resp_mock = response_mock(requests.codes.ok, {'status': 'ok', 'payload': None}) with 
mock.patch('node_cli.utils.decorators.is_node_inited', return_value=True): result = run_command_mock( diff --git a/tests/configs/configs_env_validate_test.py b/tests/configs/configs_env_validate_test.py index d8ef077f..03c955e9 100644 --- a/tests/configs/configs_env_validate_test.py +++ b/tests/configs/configs_env_validate_test.py @@ -297,7 +297,7 @@ def test_get_validated_env_config_mirage_success( if key == 'ENDPOINT': env_value = ENDPOINT if key == 'ENV_TYPE': - env_value = 'devnet-mirage' + env_value = 'devnet' if key == 'MANAGER_CONTRACTS': env_value = '0x' + '1' * 40 if key == 'IMA_CONTRACTS': diff --git a/tests/conftest.py b/tests/conftest.py index 3bc4e0fb..31fb7cd9 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -39,6 +39,7 @@ STATIC_PARAMS_FILEPATH, MIRAGE_STATIC_PARAMS_FILEPATH, SCHAIN_NODE_DATA_PATH, + NGINX_CONFIG_FILEPATH, ) from node_cli.configs.node_options import NODE_OPTIONS_FILEPATH from node_cli.configs.ssl import SSL_FOLDER_PATH @@ -393,6 +394,17 @@ def resource_alloc(): os.remove(RESOURCE_ALLOCATION_FILEPATH) +@pytest.fixture +def inited_node(): + path = pathlib.Path(NGINX_CONFIG_FILEPATH) + path.parent.mkdir(parents=True, exist_ok=True) + path.touch() + try: + yield + finally: + os.remove(NGINX_CONFIG_FILEPATH) + + @pytest.fixture def ssl_folder(): if os.path.isdir(SSL_FOLDER_PATH): diff --git a/tests/core/core_checks_test.py b/tests/core/core_checks_test.py index d30b391c..cd3a6acb 100644 --- a/tests/core/core_checks_test.py +++ b/tests/core/core_checks_test.py @@ -21,6 +21,8 @@ save_report, ) +from node_cli.utils.node_type import NodeType + @pytest.fixture def requirements_data(): @@ -377,8 +379,8 @@ def test_merge_report(): def test_get_static_params(tmp_config_dir): - params = get_static_params() + params = get_static_params(NodeType.REGULAR) shutil.copy(STATIC_PARAMS_FILEPATH, tmp_config_dir) - tmp_params = get_static_params(config_path=tmp_config_dir) + tmp_params = get_static_params(NodeType.REGULAR, 
config_path=tmp_config_dir) assert params['server']['cpu_total'] == 8 assert params == tmp_params diff --git a/tests/core/core_mirage_test.py b/tests/core/core_mirage_test.py index 68ac1ab5..8efde680 100644 --- a/tests/core/core_mirage_test.py +++ b/tests/core/core_mirage_test.py @@ -71,6 +71,7 @@ def test_migrate_mirage_boot( mock_is_alive, mock_is_user_valid, valid_env_file, + inited_node, resource_alloc, meta_file_v3, ): @@ -105,6 +106,7 @@ def test_update_mirage_boot( mock_is_alive, mock_is_user_valid, valid_env_file, + inited_node, resource_alloc, meta_file_v3, ): diff --git a/tests/core/core_node_test.py b/tests/core/core_node_test.py index 571595ce..b8db5935 100644 --- a/tests/core/core_node_test.py +++ b/tests/core/core_node_test.py @@ -297,7 +297,7 @@ def test_init_node(no_resource_file): # todo: write new init node test @pytest.mark.parametrize('node_type', [NodeType.REGULAR, NodeType.SYNC, NodeType.MIRAGE]) -def test_update_node(node_type, mocked_g_config, resource_file): +def test_update_node(node_type, mocked_g_config, resource_file, inited_node): env_filepath = './tests/test-env' resp_mock = response_mock(requests.codes.created) os.makedirs(NODE_DATA_PATH, exist_ok=True) From f6c0c4f7a57fd49c31cdd948fe35d7026db3f81f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20Gon=C3=A7alves?= Date: Tue, 13 May 2025 12:35:01 +0100 Subject: [PATCH 066/332] Addressed comments for PR #851 --- node_cli/core/mirage_boot.py | 2 - node_cli/core/mirage_node.py | 2 - tests/conftest.py | 271 --------------------------------- tests/core/core_mirage_test.py | 8 +- tests/core/core_node_test.py | 2 - text.yml | 8 - 6 files changed, 4 insertions(+), 289 deletions(-) diff --git a/node_cli/core/mirage_boot.py b/node_cli/core/mirage_boot.py index a060882f..5d16affa 100644 --- a/node_cli/core/mirage_boot.py +++ b/node_cli/core/mirage_boot.py @@ -29,11 +29,9 @@ from node_cli.utils.helper import error_exit from node_cli.utils.node_type import NodeType from 
node_cli.utils.print_formatters import print_node_cmd_error -from node_cli.utils.texts import Texts logger = logging.getLogger(__name__) -TEXTS = Texts() @check_not_inited diff --git a/node_cli/core/mirage_node.py b/node_cli/core/mirage_node.py index c72ca440..4a4101f9 100644 --- a/node_cli/core/mirage_node.py +++ b/node_cli/core/mirage_node.py @@ -28,12 +28,10 @@ from node_cli.utils.exit_codes import CLIExitCodes from node_cli.utils.helper import error_exit from node_cli.utils.node_type import NodeType -from node_cli.utils.texts import Texts from node_cli.operations import restore_mirage_op logger = logging.getLogger(__name__) -TEXTS = Texts() @check_not_inited diff --git a/tests/conftest.py b/tests/conftest.py index 31fb7cd9..a52ce82b 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -50,277 +50,6 @@ from tests.helper import TEST_META_V1, TEST_META_V2, TEST_META_V3, TEST_SCHAINS_MNT_DIR_SINGLE_CHAIN -TEST_ENV_PARAMS = """ -mainnet: - server: - cpu_total: 4 - cpu_physical: 4 - memory: 32 - swap: 16 - disk: 2000000000000 - - packages: - docker: 1.1.3 - docker-compose: 1.1.3 - iptables-persistant: 1.1.3 - lvm2: 1.1.1 - -testnet: - server: - cpu_total: 4 - cpu_physical: 4 - memory: 32 - swap: 16 - disk: 200000000000 - - packages: - docker: 1.1.3 - docker-compose: 1.1.3 - iptables-persistant: 1.1.3 - lvm2: 1.1.1 - -qanet: - server: - cpu_total: 4 - cpu_physical: 4 - memory: 32 - swap: 16 - disk: 200000000000 - - packages: - docker: 1.1.3 - docker-compose: 1.1.3 - iptables-persistant: 1.1.3 - lvm2: 1.1.1 - -devnet: - server: - cpu_total: 4 - cpu_physical: 4 - memory: 32 - swap: 16 - disk: 80000000000 - - packages: - iptables-persistant: 1.1.3 - lvm2: 1.1.1 - docker-compose: 1.1.3 - - docker: - docker-api: 1.1.3 - docker-engine: 1.1.3 -""" - -MIRAGE_TEST_ENV_PARAMS = """ -common: - schain: - shared_space_coefficient: 1 -envs: - mainnet: - server: - cpu_total: 8 - cpu_physical: 1 - memory: 32000000000 - swap: 16000000000 - disk: 1900000000000 - - package: - 
iptables-persistent: 1.0.4 - lvm2: disabled - btrfs-progs: 4.15.1 - lsof: "4.89" - psmisc: 23.1-1 - - docker: - docker-api: 1.41.0 - docker-engine: 20.10.7 - docker-compose: 1.27.4 - - schain: - snapshotIntervalSec: 86400 - emptyBlockIntervalMs: 10000 - snapshotDownloadTimeout: 18000 - snapshotDownloadInactiveTimeout: 120 - contractStorageLimit: 1000000000000000000 - dbStorageLimit: 1000000000000000000 - maxConsensusStorageBytes: 1000000000000000000 - - skaled_cmd: ["-v 2", "--aa no"] - - node: - bindIP: "0.0.0.0" - logLevel: "info" - logLevelConfig: "info" - pg-threads: 10 - pg-threads-limit: 10 - minCacheSize: 8000000 - maxCacheSize: 16000000 - collectionQueueSize: 20 - collectionDuration: 60 - transactionQueueSize: 1000 - transactionQueueLimitBytes: 69206016 - futureTransactionQueueLimitBytes: 140509184 - maxOpenLeveldbFiles: 1000 - - testnet: - server: - cpu_total: 8 - cpu_physical: 1 - memory: 32000000000 - swap: 16000000000 - disk: 200000000000 - - package: - iptables-persistent: 1.0.4 - lvm2: disabled - btrfs-progs: 4.15.1 - lsof: "4.89" - psmisc: 23.1-1 - - docker: - docker-api: 1.41.0 - docker-engine: 20.10.7 - docker-compose: 1.27.4 - - schain: - snapshotIntervalSec: 86400 - emptyBlockIntervalMs: 10000 - snapshotDownloadTimeout: 18000 - snapshotDownloadInactiveTimeout: 120 - contractStorageLimit: 1000000000000000000 - dbStorageLimit: 1000000000000000000 - maxConsensusStorageBytes: 1000000000000000000 - - skaled_cmd: ["-v 2", "--aa no"] - - node: - bindIP: "0.0.0.0" - logLevel: "info" - logLevelConfig: "info" - pg-threads: 10 - pg-threads-limit: 10 - minCacheSize: 8000000 - maxCacheSize: 16000000 - collectionQueueSize: 20 - collectionDuration: 60 - transactionQueueSize: 1000 - transactionQueueLimitBytes: 69206016 - futureTransactionQueueLimitBytes: 140509184 - maxOpenLeveldbFiles: 1000 - - qanet: - server: - cpu_total: 8 - cpu_physical: 1 - memory: 32000000000 - swap: 16000000000 - disk: 200000000000 - - package: - iptables-persistent: 1.0.4 - lvm2: 
disabled - btrfs-progs: 4.15.1 - lsof: "4.89" - psmisc: 23.1-1 - - docker: - docker-api: 1.41.0 - docker-engine: 20.10.7 - docker-compose: 1.27.4 - - schain: - snapshotIntervalSec: 3600 - emptyBlockIntervalMs: 10000 - snapshotDownloadTimeout: 18000 - snapshotDownloadInactiveTimeout: 120 - contractStorageLimit: 1000000000000000000 - dbStorageLimit: 1000000000000000000 - maxConsensusStorageBytes: 1000000000000000000 - - skaled_cmd: ["-v 2", "--aa no"] - - node: - bindIP: "0.0.0.0" - logLevel: "info" - logLevelConfig: "info" - pg-threads: 10 - pg-threads-limit: 10 - minCacheSize: 8000000 - maxCacheSize: 16000000 - collectionQueueSize: 20 - collectionDuration: 60 - transactionQueueSize: 1000 - transactionQueueLimitBytes: 69206016 - futureTransactionQueueLimitBytes: 140509184 - maxOpenLeveldbFiles: 1000 - - devnet: - server: - cpu_total: 1 - cpu_physical: 1 - memory: 2000000000 - swap: 2000000000 - disk: 80000000000 - - package: - iptables-persistent: 1.0.4 - lvm2: disabled - btrfs-progs: 4.15.1 - lsof: "4.89" - psmisc: 23.1-1 - - docker: - docker-api: 1.41.0 - docker-engine: 20.10.7 - docker-compose: 1.27.4 - - schain: - snapshotIntervalSec: 3600 - emptyBlockIntervalMs: 10000 - snapshotDownloadTimeout: 18000 - snapshotDownloadInactiveTimeout: 120 - contractStorageLimit: 1000000000000000000 - dbStorageLimit: 1000000000000000000 - maxConsensusStorageBytes: 1000000000000000000 - - skaled_cmd: - ["-v 3", "--web3-trace", "--enable-debug-behavior-apis", "--aa no"] - - node: - bindIP: "0.0.0.0" - logLevel: "info" - logLevelConfig: "info" - pg-threads: 10 - pg-threads-limit: 10 - minCacheSize: 8000000 - maxCacheSize: 16000000 - collectionQueueSize: 20 - collectionDuration: 60 - transactionQueueSize: 1000 - transactionQueueLimitBytes: 69206016 - futureTransactionQueueLimitBytes: 140509184 - maxOpenLeveldbFiles: 1000 -""" - - -@pytest.fixture -def net_params_file(): - with open(STATIC_PARAMS_FILEPATH, 'w') as f: - yaml.dump(yaml.load(TEST_ENV_PARAMS, Loader=yaml.Loader), 
stream=f, Dumper=yaml.Dumper) - yield STATIC_PARAMS_FILEPATH - os.remove(STATIC_PARAMS_FILEPATH) - - -@pytest.fixture -def mirage_net_params_file(): - with open(MIRAGE_STATIC_PARAMS_FILEPATH, 'w') as f: - yaml.dump( - yaml.load(MIRAGE_TEST_ENV_PARAMS, Loader=yaml.Loader), stream=f, Dumper=yaml.Dumper - ) - yield MIRAGE_STATIC_PARAMS_FILEPATH - os.remove(MIRAGE_STATIC_PARAMS_FILEPATH) - - @pytest.fixture() def tmp_dir_path(): plain_path = 'tests/tmp/' diff --git a/tests/core/core_mirage_test.py b/tests/core/core_mirage_test.py index 8efde680..62d58681 100644 --- a/tests/core/core_mirage_test.py +++ b/tests/core/core_mirage_test.py @@ -18,7 +18,7 @@ def test_restore_mirage( valid_env_file, ensure_meta_removed, ): - mock_env = {'ENV_TYPE': 'devnet-mirage'} + mock_env = {'ENV_TYPE': 'devnet'} mock_compose_env.return_value = mock_env mock_restore_op.return_value = True backup_path = '/fake/backup' @@ -44,7 +44,7 @@ def test_init_mirage_boot( valid_env_file, ensure_meta_removed, ): - mock_env = {'ENV_TYPE': 'devnet-mirage'} + mock_env = {'ENV_TYPE': 'devnet'} mock_compose_env.return_value = mock_env init_boot(valid_env_file) @@ -75,7 +75,7 @@ def test_migrate_mirage_boot( resource_alloc, meta_file_v3, ): - mock_env = {'ENV_TYPE': 'devnet-mirage'} + mock_env = {'ENV_TYPE': 'devnet'} mock_compose_env.return_value = mock_env mock_migrate_op.return_value = True pull_config_for_schain = 'mirage' @@ -110,7 +110,7 @@ def test_update_mirage_boot( resource_alloc, meta_file_v3, ): - mock_env = {'ENV_TYPE': 'devnet-mirage'} + mock_env = {'ENV_TYPE': 'devnet'} mock_compose_env.return_value = mock_env mock_update_op.return_value = True pull_config_for_schain = 'mirage' diff --git a/tests/core/core_node_test.py b/tests/core/core_node_test.py index b8db5935..c71e4683 100644 --- a/tests/core/core_node_test.py +++ b/tests/core/core_node_test.py @@ -187,8 +187,6 @@ def test_compose_node_env( mock_get_validated.return_value = valid_env_params.copy() if node_type == NodeType.SYNC: 
mock_get_validated.return_value['ENV_TYPE'] = 'devnet' - elif node_type == NodeType.MIRAGE: - mock_get_validated.return_value['ENV_TYPE'] = 'mainnet' else: mock_get_validated.return_value['ENV_TYPE'] = 'mainnet' diff --git a/text.yml b/text.yml index 0cf1fce1..80461c76 100644 --- a/text.yml +++ b/text.yml @@ -76,11 +76,3 @@ lvmpy: heal: help: Run healing procedure for lvmpy server prompt: Are you sure you want run healing procedure? - -mirage: - init: - help: Initialize sync SKALE node - indexer: Run sync node in indexer mode (disable block rotation) - archive: Enable historic state and disable block rotation - snapshot_from: IP of the node to take snapshot from - snapshot: Start sync node from snapshot \ No newline at end of file From 119541d21face52bb3f747c35a5f79f7279253a9 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20Gon=C3=A7alves?= Date: Tue, 13 May 2025 12:35:12 +0100 Subject: [PATCH 067/332] Fixed unused imports --- tests/conftest.py | 3 --- 1 file changed, 3 deletions(-) diff --git a/tests/conftest.py b/tests/conftest.py index a52ce82b..369b2374 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -27,7 +27,6 @@ import docker import mock import pytest -import yaml from node_cli.configs import ( CONTAINER_CONFIG_TMP_PATH, @@ -36,8 +35,6 @@ META_FILEPATH, NGINX_CONTAINER_NAME, REMOVED_CONTAINERS_FOLDER_PATH, - STATIC_PARAMS_FILEPATH, - MIRAGE_STATIC_PARAMS_FILEPATH, SCHAIN_NODE_DATA_PATH, NGINX_CONFIG_FILEPATH, ) From 1ccbb0e8c9bf25517ce160a9a1634064cd528941 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20Gon=C3=A7alves?= Date: Tue, 13 May 2025 13:15:28 +0100 Subject: [PATCH 068/332] Removed `Texts` class, moved to using `safe_load_texts` --- node_cli/cli/exit.py | 4 ++-- node_cli/cli/health.py | 4 ++-- node_cli/cli/lvmpy.py | 4 ++-- node_cli/cli/node.py | 3 ++- node_cli/cli/resources_allocation.py | 3 ++- node_cli/cli/ssl.py | 3 ++- node_cli/cli/sync_node.py | 2 +- node_cli/core/host.py | 2 +- node_cli/core/node.py | 4 ++-- 
node_cli/main.py | 3 ++- node_cli/utils/decorators.py | 4 ++-- node_cli/utils/helper.py | 8 -------- node_cli/utils/print_formatters.py | 4 ++-- node_cli/utils/texts.py | 29 +++++++++++++++------------- 14 files changed, 38 insertions(+), 39 deletions(-) diff --git a/node_cli/cli/exit.py b/node_cli/cli/exit.py index 92322bd9..c7f57db3 100644 --- a/node_cli/cli/exit.py +++ b/node_cli/cli/exit.py @@ -24,10 +24,10 @@ from node_cli.utils.print_formatters import print_exit_status from node_cli.utils.helper import error_exit, get_request, post_request, abort_if_false from node_cli.utils.exit_codes import CLIExitCodes -from node_cli.utils.texts import Texts +from node_cli.utils.texts import safe_load_texts logger = logging.getLogger(__name__) -TEXTS = Texts() +TEXTS = safe_load_texts() BLUEPRINT_NAME = 'node' diff --git a/node_cli/cli/health.py b/node_cli/cli/health.py index de54d186..27607ecb 100644 --- a/node_cli/cli/health.py +++ b/node_cli/cli/health.py @@ -18,12 +18,12 @@ # along with this program. If not, see . 
import click -from node_cli.utils.texts import Texts +from node_cli.utils.texts import safe_load_texts from node_cli.core.health import get_containers, get_schains_checks, get_sgx_info -G_TEXTS = Texts() +G_TEXTS = safe_load_texts() TEXTS = G_TEXTS['health'] diff --git a/node_cli/cli/lvmpy.py b/node_cli/cli/lvmpy.py index d795a78e..2e9772aa 100644 --- a/node_cli/cli/lvmpy.py +++ b/node_cli/cli/lvmpy.py @@ -20,11 +20,11 @@ import click from node_cli.utils.helper import abort_if_false -from node_cli.utils.texts import Texts +from node_cli.utils.texts import safe_load_texts from lvmpy.src.app import run as run_lvmpy from lvmpy.src.health import heal_service -G_TEXTS = Texts() +G_TEXTS = safe_load_texts() TEXTS = G_TEXTS['lvmpy'] diff --git a/node_cli/cli/node.py b/node_cli/cli/node.py index 4c9e40e3..a7f3d17c 100644 --- a/node_cli/cli/node.py +++ b/node_cli/cli/node.py @@ -39,7 +39,8 @@ from node_cli.configs import DEFAULT_NODE_BASE_PORT from node_cli.configs.env import ALLOWED_ENV_TYPES from node_cli.utils.decorators import check_inited -from node_cli.utils.helper import abort_if_false, safe_load_texts, streamed_cmd, IP_TYPE +from node_cli.utils.helper import abort_if_false, streamed_cmd, IP_TYPE +from node_cli.utils.texts import safe_load_texts from node_cli.utils.meta import get_meta_info from node_cli.utils.print_formatters import print_meta_info diff --git a/node_cli/cli/resources_allocation.py b/node_cli/cli/resources_allocation.py index 9a518a2d..01825350 100644 --- a/node_cli/cli/resources_allocation.py +++ b/node_cli/cli/resources_allocation.py @@ -24,7 +24,8 @@ get_resource_allocation_info, generate_resource_allocation_config, ) -from node_cli.utils.helper import abort_if_false, safe_load_texts +from node_cli.utils.helper import abort_if_false +from node_cli.utils.texts import safe_load_texts from node_cli.utils.node_type import NodeType TEXTS = safe_load_texts() diff --git a/node_cli/cli/ssl.py b/node_cli/cli/ssl.py index e0b63f7c..e371e8ce 100644 --- 
a/node_cli/cli/ssl.py +++ b/node_cli/cli/ssl.py @@ -21,7 +21,8 @@ from terminaltables import SingleTable from node_cli.utils.exit_codes import CLIExitCodes -from node_cli.utils.helper import safe_load_texts, error_exit +from node_cli.utils.helper import error_exit +from node_cli.utils.texts import safe_load_texts from node_cli.configs.ssl import DEFAULT_SSL_CHECK_PORT, SSL_CERT_FILEPATH, SSL_KEY_FILEPATH from node_cli.core.ssl import check_cert, upload_cert, cert_status diff --git a/node_cli/cli/sync_node.py b/node_cli/cli/sync_node.py index ea4ee280..5f0a5217 100644 --- a/node_cli/cli/sync_node.py +++ b/node_cli/cli/sync_node.py @@ -25,10 +25,10 @@ from node_cli.utils.helper import ( abort_if_false, error_exit, - safe_load_texts, streamed_cmd, URL_TYPE, ) +from node_cli.utils.texts import safe_load_texts G_TEXTS = safe_load_texts() diff --git a/node_cli/core/host.py b/node_cli/core/host.py index e3279b75..7c9b2df6 100644 --- a/node_cli/core/host.py +++ b/node_cli/core/host.py @@ -54,7 +54,7 @@ from node_cli.core.nftables import NFTablesManager from node_cli.utils.helper import safe_mkdir -from node_cli.utils.helper import safe_load_texts +from node_cli.utils.texts import safe_load_texts TEXTS = safe_load_texts() diff --git a/node_cli/core/node.py b/node_cli/core/node.py index 19f4a688..f6b534cd 100644 --- a/node_cli/core/node.py +++ b/node_cli/core/node.py @@ -70,7 +70,7 @@ post_request, ) from node_cli.utils.meta import get_meta_info -from node_cli.utils.texts import Texts +from node_cli.utils.texts import safe_load_texts from node_cli.utils.exit_codes import CLIExitCodes from node_cli.utils.decorators import check_not_inited, check_inited, check_user from node_cli.utils.docker_utils import ( @@ -86,7 +86,7 @@ logger = logging.getLogger(__name__) -TEXTS = Texts() +TEXTS = safe_load_texts() BASE_CONTAINERS_AMOUNT = 5 BLUEPRINT_NAME = 'node' diff --git a/node_cli/main.py b/node_cli/main.py index d1d152ec..8ec7420a 100644 --- a/node_cli/main.py +++ 
b/node_cli/main.py @@ -43,7 +43,8 @@ from node_cli.core.host import init_logs_dir from node_cli.utils.node_type import NodeType from node_cli.configs import LONG_LINE -from node_cli.utils.helper import safe_load_texts, init_default_logger +from node_cli.utils.helper import init_default_logger +from node_cli.utils.texts import safe_load_texts from node_cli.utils.helper import error_exit TEXTS = safe_load_texts() diff --git a/node_cli/utils/decorators.py b/node_cli/utils/decorators.py index f0d11e00..f72fea60 100644 --- a/node_cli/utils/decorators.py +++ b/node_cli/utils/decorators.py @@ -22,11 +22,11 @@ from node_cli.core.host import is_node_inited from node_cli.utils.global_config import get_system_user from node_cli.utils.helper import error_exit, is_user_valid, get_g_conf_user -from node_cli.utils.texts import Texts +from node_cli.utils.texts import safe_load_texts from node_cli.utils.exit_codes import CLIExitCodes -TEXTS = Texts() +TEXTS = safe_load_texts() def check_not_inited(f): diff --git a/node_cli/utils/helper.py b/node_cli/utils/helper.py index 07dbef19..999720ea 100644 --- a/node_cli/utils/helper.py +++ b/node_cli/utils/helper.py @@ -187,14 +187,6 @@ def safe_get_config(config, key): return None -def safe_load_texts(): - with open(TEXT_FILE, 'r') as stream: - try: - return yaml.safe_load(stream) - except yaml.YAMLError as exc: - print(exc) - - def safe_load_yml(filepath): with open(filepath, 'r') as stream: try: diff --git a/node_cli/utils/print_formatters.py b/node_cli/utils/print_formatters.py index b5448d45..3ba9171b 100644 --- a/node_cli/utils/print_formatters.py +++ b/node_cli/utils/print_formatters.py @@ -29,9 +29,9 @@ from node_cli.configs import LONG_LINE from node_cli.configs.cli_logger import DEBUG_LOG_FILEPATH from node_cli.utils.meta import CliMeta -from node_cli.utils.texts import Texts +from node_cli.utils.texts import safe_load_texts -TEXTS = Texts() +TEXTS = safe_load_texts() def print_wallet_info(wallet): diff --git 
a/node_cli/utils/texts.py b/node_cli/utils/texts.py index d4813d27..718edd75 100644 --- a/node_cli/utils/texts.py +++ b/node_cli/utils/texts.py @@ -1,6 +1,16 @@ # -*- coding: utf-8 -*- # # This file is part of node-cli +# +# Copyright (C) 2025-Present SKALE Labs +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # @@ -11,16 +21,9 @@ from node_cli.configs import TEXT_FILE -class Texts: - def __init__(self): - self._texts = self._load() - - def __getitem__(self, key): - return self._texts.get(key) - - def _load(self): - with open(TEXT_FILE, 'r') as stream: - try: - return yaml.safe_load(stream) - except yaml.YAMLError as exc: - print(exc) +def safe_load_texts(): + with open(TEXT_FILE, 'r') as stream: + try: + return yaml.safe_load(stream) + except yaml.YAMLError as exc: + print(exc) From 58b335f83b489af64b373b519731f43b71868975 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20Gon=C3=A7alves?= Date: Tue, 13 May 2025 13:15:34 +0100 Subject: [PATCH 069/332] Removed unused import --- node_cli/utils/helper.py | 1 - 1 file changed, 1 deletion(-) diff --git a/node_cli/utils/helper.py b/node_cli/utils/helper.py index 999720ea..c9b21d9f 100644 --- a/node_cli/utils/helper.py +++ b/node_cli/utils/helper.py @@ -50,7 +50,6 @@ from node_cli.utils.print_formatters import print_err_response from node_cli.utils.exit_codes import CLIExitCodes from node_cli.configs import ( - TEXT_FILE, ADMIN_HOST, ADMIN_PORT, HIDE_STREAM_LOG, From aa2e90b63fc26329ba5b381789ed9652eac2df41 Mon Sep 17 00:00:00 2001 From: 
=?UTF-8?q?Andr=C3=A9=20Gon=C3=A7alves?= Date: Tue, 13 May 2025 13:19:38 +0100 Subject: [PATCH 070/332] Fixed README indentation. Changed docker container removal in publish pipeline. --- .github/workflows/publish.yml | 2 +- README.md | 34 +++++++++++++++++----------------- 2 files changed, 18 insertions(+), 18 deletions(-) diff --git a/.github/workflows/publish.yml b/.github/workflows/publish.yml index 4365594a..f4e46399 100644 --- a/.github/workflows/publish.yml +++ b/.github/workflows/publish.yml @@ -108,7 +108,7 @@ jobs: scripts/build.sh ${{ needs.create_release.outputs.version }} ${{ needs.create_release.outputs.branch }} ${{ matrix.build_type }} echo "Contents of dist directory:" ls -altr ${{ github.workspace }}/dist/ - docker rm -f $(docker ps -aq) + docker rm -f $(docker ps -aq) || true - name: Save sha512sum for ${{ steps.asset_details.outputs.FINAL_ASSET_NAME }} run: | diff --git a/README.md b/README.md index b4d0e691..6353d4f1 100644 --- a/README.md +++ b/README.md @@ -9,26 +9,26 @@ SKALE Node CLI, part of the SKALE suite of validator tools, is the command line ## Table of Contents 1. [Installation](#installation) - 1.1 [Standard Node Binary](#standard-node-binary) - 1.2 [Sync Node Binary](#sync-node-binary) - 1.3 [Mirage Node Binary](#mirage-node-binary) - 1.4 [Permissions and Testing](#permissions-and-testing) + 1. [Standard Node Binary](#standard-node-binary) + 2. [Sync Node Binary](#sync-node-binary) + 3. [Mirage Node Binary](#mirage-node-binary) + 4. [Permissions and Testing](#permissions-and-testing) 2. 
[Standard Node Usage (`skale` - Normal Build)](#standard-node-usage-skale---normal-build) - 2.1 [Top level commands (Standard)](#top-level-commands-standard) - 2.2 [Node commands (Standard)](#node-commands-standard) - 2.3 [Wallet commands (Standard)](#wallet-commands-standard) - 2.4 [sChain commands (Standard)](#schain-commands-standard) - 2.5 [Health commands (Standard)](#health-commands-standard) - 2.6 [SSL commands (Standard)](#ssl-commands-standard) - 2.7 [Logs commands (Standard)](#logs-commands-standard) - 2.8 [Resources allocation commands (Standard)](#resources-allocation-commands-standard) + 1. [Top level commands (Standard)](#top-level-commands-standard) + 2. [Node commands (Standard)](#node-commands-standard) + 3. [Wallet commands (Standard)](#wallet-commands-standard) + 4. [sChain commands (Standard)](#schain-commands-standard) + 5. [Health commands (Standard)](#health-commands-standard) + 6. [SSL commands (Standard)](#ssl-commands-standard) + 7. [Logs commands (Standard)](#logs-commands-standard) + 8. [Resources allocation commands (Standard)](#resources-allocation-commands-standard) 3. [Sync Node Usage (`skale` - Sync Build)](#sync-node-usage-skale---sync-build) - 3.1 [Top level commands (Sync)](#top-level-commands-sync) - 3.2 [Sync node commands](#sync-node-commands) + 1. [Top level commands (Sync)](#top-level-commands-sync) + 2. [Sync node commands](#sync-node-commands) 4. [Mirage Node Usage (`mirage`)](#mirage-node-usage-mirage) - 4.1 [Top level commands (Mirage)](#top-level-commands-mirage) - 4.2 [Mirage Boot commands](#mirage-boot-commands) - 4.3 [Mirage Node commands](#mirage-node-commands) + 1. [Top level commands (Mirage)](#top-level-commands-mirage) + 2. [Mirage Boot commands](#mirage-boot-commands) + 3. [Mirage Node commands](#mirage-node-commands) 5. [Exit codes](#exit-codes) 6. 
[Development](#development) From 2a9e5189dc2bf2ea49d6fe896278db02f139d249 Mon Sep 17 00:00:00 2001 From: Dmytro Date: Tue, 13 May 2025 13:42:18 +0100 Subject: [PATCH 071/332] Fix mirage boot and mirage node CLI namespaces --- node_cli/cli/mirage_boot.py | 23 +++++++++++------------ node_cli/cli/mirage_node.py | 21 ++++++++++++--------- 2 files changed, 23 insertions(+), 21 deletions(-) diff --git a/node_cli/cli/mirage_boot.py b/node_cli/cli/mirage_boot.py index 4419b92a..38126751 100644 --- a/node_cli/cli/mirage_boot.py +++ b/node_cli/cli/mirage_boot.py @@ -25,21 +25,24 @@ from node_cli.utils.helper import streamed_cmd, IP_TYPE, error_exit, abort_if_false -@click.group('boot', help='Commands for the Mirage Boot phase.') +@click.group() def mirage_boot_cli(): pass -@mirage_boot_cli.command('init', help='Initialize Mirage node (Boot Phase).') +@mirage_boot_cli.group(help='Commands for the Mirage Boot phase.') +def boot(): + pass + + +@boot.command('init', help='Initialize Mirage node (Boot Phase).') @click.argument('env_file') @streamed_cmd def init_boot(env_file): init(env_file) -@mirage_boot_cli.command( - 'register', help='Register Mirage node in SKALE Manager (during Boot Phase).' -) +@boot.command('register', help='Register Mirage node in SKALE Manager (during Boot Phase).') @click.option( '--name', '-n', required=True, prompt='Enter mirage node name', help='Mirage node name' ) @@ -58,9 +61,7 @@ def register_boot(name, ip, port, domain): register(name=name, p2p_ip=ip, public_ip=ip, port=port, domain_name=domain) -@mirage_boot_cli.command( - 'signature', help='Get mirage node signature for a validator ID (during Boot Phase).' 
-) +@boot.command('signature', help='Get mirage node signature for a validator ID (during Boot Phase).') @click.argument('validator_id') def signature_boot(validator_id): res = get_node_signature(validator_id) @@ -69,9 +70,7 @@ def signature_boot(validator_id): print(f'Signature: {res}') -@mirage_boot_cli.command( - 'migrate', help='Migrate mirage node from Mirage Boot Phase to Mirage Main Phase.' -) +@boot.command('migrate', help='Migrate mirage node from Mirage Boot Phase to Mirage Main Phase.') @click.option( '--yes', is_flag=True, @@ -86,7 +85,7 @@ def migrate_boot(env_file, pull_config_for_schain): migrate(env_file, pull_config_for_schain) -@mirage_boot_cli.command('update', help='Update Mirage node from .env file') +@boot.command('update', help='Update Mirage node from .env file') @click.option( '--yes', is_flag=True, diff --git a/node_cli/cli/mirage_node.py b/node_cli/cli/mirage_node.py index 0fc32668..86ba8cc0 100644 --- a/node_cli/cli/mirage_node.py +++ b/node_cli/cli/mirage_node.py @@ -24,24 +24,27 @@ from node_cli.utils.helper import error_exit, streamed_cmd, abort_if_false -@click.group('node', help='Commands for regular Mirage Node operations.') +@click.group() def mirage_node_cli(): pass -@mirage_node_cli.command('init', help='Initialize regular Mirage node operations (Placeholder).') +@mirage_node_cli.group(help='Commands for regular Mirage Node operations.') +def node(): + pass + + +@node.command('init', help='Initialize regular Mirage node operations (Placeholder).') def init_node(): click.echo("Placeholder: Command 'mirage node init' is not yet implemented.") -@mirage_node_cli.command( - 'register', help='Register Mirage node (Placeholder for regular operations).' 
-) +@node.command('register', help='Register Mirage node (Placeholder for regular operations).') def register_node(): click.echo("Placeholder: Command 'mirage node register' is not yet implemented.") -@mirage_node_cli.command('update', help='Update Mirage.') +@node.command('update', help='Update Mirage.') @click.option( '--yes', is_flag=True, @@ -57,7 +60,7 @@ def update_node(env_file, pull_config_for_schain, unsafe_ok): click.echo("Placeholder: Command 'mirage node update' is not yet implemented.") -@mirage_node_cli.command('signature', help='Get mirage node signature for a validator ID.') +@node.command('signature', help='Get mirage node signature for a validator ID.') @click.argument('validator_id') def signature_node(validator_id): res = get_node_signature(validator_id) @@ -66,14 +69,14 @@ def signature_node(validator_id): print(f'Signature: {res}') -@mirage_node_cli.command('backup', help='Generate backup file for the Mirage node.') +@node.command('backup', help='Generate backup file for the Mirage node.') @click.argument('backup_folder_path') @streamed_cmd def backup_node(backup_folder_path): backup(backup_folder_path) -@mirage_node_cli.command('restore', help='Restore Mirage node from a backup file.') +@node.command('restore', help='Restore Mirage node from a backup file.') @click.argument('backup_path') @click.argument('env_file') @click.option( From 8c3bbe034c908d2ff036693fcbc02feacdf5d532 Mon Sep 17 00:00:00 2001 From: Dmytro Date: Tue, 13 May 2025 13:54:25 +0100 Subject: [PATCH 072/332] Fix DOCKER_LVMPY_STREAM param in update_meta for mirage ops --- node_cli/operations/base.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/node_cli/operations/base.py b/node_cli/operations/base.py index c03f7f35..c95796da 100644 --- a/node_cli/operations/base.py +++ b/node_cli/operations/base.py @@ -179,7 +179,7 @@ def migrate_mirage_boot(env_filepath: str, env: Dict) -> bool: update_meta( VERSION, env['CONTAINER_CONFIGS_STREAM'], - 
env['DOCKER_LVMPY_STREAM'], + None, distro.id(), distro.version(), ) @@ -219,7 +219,7 @@ def update_mirage_boot(env_filepath: str, env: Dict) -> bool: update_meta( VERSION, env['CONTAINER_CONFIGS_STREAM'], - env['DOCKER_LVMPY_STREAM'], + None, distro.id(), distro.version(), ) @@ -283,7 +283,7 @@ def init_mirage_boot(env_filepath: str, env: dict) -> None: update_meta( VERSION, env['CONTAINER_CONFIGS_STREAM'], - env['DOCKER_LVMPY_STREAM'], + None, distro.id(), distro.version(), ) @@ -480,7 +480,7 @@ def restore_mirage(env, backup_path, config_only=False): update_meta( VERSION, env['CONTAINER_CONFIGS_STREAM'], - env['DOCKER_LVMPY_STREAM'], + None, distro.id(), distro.version(), ) From 6abc49a89eab9bcb4af43cd74e17a676a55e3560 Mon Sep 17 00:00:00 2001 From: Dmytro Date: Tue, 13 May 2025 15:11:43 +0100 Subject: [PATCH 073/332] Add prepare_block_device to mirage init and update --- node_cli/operations/base.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/node_cli/operations/base.py b/node_cli/operations/base.py index c95796da..82fd5181 100644 --- a/node_cli/operations/base.py +++ b/node_cli/operations/base.py @@ -203,6 +203,7 @@ def update_mirage_boot(env_filepath: str, env: Dict) -> bool: configure_nftables(enable_monitoring=enable_monitoring) generate_nginx_config() + prepare_block_device(env['DISK_MOUNTPOINT'], force=env['ENFORCE_BTRFS'] == 'True') prepare_host(env_filepath, env['ENV_TYPE']) @@ -279,6 +280,7 @@ def init_mirage_boot(env_filepath: str, env: dict) -> None: configure_filebeat() configure_flask() generate_nginx_config() + prepare_block_device(env['DISK_MOUNTPOINT'], force=env['ENFORCE_BTRFS'] == 'True') update_meta( VERSION, From 1ca9109465c6c3a60b191281aec806fc29f1f837 Mon Sep 17 00:00:00 2001 From: Dmytro Date: Tue, 13 May 2025 15:58:24 +0100 Subject: [PATCH 074/332] Unmount datadir in init/update/mirage for mirage boot --- node_cli/operations/base.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/node_cli/operations/base.py 
b/node_cli/operations/base.py index 82fd5181..d4e820e2 100644 --- a/node_cli/operations/base.py +++ b/node_cli/operations/base.py @@ -152,6 +152,7 @@ def update(env_filepath: str, env: Dict, node_type: NodeType) -> bool: @checked_host def migrate_mirage_boot(env_filepath: str, env: Dict) -> bool: compose_rm(node_type=NodeType.MIRAGE, env=env) + cleanup_volume_artifacts(env['DISK_MOUNTPOINT']) sync_skale_node() ensure_btrfs_kernel_module_autoloaded() @@ -192,6 +193,7 @@ def migrate_mirage_boot(env_filepath: str, env: Dict) -> bool: def update_mirage_boot(env_filepath: str, env: Dict) -> bool: compose_rm(node_type=NodeType.MIRAGE, env=env) remove_dynamic_containers() + cleanup_volume_artifacts(env['DISK_MOUNTPOINT']) sync_skale_node() ensure_btrfs_kernel_module_autoloaded() @@ -266,6 +268,7 @@ def init(env_filepath: str, env: dict, node_type: NodeType) -> None: @checked_host def init_mirage_boot(env_filepath: str, env: dict) -> None: sync_skale_node() + cleanup_volume_artifacts(env['DISK_MOUNTPOINT']) ensure_btrfs_kernel_module_autoloaded() if env.get('SKIP_DOCKER_CONFIG') != 'True': From d310fdabcc11fd66c57bc5395e8a1a52e0425ebf Mon Sep 17 00:00:00 2001 From: Dmytro Date: Tue, 13 May 2025 16:35:47 +0100 Subject: [PATCH 075/332] Fix mirage update boot cmd --- node_cli/operations/base.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/node_cli/operations/base.py b/node_cli/operations/base.py index d4e820e2..1ab98780 100644 --- a/node_cli/operations/base.py +++ b/node_cli/operations/base.py @@ -227,7 +227,7 @@ def update_mirage_boot(env_filepath: str, env: Dict) -> bool: distro.version(), ) update_images(env=env) - compose_up(env=env, node_type=NodeType.MIRAGE) + compose_up(env=env, node_type=NodeType.MIRAGE, is_mirage_boot=True) return True From ae92ad1ae7783f33af3db1334ed772e8d598844d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20Gon=C3=A7alves?= Date: Tue, 13 May 2025 17:14:47 +0100 Subject: [PATCH 076/332] Fixed `update_images` function to 
use NodeType correctly --- node_cli/operations/base.py | 15 ++++++++------- node_cli/operations/skale_node.py | 7 ++++--- node_cli/utils/docker_utils.py | 8 ++++---- 3 files changed, 16 insertions(+), 14 deletions(-) diff --git a/node_cli/operations/base.py b/node_cli/operations/base.py index 1ab98780..8eb5dd2f 100644 --- a/node_cli/operations/base.py +++ b/node_cli/operations/base.py @@ -18,6 +18,7 @@ # along with this program. If not, see . import time +from tkinter import N import distro import functools @@ -144,7 +145,7 @@ def update(env_filepath: str, env: Dict, node_type: NodeType) -> bool: distro.id(), distro.version(), ) - update_images(env=env) + update_images(env=env, node_type=node_type) compose_up(env=env, node_type=node_type) return True @@ -184,7 +185,7 @@ def migrate_mirage_boot(env_filepath: str, env: Dict) -> bool: distro.id(), distro.version(), ) - update_images(env=env) + update_images(env=env, node_type=NodeType.MIRAGE) compose_up(env=env, node_type=NodeType.MIRAGE) return True @@ -226,7 +227,7 @@ def update_mirage_boot(env_filepath: str, env: Dict) -> bool: distro.id(), distro.version(), ) - update_images(env=env) + update_images(env=env, node_type=NodeType.MIRAGE) compose_up(env=env, node_type=NodeType.MIRAGE, is_mirage_boot=True) return True @@ -260,7 +261,7 @@ def init(env_filepath: str, env: dict, node_type: NodeType) -> None: distro.version(), ) update_resource_allocation(env_type=env['ENV_TYPE']) - update_images(env=env) + update_images(env=env, node_type=node_type) compose_up(env=env, node_type=node_type) @@ -292,7 +293,7 @@ def init_mirage_boot(env_filepath: str, env: dict) -> None: distro.id(), distro.version(), ) - update_images(env=env) + update_images(env=env, node_type=NodeType.MIRAGE) compose_up(env=env, node_type=NodeType.MIRAGE, is_mirage_boot=True) @@ -345,7 +346,7 @@ def init_sync( ts = int(time.time()) update_node_cli_schain_status(schain_name, repair_ts=ts, snapshot_from=snapshot_from) - update_images(env=env, 
sync_node=True) + update_images(env=env, node_type=NodeType.SYNC) compose_up(env=env, node_type=NodeType.SYNC) @@ -377,7 +378,7 @@ def update_sync(env_filepath: str, env: Dict) -> bool: distro.id(), distro.version(), ) - update_images(env=env, sync_node=True) + update_images(env=env, node_type=NodeType.SYNC) compose_up(env=env, node_type=NodeType.SYNC) return True diff --git a/node_cli/operations/skale_node.py b/node_cli/operations/skale_node.py index 5ccb1a85..656e4cb1 100644 --- a/node_cli/operations/skale_node.py +++ b/node_cli/operations/skale_node.py @@ -27,17 +27,18 @@ from node_cli.utils.git_utils import clone_repo from node_cli.utils.docker_utils import compose_pull, compose_build from node_cli.configs import CONTAINER_CONFIG_PATH, CONTAINER_CONFIG_TMP_PATH, SKALE_NODE_REPO_URL +from node_cli.utils.node_type import NodeType logger = logging.getLogger(__name__) -def update_images(env: dict, sync_node: bool = False) -> None: +def update_images(env: dict, node_type: NodeType) -> None: local = env.get('CONTAINER_CONFIGS_DIR') != '' if local: - compose_build(env=env, sync_node=sync_node) + compose_build(env=env, node_type=node_type) else: - compose_pull(env=env, sync_node=sync_node) + compose_pull(env=env, node_type=node_type) def download_skale_node(stream: Optional[str] = None, src: Optional[str] = None) -> None: diff --git a/node_cli/utils/docker_utils.py b/node_cli/utils/docker_utils.py index 60e869f0..d6438253 100644 --- a/node_cli/utils/docker_utils.py +++ b/node_cli/utils/docker_utils.py @@ -268,15 +268,15 @@ def compose_rm(node_type: NodeType, env={}): logger.info('Compose containers removed') -def compose_pull(env: dict, sync_node: bool = False): +def compose_pull(env: dict, node_type: NodeType): logger.info('Pulling compose containers') - compose_path = get_compose_path(NodeType.SYNC) + compose_path = get_compose_path(node_type) run_cmd(cmd=('docker', 'compose', '-f', compose_path, 'pull'), env=env) -def compose_build(env: dict, sync_node: bool = 
False): +def compose_build(env: dict, node_type: NodeType): logger.info('Building compose containers') - compose_path = get_compose_path(NodeType.SYNC) + compose_path = get_compose_path(node_type) run_cmd(cmd=('docker', 'compose', '-f', compose_path, 'build'), env=env) From 3b7dc0a785e074a8edd151564e12d15fd8238dee Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20Gon=C3=A7alves?= Date: Tue, 13 May 2025 17:14:57 +0100 Subject: [PATCH 077/332] Removed unused import --- node_cli/operations/base.py | 1 - 1 file changed, 1 deletion(-) diff --git a/node_cli/operations/base.py b/node_cli/operations/base.py index 8eb5dd2f..ccc29a1b 100644 --- a/node_cli/operations/base.py +++ b/node_cli/operations/base.py @@ -18,7 +18,6 @@ # along with this program. If not, see . import time -from tkinter import N import distro import functools From 6507eb5934ee820865f124962707dd1f1402f7a6 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9=20Gon=C3=A7alves?= Date: Tue, 13 May 2025 17:54:51 +0100 Subject: [PATCH 078/332] Added `info` command to mirage boot and mirage node --- node_cli/cli/mirage_boot.py | 8 +++++++- node_cli/cli/mirage_node.py | 8 +++++++- 2 files changed, 14 insertions(+), 2 deletions(-) diff --git a/node_cli/cli/mirage_boot.py b/node_cli/cli/mirage_boot.py index 38126751..4c783393 100644 --- a/node_cli/cli/mirage_boot.py +++ b/node_cli/cli/mirage_boot.py @@ -19,7 +19,7 @@ import click -from node_cli.core.node import get_node_signature, register_node as register +from node_cli.core.node import get_node_signature, register_node as register, get_node_info from node_cli.core.mirage_boot import init, migrate, update from node_cli.configs import DEFAULT_NODE_BASE_PORT from node_cli.utils.helper import streamed_cmd, IP_TYPE, error_exit, abort_if_false @@ -35,6 +35,12 @@ def boot(): pass +@boot.command('info', help='Get info about Mirage node (Boot Phase).') +@click.option('--format', '-f', type=click.Choice(['json', 'text'])) +def mirage_boot_info(format): + 
get_node_info(format) + + @boot.command('init', help='Initialize Mirage node (Boot Phase).') @click.argument('env_file') @streamed_cmd diff --git a/node_cli/cli/mirage_node.py b/node_cli/cli/mirage_node.py index 86ba8cc0..e983dc47 100644 --- a/node_cli/cli/mirage_node.py +++ b/node_cli/cli/mirage_node.py @@ -19,7 +19,7 @@ import click -from node_cli.core.node import get_node_signature, backup +from node_cli.core.node import get_node_signature, backup, get_node_info from node_cli.core.mirage_node import restore_mirage from node_cli.utils.helper import error_exit, streamed_cmd, abort_if_false @@ -34,6 +34,12 @@ def node(): pass +@node.command('info', help='Get info about Mirage node.') +@click.option('--format', '-f', type=click.Choice(['json', 'text'])) +def mirage_node_info(format): + get_node_info(format) + + @node.command('init', help='Initialize regular Mirage node operations (Placeholder).') def init_node(): click.echo("Placeholder: Command 'mirage node init' is not yet implemented.") From b3b6846970808b4b4aa4d32ee55def74b789c0c6 Mon Sep 17 00:00:00 2001 From: Dmytro Date: Wed, 21 May 2025 12:18:49 +0100 Subject: [PATCH 079/332] Add mirage-* branches to build pipeline --- .github/workflows/publish.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/publish.yml b/.github/workflows/publish.yml index 191dd4ac..d5d3bff4 100644 --- a/.github/workflows/publish.yml +++ b/.github/workflows/publish.yml @@ -9,6 +9,7 @@ on: - stable - 'v*.*.*' - 'mirage' + - 'mirage-*' jobs: create_release: From 8ddc21d179b0239945a31457be00bd2462b82a65 Mon Sep 17 00:00:00 2001 From: badrogger Date: Wed, 4 Jun 2025 12:49:40 +0100 Subject: [PATCH 080/332] Use redis instead of node-cli.status --- node_cli/cli/mirage_boot.py | 2 +- node_cli/cli/mirage_node.py | 27 +++- node_cli/configs/__init__.py | 2 + node_cli/core/checks.py | 18 +-- node_cli/core/static_config.py | 22 +++ node_cli/mirage/__init__.py | 0 node_cli/{core => mirage}/mirage_boot.py | 0 node_cli/{core => 
mirage}/mirage_node.py | 24 ++- node_cli/mirage/record/__init__.py | 0 node_cli/mirage/record/chain_record.py | 61 +++++++ node_cli/mirage/record/redis_record.py | 151 ++++++++++++++++++ scripts/run_redis.sh | 5 + scripts/run_tests.sh | 2 + setup.py | 1 + tests/mirage/__init__.py | 0 .../mirage_node_cli_test.py} | 40 ++--- text.yml | 10 +- 17 files changed, 323 insertions(+), 42 deletions(-) create mode 100644 node_cli/core/static_config.py create mode 100644 node_cli/mirage/__init__.py rename node_cli/{core => mirage}/mirage_boot.py (100%) rename node_cli/{core => mirage}/mirage_node.py (68%) create mode 100644 node_cli/mirage/record/__init__.py create mode 100644 node_cli/mirage/record/chain_record.py create mode 100644 node_cli/mirage/record/redis_record.py create mode 100755 scripts/run_redis.sh create mode 100644 tests/mirage/__init__.py rename tests/{core/core_mirage_test.py => mirage/mirage_node_cli_test.py} (72%) diff --git a/node_cli/cli/mirage_boot.py b/node_cli/cli/mirage_boot.py index 4c783393..b3b555e5 100644 --- a/node_cli/cli/mirage_boot.py +++ b/node_cli/cli/mirage_boot.py @@ -20,7 +20,7 @@ import click from node_cli.core.node import get_node_signature, register_node as register, get_node_info -from node_cli.core.mirage_boot import init, migrate, update +from node_cli.mirage.mirage_boot import init, migrate, update from node_cli.configs import DEFAULT_NODE_BASE_PORT from node_cli.utils.helper import streamed_cmd, IP_TYPE, error_exit, abort_if_false diff --git a/node_cli/cli/mirage_node.py b/node_cli/cli/mirage_node.py index e983dc47..57c5e3ab 100644 --- a/node_cli/cli/mirage_node.py +++ b/node_cli/cli/mirage_node.py @@ -18,10 +18,14 @@ # along with this program. If not, see . 
import click +from yaml import safe_load from node_cli.core.node import get_node_signature, backup, get_node_info -from node_cli.core.mirage_node import restore_mirage -from node_cli.utils.helper import error_exit, streamed_cmd, abort_if_false +from node_cli.mirage.mirage_node import restore_mirage, toggle_mirage_repair +from node_cli.utils.helper import error_exit, streamed_cmd, abort_if_false, URL_TYPE +from node_cli.utils.texts import safe_load_texts + +TEXTS = safe_load_texts() @click.group() @@ -94,3 +98,22 @@ def backup_node(backup_folder_path): @streamed_cmd def restore_node(backup_path, env_file, config_only): restore_mirage(backup_path, env_file, config_only) + + +@node.command('repair', help='Toggle mirage chain repair mode') +@click.option( + '--yes', + is_flag=True, + callback=abort_if_false, + expose_value=False, + prompt=TEXTS['mirage']['node']['repair']['warning'] +) +@click.option( + '--snapshot-from', + type=URL_TYPE, + default=None, + hidden=True, + help=TEXTS['mirage']['node']['repair']['snapshot_from'] +) +def repair(snapshot_from: str | None = None) -> None: + toggle_mirage_repair(snapshot_from=snapshot_from) diff --git a/node_cli/configs/__init__.py b/node_cli/configs/__init__.py index 668e8ea1..be9f8bdd 100644 --- a/node_cli/configs/__init__.py +++ b/node_cli/configs/__init__.py @@ -169,3 +169,5 @@ def _get_env(): UFW_CONFIG_PATH = '/etc/default/ufw' UFW_IPV6_BEFORE_INPUT_CHAIN = 'ufw6-before-input' + +REDIS_URI: str = os.getenv('REDIS_URI', 'redis://@127.0.0.1:6379') diff --git a/node_cli/core/checks.py b/node_cli/core/checks.py index 1428c440..951a6b22 100644 --- a/node_cli/core/checks.py +++ b/node_cli/core/checks.py @@ -60,6 +60,7 @@ ) from node_cli.core.host import is_ufw_ipv6_chain_exists, is_ufw_ipv6_option_enabled from node_cli.core.resources import get_disk_size +from node_cli.core.static_config import get_static_params from node_cli.utils.docker_utils import NodeType from node_cli.utils.helper import run_cmd, safe_mkdir @@ -78,23 
+79,6 @@ FuncList = List[Func] -def get_static_params( - node_type: NodeType, - env_type: str = 'mainnet', - config_path: str = CONTAINER_CONFIG_PATH, -) -> Dict: - if node_type == NodeType.MIRAGE: - static_params_base_filepath = MIRAGE_STATIC_PARAMS_FILEPATH - else: - static_params_base_filepath = STATIC_PARAMS_FILEPATH - - static_params_filename = os.path.basename(static_params_base_filepath) - static_params_filepath = os.path.join(config_path, static_params_filename) - with open(static_params_filepath) as requirements_file: - ydata = yaml.load(requirements_file, Loader=yaml.Loader) - return ydata['envs'][env_type] - - def check_quietly(check: Func, *args, **kwargs) -> CheckResult: try: return check(*args, **kwargs) diff --git a/node_cli/core/static_config.py b/node_cli/core/static_config.py new file mode 100644 index 00000000..f3fcf923 --- /dev/null +++ b/node_cli/core/static_config.py @@ -0,0 +1,22 @@ +import os +import yaml + +from node_cli.configs import CONTAINER_CONFIG_PATH, MIRAGE_STATIC_PARAMS_FILEPATH, STATIC_PARAMS_FILEPATH +from node_cli.utils.node_type import NodeType + + +def get_static_params( + node_type: NodeType, + env_type: str = 'mainnet', + config_path: str = CONTAINER_CONFIG_PATH, +) -> dict: + if node_type == NodeType.MIRAGE: + static_params_base_filepath = MIRAGE_STATIC_PARAMS_FILEPATH + else: + static_params_base_filepath = STATIC_PARAMS_FILEPATH + + static_params_filename = os.path.basename(static_params_base_filepath) + static_params_filepath = os.path.join(config_path, static_params_filename) + with open(static_params_filepath) as requirements_file: + ydata = yaml.load(requirements_file, Loader=yaml.Loader) + return ydata['envs'][env_type] diff --git a/node_cli/mirage/__init__.py b/node_cli/mirage/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/node_cli/core/mirage_boot.py b/node_cli/mirage/mirage_boot.py similarity index 100% rename from node_cli/core/mirage_boot.py rename to node_cli/mirage/mirage_boot.py diff 
--git a/node_cli/core/mirage_node.py b/node_cli/mirage/mirage_node.py similarity index 68% rename from node_cli/core/mirage_node.py rename to node_cli/mirage/mirage_node.py index 4a4101f9..b1b157cd 100644 --- a/node_cli/core/mirage_node.py +++ b/node_cli/mirage/mirage_node.py @@ -22,16 +22,21 @@ import time from node_cli.configs import SKALE_DIR, RESTORE_SLEEP_TIMEOUT -from node_cli.core.host import save_env_params +from node_cli.configs.env import SKALE_DIR_ENV_FILEPATH from node_cli.core.node import compose_node_env -from node_cli.utils.decorators import check_not_inited +from node_cli.core.host import save_env_params +from node_cli.core.static_config import get_static_params +from node_cli.mirage.record.chain_record import ChainRecord +from node_cli.operations import restore_mirage_op +from node_cli.utils.decorators import check_inited, check_not_inited from node_cli.utils.exit_codes import CLIExitCodes from node_cli.utils.helper import error_exit from node_cli.utils.node_type import NodeType -from node_cli.operations import restore_mirage_op +from node_cli.utils.texts import safe_load_texts logger = logging.getLogger(__name__) +TEXTS = safe_load_texts() @check_not_inited @@ -47,3 +52,16 @@ def restore_mirage(backup_path, env_filepath, config_only=False): error_exit('Restore operation failed', exit_code=CLIExitCodes.OPERATION_EXECUTION_ERROR) time.sleep(RESTORE_SLEEP_TIMEOUT) print('Mirage node is restored from backup') + + +@check_inited +def toggle_mirage_repair(snapshot_from: str | None = None) -> None: + node_type = NodeType.MIRAGE + env = compose_node_env(SKALE_DIR_ENV_FILEPATH, save=False, node_type=node_type) + params = get_static_params(node_type, env['ENV_TYPE']) + record = ChainRecord(params['info']['chain_name']) + record.set_repair_ts(int(time.time())) + if snapshot_from: + record.set_snapshot_from(snapshot_from) + + print(TEXTS['mirage']['toggle_repair']) diff --git a/node_cli/mirage/record/__init__.py b/node_cli/mirage/record/__init__.py new file 
mode 100644 index 00000000..e69de29b diff --git a/node_cli/mirage/record/chain_record.py b/node_cli/mirage/record/chain_record.py new file mode 100644 index 00000000..2d8a170e --- /dev/null +++ b/node_cli/mirage/record/chain_record.py @@ -0,0 +1,61 @@ +# -*- coding: utf-8 -*- +# +# This file is part of Node cli +# +# Copyright (C) 2025 SKALE Labs +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Affero General Public License for more details. +# +# You should have received a copy of the GNU Affero General Public License +# along with this program. If not, see . + + + +import logging +from typing import cast +from datetime import datetime + +from node_cli.mirage.record.redis_record import FlatRedisRecord, FieldInfo + +logger = logging.getLogger(__name__) + + +CHAIN_RECORD_FIELDS: dict[str, FieldInfo] = { + 'repair_date': FieldInfo('repair_date', datetime, datetime.fromtimestamp(0)), + 'repair_ts': FieldInfo('repair_ts', int, None), + 'snapshot_from': FieldInfo('snapshot_from', str, None), +} + + +class ChainRecord(FlatRedisRecord): + def _record_fields(self) -> dict[str, FieldInfo]: + return CHAIN_RECORD_FIELDS + + @property + def repair_date(self) -> datetime: + return cast(datetime, self._get_field('repair_date')) + + @property + def snapshot_from(self) -> str | None: + return cast(str | None, self._get_field('snapshot_from')) + + @property + def repair_ts(self) -> int | None: + return cast(int | None, self._get_field('repair_ts')) + + def set_repair_date(self, date: datetime) -> None: + self._set_field('repair_date', date) + + def 
set_snapshot_from(self, value: str | None) -> None: + self._set_field('snapshot_from', value) + + def set_repair_ts(self, value: int | None) -> None: + self._set_field('repair_ts', value) diff --git a/node_cli/mirage/record/redis_record.py b/node_cli/mirage/record/redis_record.py new file mode 100644 index 00000000..78f87d94 --- /dev/null +++ b/node_cli/mirage/record/redis_record.py @@ -0,0 +1,151 @@ +# -*- coding: utf-8 -*- +# +# This file is part of node-cli +# +# Copyright (C) 2025-Present SKALE Labs +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Affero General Public License for more details. +# +# You should have received a copy of the GNU Affero General Public License +# along with this program. If not, see . 
+ +import abc +from typing import Any +from datetime import datetime +from dataclasses import dataclass + +import redis + +from node_cli.configs import REDIS_URI + +cpool: redis.ConnectionPool = redis.ConnectionPool.from_url(REDIS_URI) +rs: redis.Redis = redis.Redis(connection_pool=cpool) + +@dataclass +class FieldInfo: + name: str + type: type + default: str | int | bool | datetime | None + + +class FlatRedisRecord: + def __init__(self, name: str): + self.name = name + if not self._exists(): + self._set_defaults() + self._save() + + def to_dict(self) -> dict: + return self.mget(*self._record_fields().keys()) + + def mget(self, *args) -> dict[str, Any]: + key_names = [self._get_field_key(field_name) for field_name in args] + raw_res = rs.mget(*key_names) + return { + key_name: self._deserialize_field(value, self._record_fields()[key_name].type) + for value, key_name in zip(raw_res, args) + } + + def mset(self, **kwargs) -> None: + key_names = [self._get_field_key(field_name) for field_name in kwargs.keys()] + values = [ + self._serialize_field(value, self._record_fields()[field_name].type) + for value, field_name in zip(kwargs.values(), kwargs.keys()) + ] + rs.mset(dict(zip(key_names, values))) + + def delete(self) -> None: + rs.delete(*self._key_names()) + + def _get_field_key(self, field_name: str) -> str: + return f'{self.name}_{field_name}' + + def _serialize_datetime(self, dt: datetime) -> str: + return dt.isoformat() + + def _deserialize_datetime(self, value: str) -> datetime: + return datetime.fromisoformat(value) + + def _get_field(self, field_name: str): + key = self._get_field_key(field_name) + value = rs.get(key) + if value is None: + raise ValueError(f"Field '{field_name}' not found in record '{self.name}'") + return self._deserialize_field(value, self._record_fields()[field_name].type) + + def _set_field(self, field_name: str, value) -> None: + key = self._get_field_key(field_name) + serialized_value = self._serialize_field(value, 
self._record_fields()[field_name].type) + rs.set(key, serialized_value) + + def _deserialize_field(self, value, field_type: type): + if value is None: + return None + val = value.decode('utf-8') + if field_type is datetime: + return self._deserialize_datetime(val) + elif field_type is bool: + return bool(int(val)) + elif field_type is int: + return int(val) + else: + return val + + def _serialize_field(self, value, field_type: type): + if field_type is datetime: + return self._serialize_datetime(value) + elif field_type is bool: + return int(value) + elif field_type is int: + return value + else: + return str(value) + + def _key_names(self) -> list[str]: + return [self._get_field_key(field_name) for field_name in self._record_fields().keys()] + + def _set_defaults(self) -> None: + record_fields = self._record_fields() + defaults_to_set = { + field_name: field_info.default + for field_name, field_info in record_fields.items() + if field_info.default is not None + } + if defaults_to_set: + self.mset(**defaults_to_set) + + def _exists(self) -> bool: + return rs.exists(self._get_field_key('name')) > 0 + + def _save(self) -> None: + self._set_field('name', self.name) + + @abc.abstractmethod + def _record_fields(self) -> dict[str, FieldInfo]: + """Return a list of FieldInfo objects representing the fields of the record.""" + + @classmethod + def _redis_key_to_field_name(cls, key: bytes) -> str: + return key[:-5].decode('utf-8') + + @classmethod + def find_all(cls) -> list['FlatRedisRecord']: + name_keys = rs.keys('*_name') + records = [] + for key in name_keys: + chain_name = cls._redis_key_to_field_name(key) + records.append(cls(chain_name)) + return records + + def __eq__(self, other) -> bool: + if not isinstance(other, FlatRedisRecord): + return False + return self.name == other.name diff --git a/scripts/run_redis.sh b/scripts/run_redis.sh new file mode 100755 index 00000000..b3591b97 --- /dev/null +++ b/scripts/run_redis.sh @@ -0,0 +1,5 @@ +set -e + +docker rm -f 
redis || true +export DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )" +docker run -v $DIR/../tests/redis-conf:/config --network=host --name=redis -d redis:6.0-alpine diff --git a/scripts/run_tests.sh b/scripts/run_tests.sh index 97676b9a..0d019759 100755 --- a/scripts/run_tests.sh +++ b/scripts/run_tests.sh @@ -3,6 +3,8 @@ DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )" PROJECT_DIR=$(dirname $DIR) +bash scripts/run_redis.sh + LVMPY_LOG_DIR="$PROJECT_DIR/tests/" \ HIDE_STREAM_LOG=true \ TEST_HOME_DIR="$PROJECT_DIR/tests/" \ diff --git a/setup.py b/setup.py index ebd27ab1..ad03a8b4 100644 --- a/setup.py +++ b/setup.py @@ -71,6 +71,7 @@ def find_version(*file_paths): 'sh==1.14.2', 'python-crontab==2.6.0', 'requests-mock==1.12.1', + 'redis==6.2.0' ], python_requires='>=3.8,<4', extras_require=extras_require, diff --git a/tests/mirage/__init__.py b/tests/mirage/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/tests/core/core_mirage_test.py b/tests/mirage/mirage_node_cli_test.py similarity index 72% rename from tests/core/core_mirage_test.py rename to tests/mirage/mirage_node_cli_test.py index 62d58681..f5397975 100644 --- a/tests/core/core_mirage_test.py +++ b/tests/mirage/mirage_node_cli_test.py @@ -1,15 +1,15 @@ from unittest import mock from node_cli.configs import SKALE_DIR -from node_cli.core.mirage_boot import init as init_boot, migrate, update -from node_cli.core.mirage_node import restore_mirage +from node_cli.mirage.mirage_boot import init as init_boot, migrate, update +from node_cli.mirage.mirage_node import restore_mirage from node_cli.utils.node_type import NodeType -@mock.patch('node_cli.core.mirage_node.time.sleep') -@mock.patch('node_cli.core.mirage_node.restore_mirage_op') -@mock.patch('node_cli.core.mirage_node.save_env_params') -@mock.patch('node_cli.core.mirage_node.compose_node_env') +@mock.patch('node_cli.mirage.mirage_node.time.sleep') 
+@mock.patch('node_cli.mirage.mirage_node.restore_mirage_op') +@mock.patch('node_cli.mirage.mirage_node.save_env_params') +@mock.patch('node_cli.mirage.mirage_node.compose_node_env') def test_restore_mirage( mock_compose_env, mock_save_env, @@ -32,10 +32,10 @@ def test_restore_mirage( mock_sleep.assert_called_once() -@mock.patch('node_cli.core.mirage_boot.is_base_containers_alive', return_value=True) -@mock.patch('node_cli.core.mirage_boot.time.sleep') -@mock.patch('node_cli.core.mirage_boot.init_mirage_boot_op') -@mock.patch('node_cli.core.mirage_boot.compose_node_env') +@mock.patch('node_cli.mirage.mirage_boot.is_base_containers_alive', return_value=True) +@mock.patch('node_cli.mirage.mirage_boot.time.sleep') +@mock.patch('node_cli.mirage.mirage_boot.init_mirage_boot_op') +@mock.patch('node_cli.mirage.mirage_boot.compose_node_env') def test_init_mirage_boot( mock_compose_env, mock_init_op, @@ -60,10 +60,10 @@ def test_init_mirage_boot( @mock.patch('node_cli.utils.decorators.is_user_valid', return_value=True) -@mock.patch('node_cli.core.mirage_boot.is_base_containers_alive', return_value=True) -@mock.patch('node_cli.core.mirage_boot.time.sleep') -@mock.patch('node_cli.core.mirage_boot.migrate_mirage_boot_op') -@mock.patch('node_cli.core.mirage_boot.compose_node_env') +@mock.patch('node_cli.mirage.mirage_boot.is_base_containers_alive', return_value=True) +@mock.patch('node_cli.mirage.mirage_boot.time.sleep') +@mock.patch('node_cli.mirage.mirage_boot.migrate_mirage_boot_op') +@mock.patch('node_cli.mirage.mirage_boot.compose_node_env') def test_migrate_mirage_boot( mock_compose_env, mock_migrate_op, @@ -95,10 +95,10 @@ def test_migrate_mirage_boot( @mock.patch('node_cli.utils.decorators.is_user_valid', return_value=True) -@mock.patch('node_cli.core.mirage_boot.is_base_containers_alive', return_value=True) -@mock.patch('node_cli.core.mirage_boot.time.sleep') -@mock.patch('node_cli.core.mirage_boot.update_mirage_boot_op') 
-@mock.patch('node_cli.core.mirage_boot.compose_node_env') +@mock.patch('node_cli.mirage.mirage_boot.is_base_containers_alive', return_value=True) +@mock.patch('node_cli.mirage.mirage_boot.time.sleep') +@mock.patch('node_cli.mirage.mirage_boot.update_mirage_boot_op') +@mock.patch('node_cli.mirage.mirage_boot.compose_node_env') def test_update_mirage_boot( mock_compose_env, mock_update_op, @@ -128,3 +128,7 @@ def test_update_mirage_boot( mock_update_op.assert_called_once_with(valid_env_file, mock_env) mock_sleep.assert_called_once() mock_is_alive.assert_called_once_with(node_type=NodeType.MIRAGE, is_mirage_boot=True) + + +def test_mirage_repair(): + pass diff --git a/text.yml b/text.yml index 80461c76..b56202b0 100644 --- a/text.yml +++ b/text.yml @@ -40,7 +40,7 @@ service: ssl: no_cert: |- - No SSL certificates on the node. + No SSL certificates on the node. Run < skale ssl upload > to add new certificates. uploaded: |- Certificates are successfully uploaded. @@ -76,3 +76,11 @@ lvmpy: heal: help: Run healing procedure for lvmpy server prompt: Are you sure you want run healing procedure? + +mirage: + node: + repair: + help: Repair mirage chain node + warning: Are you sure you want to repair mirage chain node? 
In rare cases may cause data loss and require additional maintenance + snapshot_from: IP of the node to take snapshot from + toggle_repair: Repair mode is toggled From e4c03d9ac3e5e36bac117426977bb47dc3810375 Mon Sep 17 00:00:00 2001 From: badrogger Date: Wed, 4 Jun 2025 13:09:34 +0100 Subject: [PATCH 081/332] Fix ChainRecord --- node_cli/mirage/record/chain_record.py | 1 + 1 file changed, 1 insertion(+) diff --git a/node_cli/mirage/record/chain_record.py b/node_cli/mirage/record/chain_record.py index 2d8a170e..34968be4 100644 --- a/node_cli/mirage/record/chain_record.py +++ b/node_cli/mirage/record/chain_record.py @@ -29,6 +29,7 @@ CHAIN_RECORD_FIELDS: dict[str, FieldInfo] = { + 'name': FieldInfo('name', str, ''), 'repair_date': FieldInfo('repair_date', datetime, datetime.fromtimestamp(0)), 'repair_ts': FieldInfo('repair_ts', int, None), 'snapshot_from': FieldInfo('snapshot_from', str, None), From a19681eb1485b14ee31fec6ffc9960a50d6ffcec Mon Sep 17 00:00:00 2001 From: badrogger Date: Wed, 4 Jun 2025 13:18:03 +0100 Subject: [PATCH 082/332] Fix text.yml --- node_cli/cli/mirage_node.py | 4 ++-- node_cli/mirage/mirage_node.py | 4 ++-- text.yml | 2 +- 3 files changed, 5 insertions(+), 5 deletions(-) diff --git a/node_cli/cli/mirage_node.py b/node_cli/cli/mirage_node.py index 57c5e3ab..ddcb4932 100644 --- a/node_cli/cli/mirage_node.py +++ b/node_cli/cli/mirage_node.py @@ -21,7 +21,7 @@ from yaml import safe_load from node_cli.core.node import get_node_signature, backup, get_node_info -from node_cli.mirage.mirage_node import restore_mirage, toggle_mirage_repair +from node_cli.mirage.mirage_node import restore_mirage, request_repair from node_cli.utils.helper import error_exit, streamed_cmd, abort_if_false, URL_TYPE from node_cli.utils.texts import safe_load_texts @@ -116,4 +116,4 @@ def restore_node(backup_path, env_file, config_only): help=TEXTS['mirage']['node']['repair']['snapshot_from'] ) def repair(snapshot_from: str | None = None) -> None: - 
toggle_mirage_repair(snapshot_from=snapshot_from) + request_repair(snapshot_from=snapshot_from) diff --git a/node_cli/mirage/mirage_node.py b/node_cli/mirage/mirage_node.py index b1b157cd..64fdc4f7 100644 --- a/node_cli/mirage/mirage_node.py +++ b/node_cli/mirage/mirage_node.py @@ -55,7 +55,7 @@ def restore_mirage(backup_path, env_filepath, config_only=False): @check_inited -def toggle_mirage_repair(snapshot_from: str | None = None) -> None: +def request_repair(snapshot_from: str | None = None) -> None: node_type = NodeType.MIRAGE env = compose_node_env(SKALE_DIR_ENV_FILEPATH, save=False, node_type=node_type) params = get_static_params(node_type, env['ENV_TYPE']) @@ -64,4 +64,4 @@ def toggle_mirage_repair(snapshot_from: str | None = None) -> None: if snapshot_from: record.set_snapshot_from(snapshot_from) - print(TEXTS['mirage']['toggle_repair']) + print(TEXTS['mirage']['node']['repair']['repair_requested']) diff --git a/text.yml b/text.yml index b56202b0..c41b2add 100644 --- a/text.yml +++ b/text.yml @@ -83,4 +83,4 @@ mirage: help: Repair mirage chain node warning: Are you sure you want to repair mirage chain node? In rare cases may cause data loss and require additional maintenance snapshot_from: IP of the node to take snapshot from - toggle_repair: Repair mode is toggled + toggle_repair: Repair mode is requested From e0d4ed5eb4175b96c306718469afcf97339d4bd6 Mon Sep 17 00:00:00 2001 From: badrogger Date: Wed, 4 Jun 2025 13:23:43 +0100 Subject: [PATCH 083/332] Fix text.yml --- text.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/text.yml b/text.yml index c41b2add..004dfced 100644 --- a/text.yml +++ b/text.yml @@ -83,4 +83,4 @@ mirage: help: Repair mirage chain node warning: Are you sure you want to repair mirage chain node? 
In rare cases may cause data loss and require additional maintenance snapshot_from: IP of the node to take snapshot from - toggle_repair: Repair mode is requested + repair_requested: Repair mode is requested From 2f4b647948fdcf2f2c59f7aeb885b3ee112941aa Mon Sep 17 00:00:00 2001 From: badrogger Date: Wed, 4 Jun 2025 15:29:56 +0100 Subject: [PATCH 084/332] Use '' as default snapshot_from value --- node_cli/cli/mirage_node.py | 4 ++-- node_cli/mirage/mirage_node.py | 5 ++--- 2 files changed, 4 insertions(+), 5 deletions(-) diff --git a/node_cli/cli/mirage_node.py b/node_cli/cli/mirage_node.py index ddcb4932..27361c0c 100644 --- a/node_cli/cli/mirage_node.py +++ b/node_cli/cli/mirage_node.py @@ -111,9 +111,9 @@ def restore_node(backup_path, env_file, config_only): @click.option( '--snapshot-from', type=URL_TYPE, - default=None, + default='', hidden=True, help=TEXTS['mirage']['node']['repair']['snapshot_from'] ) -def repair(snapshot_from: str | None = None) -> None: +def repair(snapshot_from: str = '') -> None: request_repair(snapshot_from=snapshot_from) diff --git a/node_cli/mirage/mirage_node.py b/node_cli/mirage/mirage_node.py index 64fdc4f7..e63c0401 100644 --- a/node_cli/mirage/mirage_node.py +++ b/node_cli/mirage/mirage_node.py @@ -55,13 +55,12 @@ def restore_mirage(backup_path, env_filepath, config_only=False): @check_inited -def request_repair(snapshot_from: str | None = None) -> None: +def request_repair(snapshot_from: str = '') -> None: node_type = NodeType.MIRAGE env = compose_node_env(SKALE_DIR_ENV_FILEPATH, save=False, node_type=node_type) params = get_static_params(node_type, env['ENV_TYPE']) record = ChainRecord(params['info']['chain_name']) record.set_repair_ts(int(time.time())) - if snapshot_from: - record.set_snapshot_from(snapshot_from) + record.set_snapshot_from(snapshot_from) print(TEXTS['mirage']['node']['repair']['repair_requested']) From f80f2868863b72057f8189c2783cf83443889e2c Mon Sep 17 00:00:00 2001 From: badrogger Date: Wed, 4 Jun 2025 
16:17:28 +0100 Subject: [PATCH 085/332] Add mirage_repair test --- scripts/run_tests.sh | 2 +- tests/conftest.py | 9 ++ tests/core/core_logs_test.py | 4 +- tests/core/core_schains_test.py | 5 +- tests/helper.py | 9 +- tests/mirage/mirage_node_cli_test.py | 134 --------------------------- 6 files changed, 18 insertions(+), 145 deletions(-) delete mode 100644 tests/mirage/mirage_node_cli_test.py diff --git a/scripts/run_tests.sh b/scripts/run_tests.sh index 0d019759..c85201fc 100755 --- a/scripts/run_tests.sh +++ b/scripts/run_tests.sh @@ -10,4 +10,4 @@ LVMPY_LOG_DIR="$PROJECT_DIR/tests/" \ TEST_HOME_DIR="$PROJECT_DIR/tests/" \ GLOBAL_SKALE_DIR="$PROJECT_DIR/tests/etc/skale" \ DOTENV_FILEPATH='tests/test-env' \ - py.test --cov=$PROJECT_DIR/ --ignore=tests/core/nftables_test.py --ignore=tests/core/migration_test.py tests $@ + py.test --cov=$PROJECT_DIR/ --ignore=tests/core/nftables_test.py --ignore=tests/core/migration_test.py tests/mirage $@ diff --git a/tests/conftest.py b/tests/conftest.py index 369b2374..cf5c4bdc 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -27,6 +27,7 @@ import docker import mock import pytest +import redis from node_cli.configs import ( CONTAINER_CONFIG_TMP_PATH, @@ -37,6 +38,7 @@ REMOVED_CONTAINERS_FOLDER_PATH, SCHAIN_NODE_DATA_PATH, NGINX_CONFIG_FILEPATH, + REDIS_URI ) from node_cli.configs.node_options import NODE_OPTIONS_FILEPATH from node_cli.configs.ssl import SSL_FOLDER_PATH @@ -298,3 +300,10 @@ def set_env_var(name, value): del os.environ[name] else: os.environ[name] = old_value + + +@pytest.fixture +def redis_client(): + cpool = redis.ConnectionPool.from_url(REDIS_URI) + return redis.Redis(connection_pool=cpool) + return rs diff --git a/tests/core/core_logs_test.py b/tests/core/core_logs_test.py index 06494d2e..54a6d2cc 100644 --- a/tests/core/core_logs_test.py +++ b/tests/core/core_logs_test.py @@ -11,10 +11,8 @@ from node_cli.configs import G_CONF_HOME, SKALE_TMP_DIR from node_cli.utils.docker_utils import docker_client 
from node_cli.utils.helper import run_cmd, safe_mkdir +from tests.helper import CURRENT_TIMESTAMP, CURRENT_DATETIME - -CURRENT_TIMESTAMP = 1594903080 -CURRENT_DATETIME = datetime.utcfromtimestamp(CURRENT_TIMESTAMP) TEST_DUMP_DIR_PATH = os.path.join(SKALE_TMP_DIR, 'skale-logs-dump-2020-07-16--12-38-00') TEST_IMAGE = 'alpine' diff --git a/tests/core/core_schains_test.py b/tests/core/core_schains_test.py index 1681ce20..61706524 100644 --- a/tests/core/core_schains_test.py +++ b/tests/core/core_schains_test.py @@ -3,15 +3,12 @@ from unittest import mock from pathlib import Path - import freezegun from node_cli.core.schains import cleanup_sync_datadir, toggle_schain_repair_mode from node_cli.utils.helper import read_json - -CURRENT_TIMESTAMP = 1594903080 -CURRENT_DATETIME = datetime.datetime.utcfromtimestamp(CURRENT_TIMESTAMP) +from tests.helper import CURRENT_TIMESTAMP, CURRENT_DATETIME @freezegun.freeze_time(CURRENT_DATETIME) diff --git a/tests/helper.py b/tests/helper.py index 08d67c20..7bf18a98 100644 --- a/tests/helper.py +++ b/tests/helper.py @@ -17,13 +17,16 @@ # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see . 
-import mock +import datetime import os - +from unittest import mock +from unittest.mock import Mock, MagicMock import requests from click.testing import CliRunner -from mock import Mock, MagicMock + +CURRENT_TIMESTAMP = 1594903080 +CURRENT_DATETIME = datetime.datetime.utcfromtimestamp(CURRENT_TIMESTAMP) BLOCK_DEVICE = os.getenv('BLOCK_DEVICE') diff --git a/tests/mirage/mirage_node_cli_test.py b/tests/mirage/mirage_node_cli_test.py deleted file mode 100644 index f5397975..00000000 --- a/tests/mirage/mirage_node_cli_test.py +++ /dev/null @@ -1,134 +0,0 @@ -from unittest import mock - -from node_cli.configs import SKALE_DIR -from node_cli.mirage.mirage_boot import init as init_boot, migrate, update -from node_cli.mirage.mirage_node import restore_mirage -from node_cli.utils.node_type import NodeType - - -@mock.patch('node_cli.mirage.mirage_node.time.sleep') -@mock.patch('node_cli.mirage.mirage_node.restore_mirage_op') -@mock.patch('node_cli.mirage.mirage_node.save_env_params') -@mock.patch('node_cli.mirage.mirage_node.compose_node_env') -def test_restore_mirage( - mock_compose_env, - mock_save_env, - mock_restore_op, - mock_sleep, - valid_env_file, - ensure_meta_removed, -): - mock_env = {'ENV_TYPE': 'devnet'} - mock_compose_env.return_value = mock_env - mock_restore_op.return_value = True - backup_path = '/fake/backup' - - restore_mirage(backup_path, valid_env_file) - - mock_compose_env.assert_called_once_with(valid_env_file, node_type=NodeType.MIRAGE) - mock_save_env.assert_called_once_with(valid_env_file) - expected_env = {**mock_env, 'SKALE_DIR': SKALE_DIR} - mock_restore_op.assert_called_once_with(expected_env, backup_path, config_only=False) - mock_sleep.assert_called_once() - - -@mock.patch('node_cli.mirage.mirage_boot.is_base_containers_alive', return_value=True) -@mock.patch('node_cli.mirage.mirage_boot.time.sleep') -@mock.patch('node_cli.mirage.mirage_boot.init_mirage_boot_op') -@mock.patch('node_cli.mirage.mirage_boot.compose_node_env') -def 
test_init_mirage_boot( - mock_compose_env, - mock_init_op, - mock_sleep, - mock_is_alive, - valid_env_file, - ensure_meta_removed, -): - mock_env = {'ENV_TYPE': 'devnet'} - mock_compose_env.return_value = mock_env - - init_boot(valid_env_file) - - mock_compose_env.assert_called_once_with( - valid_env_file, - node_type=NodeType.MIRAGE, - is_mirage_boot=True, - ) - mock_init_op.assert_called_once_with(valid_env_file, mock_env) - mock_sleep.assert_called_once() - mock_is_alive.assert_called_once_with(node_type=NodeType.MIRAGE, is_mirage_boot=True) - - -@mock.patch('node_cli.utils.decorators.is_user_valid', return_value=True) -@mock.patch('node_cli.mirage.mirage_boot.is_base_containers_alive', return_value=True) -@mock.patch('node_cli.mirage.mirage_boot.time.sleep') -@mock.patch('node_cli.mirage.mirage_boot.migrate_mirage_boot_op') -@mock.patch('node_cli.mirage.mirage_boot.compose_node_env') -def test_migrate_mirage_boot( - mock_compose_env, - mock_migrate_op, - mock_sleep, - mock_is_alive, - mock_is_user_valid, - valid_env_file, - inited_node, - resource_alloc, - meta_file_v3, -): - mock_env = {'ENV_TYPE': 'devnet'} - mock_compose_env.return_value = mock_env - mock_migrate_op.return_value = True - pull_config_for_schain = 'mirage' - - migrate(valid_env_file, pull_config_for_schain) - - mock_compose_env.assert_called_once_with( - valid_env_file, - inited_node=True, - sync_schains=False, - pull_config_for_schain=pull_config_for_schain, - node_type=NodeType.MIRAGE, - ) - mock_migrate_op.assert_called_once_with(valid_env_file, mock_env) - mock_sleep.assert_called_once() - mock_is_alive.assert_called_once_with(node_type=NodeType.MIRAGE) - - -@mock.patch('node_cli.utils.decorators.is_user_valid', return_value=True) -@mock.patch('node_cli.mirage.mirage_boot.is_base_containers_alive', return_value=True) -@mock.patch('node_cli.mirage.mirage_boot.time.sleep') -@mock.patch('node_cli.mirage.mirage_boot.update_mirage_boot_op') 
-@mock.patch('node_cli.mirage.mirage_boot.compose_node_env') -def test_update_mirage_boot( - mock_compose_env, - mock_update_op, - mock_sleep, - mock_is_alive, - mock_is_user_valid, - valid_env_file, - inited_node, - resource_alloc, - meta_file_v3, -): - mock_env = {'ENV_TYPE': 'devnet'} - mock_compose_env.return_value = mock_env - mock_update_op.return_value = True - pull_config_for_schain = 'mirage' - - update(valid_env_file, pull_config_for_schain) - - mock_compose_env.assert_called_once_with( - valid_env_file, - inited_node=True, - sync_schains=False, - pull_config_for_schain=pull_config_for_schain, - node_type=NodeType.MIRAGE, - is_mirage_boot=True, - ) - mock_update_op.assert_called_once_with(valid_env_file, mock_env) - mock_sleep.assert_called_once() - mock_is_alive.assert_called_once_with(node_type=NodeType.MIRAGE, is_mirage_boot=True) - - -def test_mirage_repair(): - pass From e6c33c647b2462233ccd6494e52c795e5e7cab03 Mon Sep 17 00:00:00 2001 From: badrogger Date: Wed, 4 Jun 2025 16:28:42 +0100 Subject: [PATCH 086/332] Fix linter --- .gitignore | 4 +- node_cli/cli/mirage_node.py | 5 +- node_cli/core/checks.py | 3 - node_cli/core/static_config.py | 6 +- tests/conftest.py | 3 +- tests/core/core_logs_test.py | 3 +- tests/core/core_schains_test.py | 1 - tests/mirage/mirage_node_test.py | 147 +++++++++++++++++++++++++++++++ 8 files changed, 159 insertions(+), 13 deletions(-) create mode 100644 tests/mirage/mirage_node_test.py diff --git a/.gitignore b/.gitignore index e183f4a7..c1b8fee2 100644 --- a/.gitignore +++ b/.gitignore @@ -122,4 +122,6 @@ test-env nginx.conf tests/.skale/node_data/docker.json tests/.skale/node_data/node_options.json -tests/.skale/config/nginx.conf.j2 \ No newline at end of file +tests/.skale/config/nginx.conf.j2 + +.zed diff --git a/node_cli/cli/mirage_node.py b/node_cli/cli/mirage_node.py index 27361c0c..af4d8455 100644 --- a/node_cli/cli/mirage_node.py +++ b/node_cli/cli/mirage_node.py @@ -18,7 +18,6 @@ # along with this program. 
If not, see . import click -from yaml import safe_load from node_cli.core.node import get_node_signature, backup, get_node_info from node_cli.mirage.mirage_node import restore_mirage, request_repair @@ -106,14 +105,14 @@ def restore_node(backup_path, env_file, config_only): is_flag=True, callback=abort_if_false, expose_value=False, - prompt=TEXTS['mirage']['node']['repair']['warning'] + prompt=TEXTS['mirage']['node']['repair']['warning'], ) @click.option( '--snapshot-from', type=URL_TYPE, default='', hidden=True, - help=TEXTS['mirage']['node']['repair']['snapshot_from'] + help=TEXTS['mirage']['node']['repair']['snapshot_from'], ) def repair(snapshot_from: str = '') -> None: request_repair(snapshot_from=snapshot_from) diff --git a/node_cli/core/checks.py b/node_cli/core/checks.py index 951a6b22..929cc60b 100644 --- a/node_cli/core/checks.py +++ b/node_cli/core/checks.py @@ -45,7 +45,6 @@ import docker # type: ignore import psutil # type: ignore -import yaml from debian import debian_support from packaging.version import parse as version_parse @@ -55,8 +54,6 @@ DOCKER_CONFIG_FILEPATH, DOCKER_DAEMON_HOSTS, REPORTS_PATH, - STATIC_PARAMS_FILEPATH, - MIRAGE_STATIC_PARAMS_FILEPATH, ) from node_cli.core.host import is_ufw_ipv6_chain_exists, is_ufw_ipv6_option_enabled from node_cli.core.resources import get_disk_size diff --git a/node_cli/core/static_config.py b/node_cli/core/static_config.py index f3fcf923..d5252687 100644 --- a/node_cli/core/static_config.py +++ b/node_cli/core/static_config.py @@ -1,7 +1,11 @@ import os import yaml -from node_cli.configs import CONTAINER_CONFIG_PATH, MIRAGE_STATIC_PARAMS_FILEPATH, STATIC_PARAMS_FILEPATH +from node_cli.configs import ( + CONTAINER_CONFIG_PATH, + MIRAGE_STATIC_PARAMS_FILEPATH, + STATIC_PARAMS_FILEPATH, +) from node_cli.utils.node_type import NodeType diff --git a/tests/conftest.py b/tests/conftest.py index cf5c4bdc..17b43c4a 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -38,7 +38,7 @@ 
REMOVED_CONTAINERS_FOLDER_PATH, SCHAIN_NODE_DATA_PATH, NGINX_CONFIG_FILEPATH, - REDIS_URI + REDIS_URI, ) from node_cli.configs.node_options import NODE_OPTIONS_FILEPATH from node_cli.configs.ssl import SSL_FOLDER_PATH @@ -306,4 +306,3 @@ def set_env_var(name, value): def redis_client(): cpool = redis.ConnectionPool.from_url(REDIS_URI) return redis.Redis(connection_pool=cpool) - return rs diff --git a/tests/core/core_logs_test.py b/tests/core/core_logs_test.py index 54a6d2cc..b3414797 100644 --- a/tests/core/core_logs_test.py +++ b/tests/core/core_logs_test.py @@ -2,7 +2,6 @@ import time import shlex import shutil -from datetime import datetime import pytest import freezegun @@ -11,7 +10,7 @@ from node_cli.configs import G_CONF_HOME, SKALE_TMP_DIR from node_cli.utils.docker_utils import docker_client from node_cli.utils.helper import run_cmd, safe_mkdir -from tests.helper import CURRENT_TIMESTAMP, CURRENT_DATETIME +from tests.helper import CURRENT_DATETIME TEST_DUMP_DIR_PATH = os.path.join(SKALE_TMP_DIR, 'skale-logs-dump-2020-07-16--12-38-00') diff --git a/tests/core/core_schains_test.py b/tests/core/core_schains_test.py index 61706524..4b8e0fcb 100644 --- a/tests/core/core_schains_test.py +++ b/tests/core/core_schains_test.py @@ -1,5 +1,4 @@ import os -import datetime from unittest import mock from pathlib import Path diff --git a/tests/mirage/mirage_node_test.py b/tests/mirage/mirage_node_test.py new file mode 100644 index 00000000..285729f4 --- /dev/null +++ b/tests/mirage/mirage_node_test.py @@ -0,0 +1,147 @@ +from unittest import mock + +import freezegun + +from node_cli.configs import SKALE_DIR +from node_cli.mirage.mirage_boot import init as init_boot, migrate, update +from node_cli.mirage.mirage_node import request_repair, restore_mirage +from node_cli.utils.node_type import NodeType +from tests.helper import CURRENT_DATETIME, CURRENT_TIMESTAMP + + +@mock.patch('node_cli.mirage.mirage_node.time.sleep') 
+@mock.patch('node_cli.mirage.mirage_node.restore_mirage_op') +@mock.patch('node_cli.mirage.mirage_node.save_env_params') +@mock.patch('node_cli.mirage.mirage_node.compose_node_env') +def test_restore_mirage( + mock_compose_env, + mock_save_env, + mock_restore_op, + mock_sleep, + valid_env_file, + ensure_meta_removed, +): + mock_env = {'ENV_TYPE': 'devnet'} + mock_compose_env.return_value = mock_env + mock_restore_op.return_value = True + backup_path = '/fake/backup' + + restore_mirage(backup_path, valid_env_file) + + mock_compose_env.assert_called_once_with(valid_env_file, node_type=NodeType.MIRAGE) + mock_save_env.assert_called_once_with(valid_env_file) + expected_env = {**mock_env, 'SKALE_DIR': SKALE_DIR} + mock_restore_op.assert_called_once_with(expected_env, backup_path, config_only=False) + mock_sleep.assert_called_once() + + +@mock.patch('node_cli.mirage.mirage_boot.is_base_containers_alive', return_value=True) +@mock.patch('node_cli.mirage.mirage_boot.time.sleep') +@mock.patch('node_cli.mirage.mirage_boot.init_mirage_boot_op') +@mock.patch('node_cli.mirage.mirage_boot.compose_node_env') +def test_init_mirage_boot( + mock_compose_env, + mock_init_op, + mock_sleep, + mock_is_alive, + valid_env_file, + ensure_meta_removed, +): + mock_env = {'ENV_TYPE': 'devnet'} + mock_compose_env.return_value = mock_env + + init_boot(valid_env_file) + + mock_compose_env.assert_called_once_with( + valid_env_file, + node_type=NodeType.MIRAGE, + is_mirage_boot=True, + ) + mock_init_op.assert_called_once_with(valid_env_file, mock_env) + mock_sleep.assert_called_once() + mock_is_alive.assert_called_once_with(node_type=NodeType.MIRAGE, is_mirage_boot=True) + + +@mock.patch('node_cli.utils.decorators.is_user_valid', return_value=True) +@mock.patch('node_cli.mirage.mirage_boot.is_base_containers_alive', return_value=True) +@mock.patch('node_cli.mirage.mirage_boot.time.sleep') +@mock.patch('node_cli.mirage.mirage_boot.migrate_mirage_boot_op') 
+@mock.patch('node_cli.mirage.mirage_boot.compose_node_env') +def test_migrate_mirage_boot( + mock_compose_env, + mock_migrate_op, + mock_sleep, + mock_is_alive, + mock_is_user_valid, + valid_env_file, + inited_node, + resource_alloc, + meta_file_v3, +): + mock_env = {'ENV_TYPE': 'devnet'} + mock_compose_env.return_value = mock_env + mock_migrate_op.return_value = True + pull_config_for_schain = 'mirage' + + migrate(valid_env_file, pull_config_for_schain) + + mock_compose_env.assert_called_once_with( + valid_env_file, + inited_node=True, + sync_schains=False, + pull_config_for_schain=pull_config_for_schain, + node_type=NodeType.MIRAGE, + ) + mock_migrate_op.assert_called_once_with(valid_env_file, mock_env) + mock_sleep.assert_called_once() + mock_is_alive.assert_called_once_with(node_type=NodeType.MIRAGE) + + +@mock.patch('node_cli.utils.decorators.is_user_valid', return_value=True) +@mock.patch('node_cli.mirage.mirage_boot.is_base_containers_alive', return_value=True) +@mock.patch('node_cli.mirage.mirage_boot.time.sleep') +@mock.patch('node_cli.mirage.mirage_boot.update_mirage_boot_op') +@mock.patch('node_cli.mirage.mirage_boot.compose_node_env') +def test_update_mirage_boot( + mock_compose_env, + mock_update_op, + mock_sleep, + mock_is_alive, + mock_is_user_valid, + valid_env_file, + inited_node, + resource_alloc, + meta_file_v3, +): + mock_env = {'ENV_TYPE': 'devnet'} + mock_compose_env.return_value = mock_env + mock_update_op.return_value = True + pull_config_for_schain = 'mirage' + + update(valid_env_file, pull_config_for_schain) + + mock_compose_env.assert_called_once_with( + valid_env_file, + inited_node=True, + sync_schains=False, + pull_config_for_schain=pull_config_for_schain, + node_type=NodeType.MIRAGE, + is_mirage_boot=True, + ) + mock_update_op.assert_called_once_with(valid_env_file, mock_env) + mock_sleep.assert_called_once() + mock_is_alive.assert_called_once_with(node_type=NodeType.MIRAGE, is_mirage_boot=True) + + 
+@freezegun.freeze_time(CURRENT_DATETIME) +@mock.patch('node_cli.mirage.mirage_node.compose_node_env', return_value={'ENV_TYPE': 'devnet'}) +@mock.patch( + 'node_cli.mirage.mirage_node.get_static_params', return_value={'info': {'chain_name': 'test'}} +) +def test_mirage_repair(compsoe_node_env_mock, get_statis_params_mock, redis_client, inited_node): + request_repair() + assert redis_client.get('test_repair_ts') == f'{CURRENT_TIMESTAMP}'.encode('utf-8') + assert redis_client.get('test_snapshot_from') == b'' + request_repair(snapshot_from='127.0.0.1') + assert redis_client.get('test_repair_ts') == f'{CURRENT_TIMESTAMP}'.encode('utf-8') + assert redis_client.get('test_snapshot_from') == b'127.0.0.1' From fc4f0cc9ea33df2a729672281d24afaa4dd2d4a3 Mon Sep 17 00:00:00 2001 From: badrogger Date: Tue, 10 Jun 2025 17:17:33 +0100 Subject: [PATCH 087/332] Add new cmds for mirage. Restructure CliMeta --- node_cli/cli/mirage_node.py | 17 +- node_cli/cli/node.py | 4 +- node_cli/core/mirage_node.py | 29 ++- node_cli/core/node.py | 6 +- node_cli/operations/__init__.py | 6 +- node_cli/operations/base.py | 84 +++------ .../{skale_node.py => config_repo.py} | 0 node_cli/operations/mirage.py | 139 +++++++++++++++ node_cli/utils/meta.py | 168 ++++++++++++------ 9 files changed, 331 insertions(+), 122 deletions(-) rename node_cli/operations/{skale_node.py => config_repo.py} (100%) create mode 100644 node_cli/operations/mirage.py diff --git a/node_cli/cli/mirage_node.py b/node_cli/cli/mirage_node.py index e983dc47..d3fe259a 100644 --- a/node_cli/cli/mirage_node.py +++ b/node_cli/cli/mirage_node.py @@ -20,7 +20,7 @@ import click from node_cli.core.node import get_node_signature, backup, get_node_info -from node_cli.core.mirage_node import restore_mirage +from node_cli.core.mirage_node import migrate_from_boot, restore_mirage from node_cli.utils.helper import error_exit, streamed_cmd, abort_if_false @@ -94,3 +94,18 @@ def backup_node(backup_folder_path): @streamed_cmd def 
restore_node(backup_path, env_file, config_only): restore_mirage(backup_path, env_file, config_only) + + +@node.command('migrate', help='Switch from boot to regular Mirage node.') +@click.argument('env_filepath') +@click.option( + '--yes', + is_flag=True, + callback=abort_if_false, + expose_value=False, + prompt='Are you sure you want to migrate to regular Mirage node? The action cannot be undone', +) +@streamed_cmd +def migrate_node(env_filepath: str) -> None: + click.echo("Placeholder: Command 'mirage node migrate' is not yet implemented.") + migrate_from_boot(env_filepath=env_filepath) diff --git a/node_cli/cli/node.py b/node_cli/cli/node.py index a7f3d17c..db4d6fa5 100644 --- a/node_cli/cli/node.py +++ b/node_cli/cli/node.py @@ -41,7 +41,7 @@ from node_cli.utils.decorators import check_inited from node_cli.utils.helper import abort_if_false, streamed_cmd, IP_TYPE from node_cli.utils.texts import safe_load_texts -from node_cli.utils.meta import get_meta_info +from node_cli.utils.meta import CliMetaManager from node_cli.utils.print_formatters import print_meta_info @@ -248,7 +248,7 @@ def configure_firewall(monitoring): @check_inited @click.option('--json', 'raw', is_flag=True, help=TEXTS['common']['json']) def version(raw: bool) -> None: - meta_info = get_meta_info(raw=raw) + meta_info = CliMetaManager().get_meta_info(raw=raw) if raw: print(meta_info) else: diff --git a/node_cli/core/mirage_node.py b/node_cli/core/mirage_node.py index 4a4101f9..2eff8c22 100644 --- a/node_cli/core/mirage_node.py +++ b/node_cli/core/mirage_node.py @@ -23,13 +23,13 @@ from node_cli.configs import SKALE_DIR, RESTORE_SLEEP_TIMEOUT from node_cli.core.host import save_env_params -from node_cli.core.node import compose_node_env -from node_cli.utils.decorators import check_not_inited +from node_cli.core.node import compose_node_env, is_base_containers_alive +from node_cli.operations import update_mirage_op, restore_mirage_op, MirageUpdateType +from node_cli.utils.decorators import 
check_inited, check_not_inited, check_user from node_cli.utils.exit_codes import CLIExitCodes from node_cli.utils.helper import error_exit from node_cli.utils.node_type import NodeType -from node_cli.operations import restore_mirage_op - +from node_cli.utils.print_formatters import print_node_cmd_error logger = logging.getLogger(__name__) @@ -47,3 +47,24 @@ def restore_mirage(backup_path, env_filepath, config_only=False): error_exit('Restore operation failed', exit_code=CLIExitCodes.OPERATION_EXECUTION_ERROR) time.sleep(RESTORE_SLEEP_TIMEOUT) print('Mirage node is restored from backup') + + +@check_inited +@check_user +def migrate_from_boot( + env_filepath: str, +) -> None: + logger.info('Node update started') + env = compose_node_env( + env_filepath, + inited_node=True, + sync_schains=False, + node_type=NodeType.MIRAGE, + ) + migrate_ok = update_mirage_op(env_filepath, env, update_type=MirageUpdateType.INFRA_ONLY) + alive = is_base_containers_alive(node_type=NodeType.MIRAGE) + if not migrate_ok or not alive: + print_node_cmd_error() + return + else: + logger.info('Mirgration from boot to mirage completed successfully') diff --git a/node_cli/core/node.py b/node_cli/core/node.py index f6b534cd..1d4b49d7 100644 --- a/node_cli/core/node.py +++ b/node_cli/core/node.py @@ -69,7 +69,7 @@ get_request, post_request, ) -from node_cli.utils.meta import get_meta_info +from node_cli.utils.meta import CliMetaManager from node_cli.utils.texts import safe_load_texts from node_cli.utils.exit_codes import CLIExitCodes from node_cli.utils.decorators import check_not_inited, check_inited, check_user @@ -198,7 +198,7 @@ def init_sync( @check_user def update_sync(env_filepath: str, unsafe_ok: bool = False) -> None: logger.info('Node update started') - prev_version = get_meta_info().version + prev_version = CliMetaManager().get_meta_info().version if (__version__ == 'test' or __version__.startswith('2.6')) and prev_version == '2.5.0': migrate_2_6() env = compose_node_env(env_filepath, 
node_type=NodeType.SYNC) @@ -284,7 +284,7 @@ def update( error_msg = 'Cannot update safely' error_exit(error_msg, exit_code=CLIExitCodes.UNSAFE_UPDATE) - prev_version = get_meta_info().version + prev_version = CliMetaManager().get_meta_info().version if (__version__ == 'test' or __version__.startswith('2.6')) and prev_version == '2.5.0': migrate_2_6() logger.info('Node update started') diff --git a/node_cli/operations/__init__.py b/node_cli/operations/__init__.py index 159c0e16..a0cafed8 100644 --- a/node_cli/operations/__init__.py +++ b/node_cli/operations/__init__.py @@ -28,7 +28,11 @@ turn_off as turn_off_op, turn_on as turn_on_op, restore as restore_op, - restore_mirage as restore_mirage_op, cleanup_sync as cleanup_sync_op, configure_nftables, ) +from node_cli.operations.mirage import ( # noqa + update_mirage as update_mirage_op, + MirageUpdateType, + restore_mirage as restore_mirage_op, +) diff --git a/node_cli/operations/base.py b/node_cli/operations/base.py index ccc29a1b..521620e0 100644 --- a/node_cli/operations/base.py +++ b/node_cli/operations/base.py @@ -48,7 +48,7 @@ from node_cli.cli.info import VERSION, TYPE from node_cli.operations.common import configure_filebeat, configure_flask, unpack_backup_archive from node_cli.operations.docker_lvmpy import lvmpy_install -from node_cli.operations.skale_node import ( +from node_cli.operations.config_repo import ( download_skale_node, sync_skale_node, update_images, @@ -65,7 +65,7 @@ remove_dynamic_containers, ) from node_cli.utils.helper import str_to_bool, rm_dir -from node_cli.utils.meta import get_meta_info, update_meta +from node_cli.utils.meta import CliMetaManager from node_cli.utils.node_type import NodeType from node_cli.utils.print_formatters import print_failed_requirements_checks @@ -127,7 +127,8 @@ def update(env_filepath: str, env: Dict, node_type: NodeType) -> bool: prepare_host(env_filepath, env['ENV_TYPE'], allocation=True) init_shared_space_volume(env['ENV_TYPE']) - current_stream = 
get_meta_info().config_stream + meta_manager = CliMetaManager() + current_stream = meta_manager.get_meta_info().config_stream skip_cleanup = env.get('SKIP_DOCKER_CLEANUP') == 'True' if not skip_cleanup and current_stream != env['CONTAINER_CONFIGS_STREAM']: logger.info( @@ -137,7 +138,7 @@ def update(env_filepath: str, env: Dict, node_type: NodeType) -> bool: ) docker_cleanup() - update_meta( + meta_manager.update_meta( VERSION, env['CONTAINER_CONFIGS_STREAM'], env['DOCKER_LVMPY_STREAM'], @@ -167,7 +168,8 @@ def migrate_mirage_boot(env_filepath: str, env: Dict) -> bool: prepare_host(env_filepath, env['ENV_TYPE']) - current_stream = get_meta_info().config_stream + meta_manager = CliMetaManager() + current_stream = meta_manager.get_meta_info().config_stream skip_cleanup = env.get('SKIP_DOCKER_CLEANUP') == 'True' if not skip_cleanup and current_stream != env['CONTAINER_CONFIGS_STREAM']: logger.info( @@ -177,7 +179,7 @@ def migrate_mirage_boot(env_filepath: str, env: Dict) -> bool: ) docker_cleanup() - update_meta( + meta_manager.update_meta( VERSION, env['CONTAINER_CONFIGS_STREAM'], None, @@ -209,7 +211,8 @@ def update_mirage_boot(env_filepath: str, env: Dict) -> bool: prepare_host(env_filepath, env['ENV_TYPE']) - current_stream = get_meta_info().config_stream + meta_manager = CliMetaManager() + current_stream = meta_manager.get_meta_info().config_stream skip_cleanup = env.get('SKIP_DOCKER_CLEANUP') == 'True' if not skip_cleanup and current_stream != env['CONTAINER_CONFIGS_STREAM']: logger.info( @@ -219,7 +222,7 @@ def update_mirage_boot(env_filepath: str, env: Dict) -> bool: ) docker_cleanup() - update_meta( + meta_manager.update_meta( VERSION, env['CONTAINER_CONFIGS_STREAM'], None, @@ -252,7 +255,8 @@ def init(env_filepath: str, env: dict, node_type: NodeType) -> None: lvmpy_install(env) init_shared_space_volume(env['ENV_TYPE']) - update_meta( + meta_manager = CliMetaManager() + meta_manager.update_meta( VERSION, env['CONTAINER_CONFIGS_STREAM'], 
env['DOCKER_LVMPY_STREAM'], @@ -285,7 +289,8 @@ def init_mirage_boot(env_filepath: str, env: dict) -> None: generate_nginx_config() prepare_block_device(env['DISK_MOUNTPOINT'], force=env['ENFORCE_BTRFS'] == 'True') - update_meta( + meta_manager = CliMetaManager() + meta_manager.update_meta( VERSION, env['CONTAINER_CONFIGS_STREAM'], None, @@ -331,7 +336,8 @@ def init_sync( generate_nginx_config() prepare_block_device(env['DISK_MOUNTPOINT'], force=env['ENFORCE_BTRFS'] == 'True') - update_meta( + meta_manager = CliMetaManager() + meta_manager.update_meta( VERSION, env['CONTAINER_CONFIGS_STREAM'], env['DOCKER_LVMPY_STREAM'], @@ -370,7 +376,8 @@ def update_sync(env_filepath: str, env: Dict) -> bool: prepare_host(env_filepath, env['ENV_TYPE'], allocation=True) - update_meta( + meta_manager = CliMetaManager() + meta_manager.update_meta( VERSION, env['CONTAINER_CONFIGS_STREAM'], env['DOCKER_LVMPY_STREAM'], @@ -392,7 +399,8 @@ def turn_off(env: dict, node_type: NodeType) -> None: def turn_on(env: dict, node_type: NodeType) -> None: logger.info('Turning on the node...') - update_meta( + meta_manager = CliMetaManager() + meta_manager.update_meta( VERSION, env['CONTAINER_CONFIGS_STREAM'], env['DOCKER_LVMPY_STREAM'], @@ -434,7 +442,8 @@ def restore(env, backup_path, node_type: NodeType, config_only=False): lvmpy_install(env) init_shared_space_volume(env['ENV_TYPE']) - update_meta( + meta_manager = CliMetaManager() + meta_manager.update_meta( VERSION, env['CONTAINER_CONFIGS_STREAM'], env['DOCKER_LVMPY_STREAM'], @@ -459,53 +468,6 @@ def restore(env, backup_path, node_type: NodeType, config_only=False): return True -def restore_mirage(env, backup_path, config_only=False): - unpack_backup_archive(backup_path) - failed_checks = run_host_checks( - env['DISK_MOUNTPOINT'], - TYPE, - env['ENV_TYPE'], - CONTAINER_CONFIG_PATH, - check_type=CheckType.PREINSTALL, - ) - if failed_checks: - print_failed_requirements_checks(failed_checks) - return False - - 
ensure_btrfs_kernel_module_autoloaded() - - if env.get('SKIP_DOCKER_CONFIG') != 'True': - configure_docker() - - enable_monitoring = str_to_bool(env.get('MONITORING_CONTAINERS', 'False')) - configure_nftables(enable_monitoring=enable_monitoring) - - link_env_file() - - update_meta( - VERSION, - env['CONTAINER_CONFIGS_STREAM'], - None, - distro.id(), - distro.version(), - ) - - if not config_only: - compose_up(env=env, node_type=NodeType.MIRAGE) - - failed_checks = run_host_checks( - env['DISK_MOUNTPOINT'], - TYPE, - env['ENV_TYPE'], - CONTAINER_CONFIG_PATH, - check_type=CheckType.POSTINSTALL, - ) - if failed_checks: - print_failed_requirements_checks(failed_checks) - return False - return True - - def cleanup_sync(env, schain_name: str) -> None: turn_off(env, node_type=NodeType.SYNC) cleanup_sync_datadir(schain_name=schain_name) diff --git a/node_cli/operations/skale_node.py b/node_cli/operations/config_repo.py similarity index 100% rename from node_cli/operations/skale_node.py rename to node_cli/operations/config_repo.py diff --git a/node_cli/operations/mirage.py b/node_cli/operations/mirage.py new file mode 100644 index 00000000..4504c9d3 --- /dev/null +++ b/node_cli/operations/mirage.py @@ -0,0 +1,139 @@ +# -*- coding: utf-8 -*- +# +# This file is part of node-cli +# +# Copyright (C) 2021-Present SKALE Labs +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Affero General Public License for more details. +# +# You should have received a copy of the GNU Affero General Public License +# along with this program. If not, see . 
+ +import logging +from enum import Enum + +import distro + +from node_cli.cli.info import TYPE, VERSION +from node_cli.configs import CONTAINER_CONFIG_PATH +from node_cli.core.checks import CheckType +from node_cli.core.checks import run_checks as run_host_checks +from node_cli.core.docker_config import configure_docker +from node_cli.core.host import ensure_btrfs_kernel_module_autoloaded, link_env_file, prepare_host +from node_cli.core.nftables import configure_nftables +from node_cli.core.nginx import generate_nginx_config +from node_cli.operations.base import checked_host +from node_cli.operations.common import unpack_backup_archive +from node_cli.operations.config_repo import ( + sync_skale_node, + update_images, +) +from node_cli.utils.docker_utils import ( + NodeType, + compose_rm, + compose_up, + docker_cleanup, + remove_dynamic_containers, +) +from node_cli.utils.helper import str_to_bool +from node_cli.utils.meta import MirageCliMetaManager +from node_cli.utils.print_formatters import print_failed_requirements_checks + +logger = logging.getLogger(__name__) + + +class MirageUpdateType(Enum): + REGULAR = 'regular' + INFRA_ONLY = 'infra_only' + + +@checked_host +def update_mirage(env_filepath: str, env: dict, update_type: MirageUpdateType) -> bool: + compose_rm(node_type=NodeType.MIRAGE, env=env) + if update_type != MirageUpdateType.INFRA_ONLY: + remove_dynamic_containers() + sync_skale_node() + ensure_btrfs_kernel_module_autoloaded() + + if env.get('SKIP_DOCKER_CONFIG') != 'True': + configure_docker() + + configure_nftables() + generate_nginx_config() + + prepare_host(env_filepath, env['ENV_TYPE'], allocation=True) + meta_manager = MirageCliMetaManager() + current_stream = meta_manager.get_meta_info().config_stream + skip_cleanup = env.get('SKIP_DOCKER_CLEANUP') == 'True' + if not skip_cleanup and current_stream != env['CONTAINER_CONFIGS_STREAM']: + logger.info( + 'Stream version was changed from %s to %s', + current_stream, + 
env['CONTAINER_CONFIGS_STREAM'], + ) + docker_cleanup() + + MirageCliMetaManager().update_meta( + VERSION, + env['CONTAINER_CONFIGS_STREAM'], + distro.id(), + distro.version(), + ) + update_images(env=env, node_type=NodeType.MIRAGE) + compose_up(env=env, node_type=NodeType.MIRAGE) + return True + + +def restore_mirage(env, backup_path, config_only=False): + unpack_backup_archive(backup_path) + failed_checks = run_host_checks( + env['DISK_MOUNTPOINT'], + TYPE, + env['ENV_TYPE'], + CONTAINER_CONFIG_PATH, + check_type=CheckType.PREINSTALL, + ) + if failed_checks: + print_failed_requirements_checks(failed_checks) + return False + + ensure_btrfs_kernel_module_autoloaded() + + if env.get('SKIP_DOCKER_CONFIG') != 'True': + configure_docker() + + enable_monitoring = str_to_bool(env.get('MONITORING_CONTAINERS', 'False')) + configure_nftables(enable_monitoring=enable_monitoring) + + link_env_file() + + meta_manager = MirageCliMetaManager() + meta_manager.update_meta( + VERSION, + env['CONTAINER_CONFIGS_STREAM'], + distro.id(), + distro.version(), + ) + + if not config_only: + compose_up(env=env, node_type=NodeType.MIRAGE) + + failed_checks = run_host_checks( + env['DISK_MOUNTPOINT'], + TYPE, + env['ENV_TYPE'], + CONTAINER_CONFIG_PATH, + check_type=CheckType.POSTINSTALL, + ) + if failed_checks: + print_failed_requirements_checks(failed_checks) + return False + return True diff --git a/node_cli/utils/meta.py b/node_cli/utils/meta.py index a8237026..a5c2e593 100644 --- a/node_cli/utils/meta.py +++ b/node_cli/utils/meta.py @@ -1,7 +1,9 @@ +import abc import json import os -from collections import namedtuple +from dataclasses import dataclass from typing import Optional + from node_cli.configs import META_FILEPATH DEFAULT_VERSION = '1.0.0' @@ -11,62 +13,128 @@ DEFAULT_OS_VERSION = '18.04' -class CliMeta( - namedtuple('Node', ['version', 'config_stream', 'docker_lvmpy_stream', 'os_id', 'os_version']) -): - __slots__ = () - - def __new__( - cls, - version=DEFAULT_VERSION, - 
config_stream=DEFAULT_CONFIG_STREAM, - docker_lvmpy_stream: Optional[str] = DEFAULT_DOCKER_LVMPY_STREAM, - os_id=DEFAULT_OS_ID, - os_version=DEFAULT_OS_VERSION, - ): - return super(CliMeta, cls).__new__( - cls, version, config_stream, docker_lvmpy_stream, os_id, os_version - ) +@dataclass +class CliMetaBase(abc.ABC): + version: str = DEFAULT_VERSION + config_stream: str = DEFAULT_CONFIG_STREAM + os_id: str = DEFAULT_OS_ID + os_version: str = DEFAULT_OS_VERSION + + @abc.abstractmethod + def asdict(self) -> dict: + pass + + +class CliMeta(CliMetaBase): + docker_lvmpy_stream: str = DEFAULT_DOCKER_LVMPY_STREAM + + def asdict(self) -> dict: + return { + 'version': self.version, + 'config_stream': self.config_stream, + 'docker_lvmpy_stream': self.docker_lvmpy_stream, + 'os_id': self.os_id, + 'os_version': self.os_version, + } + + +class MirageCliMeta(CliMetaBase): + def asdict(self) -> dict: + return { + 'version': self.version, + 'config_stream': self.config_stream, + 'os_id': self.os_id, + 'os_version': self.os_version, + } -def get_meta_info(raw: bool = False) -> CliMeta: - if not os.path.isfile(META_FILEPATH): - return None - with open(META_FILEPATH) as meta_file: - plain_meta = json.load(meta_file) - if raw: - return plain_meta - return CliMeta(**plain_meta) +class BaseCliMetaManager(abc.ABC): + def __init__(self, meta_filepath: str = META_FILEPATH): + self.meta_filepath = meta_filepath + def _get_plain_meta(self) -> dict: + if not os.path.isfile(self.meta_filepath): + return {} + with open(self.meta_filepath) as meta_file: + return json.load(meta_file) -def save_meta(meta: CliMeta) -> None: - with open(META_FILEPATH, 'w') as meta_file: - json.dump(meta._asdict(), meta_file) + @abc.abstractmethod + def get_meta_info(self, raw: bool = False) -> CliMetaBase | dict: + pass + def save_meta(self, meta: CliMetaBase) -> None: + with open(self.meta_filepath, 'w') as meta_file: + json.dump(meta.asdict(), meta_file) -def compose_default_meta() -> CliMeta: - return CliMeta( - 
version=DEFAULT_VERSION, - docker_lvmpy_stream=DEFAULT_DOCKER_LVMPY_STREAM, - config_stream=DEFAULT_CONFIG_STREAM, - os_id=DEFAULT_OS_ID, - os_version=DEFAULT_OS_VERSION, - ) + @abc.abstractmethod + def compose_default_meta(self) -> CliMetaBase: + pass + def ensure_meta(self, meta: CliMetaBase | None = None) -> None: + if not self.get_meta_info(): + meta = meta or self.compose_default_meta() + self.save_meta(meta) -def ensure_meta(meta: Optional[CliMeta] = None) -> None: - if not get_meta_info(): - meta = meta or compose_default_meta() - save_meta(meta) + @abc.abstractmethod + def update_meta(self, *args, **kwargs) -> None: + pass -def update_meta( - version: str, - config_stream: str, - docker_lvmpy_stream: Optional[str], - os_id: str, - os_version: str, -) -> None: - ensure_meta() - meta = CliMeta(version, config_stream, docker_lvmpy_stream, os_id, os_version) - save_meta(meta) +class CliMetaManager(BaseCliMetaManager): + def get_meta_info(self, raw: bool = False) -> CliMeta | dict: + plain_meta = self._get_plain_meta() + required_fields = set(CliMeta.__dataclass_fields__.keys()) + clean_plain_meta = {k: v for k, v in plain_meta.items() if k in required_fields} + if raw: + return clean_plain_meta + return CliMeta(**clean_plain_meta) + + def compose_default_meta(self) -> CliMeta: + return CliMeta( + version=DEFAULT_VERSION, + docker_lvmpy_stream=DEFAULT_DOCKER_LVMPY_STREAM, + config_stream=DEFAULT_CONFIG_STREAM, + os_id=DEFAULT_OS_ID, + os_version=DEFAULT_OS_VERSION, + ) + + def update_meta( + self, + version: str, + config_stream: str, + docker_lvmpy_stream: Optional[str], + os_id: str, + os_version: str, + ) -> None: + self.ensure_meta() + meta = CliMeta(version, config_stream, docker_lvmpy_stream, os_id, os_version) + self.save_meta(meta) + + +class MirageCliMetaManager(BaseCliMetaManager): + def get_meta_info(self, raw: bool = False) -> MirageCliMeta | dict: + plain_meta = self._get_plain_meta() + required_fields = 
set(MirageCliMeta.__dataclass_fields__.keys()) + clean_plain_meta = {k: v for k, v in plain_meta.items() if k in required_fields} + if raw: + return clean_plain_meta + return MirageCliMeta(**clean_plain_meta) + + def compose_default_meta(self) -> MirageCliMeta: + return MirageCliMeta( + version=DEFAULT_VERSION, + config_stream=DEFAULT_CONFIG_STREAM, + os_id=DEFAULT_OS_ID, + os_version=DEFAULT_OS_VERSION, + ) + + def update_meta( + self, + version: str, + config_stream: str, + os_id: str, + os_version: str, + ) -> None: + self.ensure_meta() + meta = MirageCliMeta(version, config_stream, os_id, os_version) + self.save_meta(meta) From 66e829a2abca15ac7de7821f5b9375befd18800c Mon Sep 17 00:00:00 2001 From: badrogger Date: Tue, 10 Jun 2025 20:18:02 +0100 Subject: [PATCH 088/332] Fix tests. Add addtional ones --- node_cli/utils/meta.py | 31 +++-- scripts/run_tests.sh | 2 +- tests/cli/mirage_cli_test.py | 10 ++ tests/cli/node_test.py | 8 +- tests/cli/sync_node_test.py | 7 +- tests/core/core_mirage_test.py | 32 ++++- tests/core/core_node_test.py | 2 +- tests/tools_meta_test.py | 246 ++++++++++++++++++++++++++++++--- 8 files changed, 297 insertions(+), 41 deletions(-) diff --git a/node_cli/utils/meta.py b/node_cli/utils/meta.py index a5c2e593..4b82c300 100644 --- a/node_cli/utils/meta.py +++ b/node_cli/utils/meta.py @@ -25,6 +25,7 @@ def asdict(self) -> dict: pass +@dataclass class CliMeta(CliMetaBase): docker_lvmpy_stream: str = DEFAULT_DOCKER_LVMPY_STREAM @@ -38,6 +39,7 @@ def asdict(self) -> dict: } +@dataclass class MirageCliMeta(CliMetaBase): def asdict(self) -> dict: return { @@ -49,7 +51,7 @@ def asdict(self) -> dict: class BaseCliMetaManager(abc.ABC): - def __init__(self, meta_filepath: str = META_FILEPATH): + def __init__(self, meta_filepath: str = META_FILEPATH) -> None: self.meta_filepath = meta_filepath def _get_plain_meta(self) -> dict: @@ -59,7 +61,7 @@ def _get_plain_meta(self) -> dict: return json.load(meta_file) @abc.abstractmethod - def get_meta_info(self, 
raw: bool = False) -> CliMetaBase | dict: + def get_meta_info(self, raw: bool = False) -> CliMetaBase | dict | None: pass def save_meta(self, meta: CliMetaBase) -> None: @@ -81,10 +83,13 @@ def update_meta(self, *args, **kwargs) -> None: class CliMetaManager(BaseCliMetaManager): - def get_meta_info(self, raw: bool = False) -> CliMeta | dict: + def get_meta_info(self, raw: bool = False) -> CliMeta | dict | None: plain_meta = self._get_plain_meta() - required_fields = set(CliMeta.__dataclass_fields__.keys()) - clean_plain_meta = {k: v for k, v in plain_meta.items() if k in required_fields} + if not raw and not plain_meta: + return None + allowed_fields = set(CliMeta.__dataclass_fields__.keys()) + clean_plain_meta = {k: v for k, v in plain_meta.items() if k in allowed_fields} + if raw: return clean_plain_meta return CliMeta(**clean_plain_meta) @@ -107,15 +112,23 @@ def update_meta( os_version: str, ) -> None: self.ensure_meta() - meta = CliMeta(version, config_stream, docker_lvmpy_stream, os_id, os_version) + meta = CliMeta( + version, + config_stream, + os_id, + os_version, + docker_lvmpy_stream, + ) self.save_meta(meta) class MirageCliMetaManager(BaseCliMetaManager): - def get_meta_info(self, raw: bool = False) -> MirageCliMeta | dict: + def get_meta_info(self, raw: bool = False) -> MirageCliMeta | dict | None: plain_meta = self._get_plain_meta() - required_fields = set(MirageCliMeta.__dataclass_fields__.keys()) - clean_plain_meta = {k: v for k, v in plain_meta.items() if k in required_fields} + if not raw and not plain_meta: + return None + allowed_fields = set(MirageCliMeta.__dataclass_fields__.keys()) + clean_plain_meta = {k: v for k, v in plain_meta.items() if k in allowed_fields} if raw: return clean_plain_meta return MirageCliMeta(**clean_plain_meta) diff --git a/scripts/run_tests.sh b/scripts/run_tests.sh index 97676b9a..efc72c6c 100755 --- a/scripts/run_tests.sh +++ b/scripts/run_tests.sh @@ -8,4 +8,4 @@ LVMPY_LOG_DIR="$PROJECT_DIR/tests/" \ 
TEST_HOME_DIR="$PROJECT_DIR/tests/" \ GLOBAL_SKALE_DIR="$PROJECT_DIR/tests/etc/skale" \ DOTENV_FILEPATH='tests/test-env' \ - py.test --cov=$PROJECT_DIR/ --ignore=tests/core/nftables_test.py --ignore=tests/core/migration_test.py tests $@ + py.test --cov=$PROJECT_DIR/ --ignore=tests/core/nftables_test.py --ignore=tests/core/migration_test.py tests/ $@ diff --git a/tests/cli/mirage_cli_test.py b/tests/cli/mirage_cli_test.py index 1a96c22a..3aa2715a 100644 --- a/tests/cli/mirage_cli_test.py +++ b/tests/cli/mirage_cli_test.py @@ -9,6 +9,7 @@ init_node as init_node_placeholder, register_node as register_node_placeholder, update_node as update_node_placeholder, + migrate_node, ) from node_cli.cli.mirage_boot import ( init_boot, @@ -166,3 +167,12 @@ def test_mirage_boot_migrate_pull_config(mock_migrate_core, valid_env_file): assert result.exit_code == 0, f'Output: {result.output}\nException: {result.exception}' mock_migrate_core.assert_called_once_with(valid_env_file, schain_name) + + +@mock.patch('node_cli.cli.mirage_node.migrate_from_boot') +def test_mirage_node_migrate(mock_migrate_core, valid_env_file): + runner = CliRunner() + result = runner.invoke(migrate_node, ['--yes', valid_env_file]) + + assert result.exit_code == 0, f'Output: {result.output}\nException: {result.exception}' + mock_migrate_core.assert_called_once_with(valid_env_file, None) diff --git a/tests/cli/node_test.py b/tests/cli/node_test.py index d1dc82b7..85163c3a 100644 --- a/tests/cli/node_test.py +++ b/tests/cli/node_test.py @@ -334,9 +334,13 @@ def test_restore(mocked_g_config): patch('node_cli.core.resources.get_disk_size', return_value=BIG_DISK_SIZE), patch('node_cli.utils.decorators.is_node_inited', return_value=False), patch( - 'node_cli.core.node.get_meta_info', + 'node_cli.core.node.CliMetaManager.get_meta_info', return_value=CliMeta(version='2.4.0', config_stream='3.0.2'), ), + # patch( + # 'node_cli.core.node.get_meta_info', + # return_value=CliMeta(version='2.4.0', config_stream='3.0.2'), + 
# ), patch('node_cli.operations.base.configure_nftables'), patch('node_cli.configs.env.validate_env_params'), ): @@ -360,7 +364,7 @@ def test_restore_no_snapshot(mocked_g_config): patch('node_cli.core.resources.get_disk_size', return_value=BIG_DISK_SIZE), patch('node_cli.utils.decorators.is_node_inited', return_value=False), patch( - 'node_cli.core.node.get_meta_info', + 'node_cli.core.node.CliMetaManager.get_meta_info', return_value=CliMeta(version='2.4.0', config_stream='3.0.2'), ), patch('node_cli.operations.base.configure_nftables'), diff --git a/tests/cli/sync_node_test.py b/tests/cli/sync_node_test.py index 9206245a..27ecbb52 100644 --- a/tests/cli/sync_node_test.py +++ b/tests/cli/sync_node_test.py @@ -59,7 +59,6 @@ def test_init_sync(mocked_g_config, clean_node_options): def test_init_sync_archive(mocked_g_config, clean_node_options): pathlib.Path(NODE_DATA_PATH).mkdir(parents=True, exist_ok=True) - # with mock.patch('subprocess.run', new=subprocess_run_mock), \ with ( mock.patch('node_cli.core.node.is_base_containers_alive', return_value=True), mock.patch('node_cli.operations.base.cleanup_volume_artifacts'), @@ -71,7 +70,7 @@ def test_init_sync_archive(mocked_g_config, clean_node_options): mock.patch('node_cli.operations.base.link_env_file'), mock.patch('node_cli.operations.base.generate_nginx_config'), mock.patch('node_cli.operations.base.prepare_block_device'), - mock.patch('node_cli.operations.base.update_meta'), + mock.patch('node_cli.operations.base.CliMetaManager.update_meta'), mock.patch('node_cli.operations.base.update_resource_allocation'), mock.patch('node_cli.operations.base.update_images'), mock.patch('node_cli.operations.base.compose_up'), @@ -118,7 +117,7 @@ def test_update_sync(mocked_g_config): mock.patch('node_cli.operations.base.configure_nftables'), mock.patch('node_cli.utils.decorators.is_node_inited', return_value=True), mock.patch( - 'node_cli.core.node.get_meta_info', + 'node_cli.core.node.CliMetaManager.get_meta_info', 
return_value=CliMeta(version='2.6.0', config_stream='3.0.2'), ), mock.patch('node_cli.configs.env.validate_env_params'), @@ -139,7 +138,7 @@ def test_cleanup_sync(mocked_g_config): mock.patch('node_cli.utils.decorators.is_node_inited', return_value=True), mock.patch('node_cli.core.node.compose_node_env', return_value={'SCHAIN_NAME': 'test'}), mock.patch( - 'node_cli.core.node.get_meta_info', + 'node_cli.core.node.CliMetaManager.get_meta_info', return_value=CliMeta(version='2.6.0', config_stream='3.0.2'), ), ): diff --git a/tests/core/core_mirage_test.py b/tests/core/core_mirage_test.py index 62d58681..171d4fed 100644 --- a/tests/core/core_mirage_test.py +++ b/tests/core/core_mirage_test.py @@ -2,7 +2,8 @@ from node_cli.configs import SKALE_DIR from node_cli.core.mirage_boot import init as init_boot, migrate, update -from node_cli.core.mirage_node import restore_mirage +from node_cli.core.mirage_node import restore_mirage, migrate_from_boot +from node_cli.operations.mirage import MirageUpdateType from node_cli.utils.node_type import NodeType @@ -128,3 +129,32 @@ def test_update_mirage_boot( mock_update_op.assert_called_once_with(valid_env_file, mock_env) mock_sleep.assert_called_once() mock_is_alive.assert_called_once_with(node_type=NodeType.MIRAGE, is_mirage_boot=True) + + +@mock.patch('node_cli.core.mirage_node.update_mirage_op') +@mock.patch('node_cli.core.mirage_node.compose_node_env') +@mock.patch('node_cli.utils.decorators.is_user_valid', return_value=True) +def test_migrate_from_boot( + mock_is_user_valid, + mock_compose_env, + mock_migrate_op, + valid_env_file, + inited_node, + resource_alloc, + meta_file_v3, +): + mock_env = {'ENV_TYPE': 'devnet'} + mock_compose_env.return_value = mock_env + mock_migrate_op.return_value = True + + migrate_from_boot(valid_env_file) + + mock_compose_env.assert_called_once_with( + valid_env_file, + inited_node=True, + sync_schains=False, + node_type=NodeType.MIRAGE, + ) + mock_migrate_op.assert_called_once_with( + 
valid_env_file, mock_env, update_type=MirageUpdateType.INFRA_ONLY + ) diff --git a/tests/core/core_node_test.py b/tests/core/core_node_test.py index c71e4683..27c96cf7 100644 --- a/tests/core/core_node_test.py +++ b/tests/core/core_node_test.py @@ -311,7 +311,7 @@ def test_update_node(node_type, mocked_g_config, resource_file, inited_node): mock.patch('node_cli.core.resources.get_disk_size', return_value=BIG_DISK_SIZE), mock.patch('node_cli.core.host.init_data_dir'), mock.patch( - 'node_cli.core.node.get_meta_info', + 'node_cli.core.node.CliMetaManager.get_meta_info', return_value=CliMeta(version='2.6.0', config_stream='3.0.2'), ), mock.patch('node_cli.configs.env.validate_env_params'), diff --git a/tests/tools_meta_test.py b/tests/tools_meta_test.py index 9a217233..3c252b9c 100644 --- a/tests/tools_meta_test.py +++ b/tests/tools_meta_test.py @@ -1,35 +1,34 @@ import json +import os from node_cli.configs import META_FILEPATH from node_cli.utils.meta import ( - CliMeta, - compose_default_meta, DEFAULT_CONFIG_STREAM, DEFAULT_VERSION, - ensure_meta, - get_meta_info, - save_meta, - update_meta, + CliMeta, + CliMetaManager, + MirageCliMeta, + MirageCliMetaManager, ) from tests.helper import TEST_META_V1, TEST_META_V2, TEST_META_V3 def test_get_meta_info_v1(meta_file_v1): - meta = get_meta_info() + meta = CliMetaManager().get_meta_info() assert meta.version == TEST_META_V1['version'] assert meta.config_stream == TEST_META_V1['config_stream'] assert meta.docker_lvmpy_stream == '1.0.0' def test_get_meta_info_v2(meta_file_v2): - meta = get_meta_info() + meta = CliMetaManager().get_meta_info() assert meta.version == TEST_META_V2['version'] assert meta.config_stream == TEST_META_V2['config_stream'] assert meta.docker_lvmpy_stream == TEST_META_V2['docker_lvmpy_stream'] def test_get_meta_info_v3(meta_file_v3): - meta = get_meta_info() + meta = CliMetaManager().get_meta_info() assert meta.version == TEST_META_V3['version'] assert meta.config_stream == 
TEST_META_V3['config_stream'] assert meta.docker_lvmpy_stream == TEST_META_V3['docker_lvmpy_stream'] @@ -38,12 +37,12 @@ def test_get_meta_info_v3(meta_file_v3): def test_get_meta_info_empty(): - meta = get_meta_info() + meta = CliMetaManager().get_meta_info() assert meta is None def test_compose_default_meta(): - meta = compose_default_meta() + meta = CliMetaManager().compose_default_meta() assert meta.version == '1.0.0' assert meta.config_stream == '1.1.0' assert meta.docker_lvmpy_stream == '1.0.0' @@ -53,7 +52,7 @@ def test_compose_default_meta(): def test_save_meta(meta_file_v2): meta = CliMeta(version='1.1.2', config_stream='2.2.2') - save_meta(meta) + CliMetaManager().save_meta(meta) with open(META_FILEPATH) as meta_f: saved_json = json.load(meta_f) assert saved_json == { @@ -66,15 +65,15 @@ def test_save_meta(meta_file_v2): def test_update_meta_from_v2_to_v3(meta_file_v2): - old_meta = get_meta_info() - update_meta( + old_meta = CliMetaManager().get_meta_info() + CliMetaManager().update_meta( version='3.3.3', config_stream='1.1.1', docker_lvmpy_stream='1.2.2', os_id='debian', os_version='11', ) - meta = get_meta_info() + meta = CliMetaManager().get_meta_info() assert meta.version == '3.3.3' assert meta.config_stream == '1.1.1' assert meta.docker_lvmpy_stream == '1.2.2' @@ -84,14 +83,14 @@ def test_update_meta_from_v2_to_v3(meta_file_v2): def test_update_meta_from_v1(meta_file_v1): - update_meta( + CliMetaManager().update_meta( version='4.4.4', config_stream='beta', docker_lvmpy_stream='1.3.3', os_id='debian', os_version='11', ) - meta = get_meta_info() + meta = CliMetaManager().get_meta_info() assert meta.version == '4.4.4' assert meta.config_stream == 'beta' assert meta.docker_lvmpy_stream == '1.3.3' @@ -100,14 +99,14 @@ def test_update_meta_from_v1(meta_file_v1): def test_update_meta_from_v3(meta_file_v3): - update_meta( + CliMetaManager().update_meta( version='5.5.5', config_stream='stable', docker_lvmpy_stream='1.2.3', os_id='ubuntu', os_version='20.04', 
) - meta = get_meta_info() + meta = CliMetaManager().get_meta_info() assert meta.version == '5.5.5' assert meta.config_stream == 'stable' assert meta.docker_lvmpy_stream == '1.2.3' @@ -116,7 +115,208 @@ def test_update_meta_from_v3(meta_file_v3): def test_ensure_meta(ensure_meta_removed): - ensure_meta() - assert get_meta_info() == CliMeta(DEFAULT_VERSION, DEFAULT_CONFIG_STREAM) - ensure_meta(CliMeta(version='1.1.1', config_stream='1.1.1')) - assert get_meta_info() == CliMeta(DEFAULT_VERSION, DEFAULT_CONFIG_STREAM) + CliMetaManager().ensure_meta() + assert CliMetaManager().get_meta_info() == CliMeta(DEFAULT_VERSION, DEFAULT_CONFIG_STREAM) + CliMetaManager().ensure_meta(CliMeta(version='1.1.1', config_stream='1.1.1')) + assert CliMetaManager().get_meta_info() == CliMeta(DEFAULT_VERSION, DEFAULT_CONFIG_STREAM) + + +def test_mirage_get_meta_info_v1(meta_file_v1): + meta = MirageCliMetaManager().get_meta_info() + assert meta.version == TEST_META_V1['version'] + assert meta.config_stream == TEST_META_V1['config_stream'] + assert meta.os_id == 'ubuntu' + assert meta.os_version == '18.04' + + +def test_mirage_get_meta_info_v2(meta_file_v2): + meta = MirageCliMetaManager().get_meta_info() + assert meta.version == TEST_META_V2['version'] + assert meta.config_stream == TEST_META_V2['config_stream'] + assert meta.os_id == 'ubuntu' # default value + assert meta.os_version == '18.04' # default value + + +def test_mirage_get_meta_info_v3(meta_file_v3): + meta = MirageCliMetaManager().get_meta_info() + assert meta.version == TEST_META_V3['version'] + assert meta.config_stream == TEST_META_V3['config_stream'] + assert meta.os_id == TEST_META_V3['os_id'] + assert meta.os_version == TEST_META_V3['os_version'] + + +def test_mirage_get_meta_info_empty(): + meta = MirageCliMetaManager().get_meta_info() + assert meta is None + + +def test_mirage_compose_default_meta(): + meta = MirageCliMetaManager().compose_default_meta() + assert meta.version == '1.0.0' + assert meta.config_stream == 
'1.1.0' + assert meta.os_id == 'ubuntu' + assert meta.os_version == '18.04' + assert not hasattr(meta, 'docker_lvmpy_stream') + + +def test_mirage_save_meta(meta_file_v2): + meta = MirageCliMeta( + version='2.2.2', config_stream='mirage-stable', os_id='debian', os_version='11' + ) + MirageCliMetaManager().save_meta(meta) + with open(META_FILEPATH) as meta_f: + saved_json = json.load(meta_f) + assert saved_json == { + 'version': '2.2.2', + 'config_stream': 'mirage-stable', + 'os_id': 'debian', + 'os_version': '11', + } + assert 'docker_lvmpy_stream' not in saved_json + + +def test_mirage_update_meta_from_v2_to_v3(meta_file_v2): + old_meta = MirageCliMetaManager().get_meta_info() + MirageCliMetaManager().update_meta( + version='3.3.3', + config_stream='mirage-beta', + os_id='debian', + os_version='11', + ) + meta = MirageCliMetaManager().get_meta_info() + assert meta.version == '3.3.3' + assert meta.config_stream == 'mirage-beta' + assert meta.os_id == 'debian' + assert meta.os_version == '11' + assert meta != old_meta + + +def test_mirage_update_meta_from_v1(meta_file_v1): + MirageCliMetaManager().update_meta( + version='4.4.4', + config_stream='mirage-develop', + os_id='centos', + os_version='8', + ) + meta = MirageCliMetaManager().get_meta_info() + assert meta.version == '4.4.4' + assert meta.config_stream == 'mirage-develop' + assert meta.os_id == 'centos' + assert meta.os_version == '8' + + +def test_mirage_update_meta_from_v3(meta_file_v3): + MirageCliMetaManager().update_meta( + version='5.5.5', + config_stream='mirage-stable', + os_id='ubuntu', + os_version='22.04', + ) + meta = MirageCliMetaManager().get_meta_info() + assert meta.version == '5.5.5' + assert meta.config_stream == 'mirage-stable' + assert meta.os_id == 'ubuntu' + assert meta.os_version == '22.04' + + +def test_mirage_ensure_meta(ensure_meta_removed): + MirageCliMetaManager().ensure_meta() + assert MirageCliMetaManager().get_meta_info() == MirageCliMeta( + DEFAULT_VERSION, DEFAULT_CONFIG_STREAM 
+ ) + MirageCliMetaManager().ensure_meta(MirageCliMeta(version='1.1.1', config_stream='1.1.1')) + assert MirageCliMetaManager().get_meta_info() == MirageCliMeta( + DEFAULT_VERSION, DEFAULT_CONFIG_STREAM + ) + + +def test_mirage_get_meta_info_raw(meta_file_v3): + raw_meta = MirageCliMetaManager().get_meta_info(raw=True) + assert isinstance(raw_meta, dict) + assert raw_meta['version'] == TEST_META_V3['version'] + assert raw_meta['config_stream'] == TEST_META_V3['config_stream'] + assert raw_meta['os_id'] == TEST_META_V3['os_id'] + assert raw_meta['os_version'] == TEST_META_V3['os_version'] + assert 'docker_lvmpy_stream' not in raw_meta + + +def test_mirage_get_meta_info_raw_empty(): + raw_meta = MirageCliMetaManager().get_meta_info(raw=True) + assert raw_meta == {} + + +def test_mirage_asdict(): + meta = MirageCliMeta( + version='1.2.3', config_stream='test-stream', os_id='fedora', os_version='35' + ) + meta_dict = meta.asdict() + expected = { + 'version': '1.2.3', + 'config_stream': 'test-stream', + 'os_id': 'fedora', + 'os_version': '35', + } + assert meta_dict == expected + assert 'docker_lvmpy_stream' not in meta_dict + + +def test_mirage_meta_compatibility_with_cli_meta_file(meta_file_v3): + meta = MirageCliMetaManager().get_meta_info() + assert meta.version == TEST_META_V3['version'] + assert meta.config_stream == TEST_META_V3['config_stream'] + assert meta.os_id == TEST_META_V3['os_id'] + assert meta.os_version == TEST_META_V3['os_version'] + # Should not have docker_lvmpy_stream even though it's in the file + assert not hasattr(meta, 'docker_lvmpy_stream') + + +def test_mirage_save_meta_overwrites_cli_meta(meta_file_v3): + with open(META_FILEPATH) as f: + original_data = json.load(f) + assert 'docker_lvmpy_stream' in original_data + + mirage_meta = MirageCliMeta(version='2.0.0', config_stream='mirage-new') + MirageCliMetaManager().save_meta(mirage_meta) + + with open(META_FILEPATH) as f: + saved_data = json.load(f) + assert 'docker_lvmpy_stream' not in 
saved_data + assert saved_data['version'] == '2.0.0' + assert saved_data['config_stream'] == 'mirage-new' + + +def test_mirage_ensure_meta_with_existing_cli_meta(meta_file_v3): + MirageCliMetaManager().ensure_meta() + meta = MirageCliMetaManager().get_meta_info() + assert meta.version == TEST_META_V3['version'] + assert meta.config_stream == TEST_META_V3['config_stream'] + + +def test_mirage_meta_defaults(): + meta = MirageCliMeta() + assert meta.version == DEFAULT_VERSION + assert meta.config_stream == DEFAULT_CONFIG_STREAM + assert meta.os_id == 'ubuntu' + assert meta.os_version == '18.04' + + +def test_mirage_meta_partial_initialization(): + meta = MirageCliMeta(version='1.5.0', os_id='alpine') + assert meta.version == '1.5.0' + assert meta.config_stream == DEFAULT_CONFIG_STREAM + assert meta.os_id == 'alpine' + assert meta.os_version == '18.04' + + +def test_mirage_update_meta_ensure_called(): + manager = MirageCliMetaManager() + + manager.update_meta(version='1.0.0', config_stream='test', os_id='ubuntu', os_version='20.04') + + meta = manager.get_meta_info() + assert meta is not None + assert meta.version == '1.0.0' + assert meta.config_stream == 'test' + + if os.path.isfile(META_FILEPATH): + os.remove(META_FILEPATH) From 4759cc5b1aad885bc9101214258d9893dc3963ac Mon Sep 17 00:00:00 2001 From: badrogger Date: Wed, 11 Jun 2025 11:18:33 +0100 Subject: [PATCH 089/332] Fix tests --- tests/cli/mirage_cli_test.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/cli/mirage_cli_test.py b/tests/cli/mirage_cli_test.py index 3aa2715a..e8400219 100644 --- a/tests/cli/mirage_cli_test.py +++ b/tests/cli/mirage_cli_test.py @@ -175,4 +175,4 @@ def test_mirage_node_migrate(mock_migrate_core, valid_env_file): result = runner.invoke(migrate_node, ['--yes', valid_env_file]) assert result.exit_code == 0, f'Output: {result.output}\nException: {result.exception}' - mock_migrate_core.assert_called_once_with(valid_env_file, None) + 
mock_migrate_core.assert_called_once_with(env_filepath=valid_env_file) From 3bc8f73b1fcdd41002a13456544fc18d9492f307 Mon Sep 17 00:00:00 2001 From: badrogger Date: Wed, 11 Jun 2025 19:22:14 +0100 Subject: [PATCH 090/332] Fix boot update --- .gitignore | 4 +++- node_cli/operations/base.py | 3 +-- 2 files changed, 4 insertions(+), 3 deletions(-) diff --git a/.gitignore b/.gitignore index e183f4a7..c1b8fee2 100644 --- a/.gitignore +++ b/.gitignore @@ -122,4 +122,6 @@ test-env nginx.conf tests/.skale/node_data/docker.json tests/.skale/node_data/node_options.json -tests/.skale/config/nginx.conf.j2 \ No newline at end of file +tests/.skale/config/nginx.conf.j2 + +.zed diff --git a/node_cli/operations/base.py b/node_cli/operations/base.py index 521620e0..9b554dde 100644 --- a/node_cli/operations/base.py +++ b/node_cli/operations/base.py @@ -65,7 +65,7 @@ remove_dynamic_containers, ) from node_cli.utils.helper import str_to_bool, rm_dir -from node_cli.utils.meta import CliMetaManager +from node_cli.utils.meta import CliMetaManager, MirageCliMetaManager from node_cli.utils.node_type import NodeType from node_cli.utils.print_formatters import print_failed_requirements_checks @@ -225,7 +225,6 @@ def update_mirage_boot(env_filepath: str, env: Dict) -> bool: meta_manager.update_meta( VERSION, env['CONTAINER_CONFIGS_STREAM'], - None, distro.id(), distro.version(), ) From 57374491dd7a6c2493362b4470c7efb72f4e3080 Mon Sep 17 00:00:00 2001 From: badrogger Date: Wed, 11 Jun 2025 19:24:34 +0100 Subject: [PATCH 091/332] Fix mirage boot --- node_cli/operations/base.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/node_cli/operations/base.py b/node_cli/operations/base.py index 9b554dde..456b4359 100644 --- a/node_cli/operations/base.py +++ b/node_cli/operations/base.py @@ -211,7 +211,7 @@ def update_mirage_boot(env_filepath: str, env: Dict) -> bool: prepare_host(env_filepath, env['ENV_TYPE']) - meta_manager = CliMetaManager() + meta_manager = MirageCliMetaManager() 
current_stream = meta_manager.get_meta_info().config_stream skip_cleanup = env.get('SKIP_DOCKER_CLEANUP') == 'True' if not skip_cleanup and current_stream != env['CONTAINER_CONFIGS_STREAM']: From 43c7ae4445946e990d1e7ab4bf9f4722949448dc Mon Sep 17 00:00:00 2001 From: badrogger Date: Fri, 13 Jun 2025 17:59:28 +0100 Subject: [PATCH 092/332] Introduce Env class --- node_cli/cli/mirage_node.py | 1 - node_cli/configs/env.py | 163 ++++++++++++++++----- node_cli/core/node.py | 10 +- node_cli/core/resources.py | 6 +- node_cli/core/schains.py | 2 +- tests/configs/configs_env_validate_test.py | 62 ++------ 6 files changed, 153 insertions(+), 91 deletions(-) diff --git a/node_cli/cli/mirage_node.py b/node_cli/cli/mirage_node.py index d3fe259a..47f43718 100644 --- a/node_cli/cli/mirage_node.py +++ b/node_cli/cli/mirage_node.py @@ -107,5 +107,4 @@ def restore_node(backup_path, env_file, config_only): ) @streamed_cmd def migrate_node(env_filepath: str) -> None: - click.echo("Placeholder: Command 'mirage node migrate' is not yet implemented.") migrate_from_boot(env_filepath=env_filepath) diff --git a/node_cli/configs/env.py b/node_cli/configs/env.py index 9979beef..0a12ddb3 100644 --- a/node_cli/configs/env.py +++ b/node_cli/configs/env.py @@ -17,10 +17,13 @@ # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see . 
+import inspect import os -from typing import Dict, List +from typing import Dict, List, NamedTuple +from dataclasses import dataclass +from abc import ABC -from dotenv import load_dotenv +from dotenv.main import DotEnv from node_cli.configs import SKALE_DIR, CONTAINER_CONFIG_PATH from node_cli.configs.alias_address_validation import validate_env_alias_or_address, ContractType @@ -32,6 +35,91 @@ ALLOWED_ENV_TYPES = ['mainnet', 'testnet', 'qanet', 'devnet'] + +class ValidationResult(NamedTuple): + result: bool + missing: set + extra: set + + +@dataclass(kw_only=True) +class BaseEnvConfig(ABC): + container_configs_stream: str + endpoint: str + sgx_server_url: str + env_type: str + filebeat_host: str + disk_mountpoint: str + + container_configs_dir: str = '' + skip_docker_config: str = '' + skip_docker_cleanup: str = '' + + def to_env(self) -> Dict[str, str]: + result = {} + for field_name, field_value in self.__dict__.items(): + upper_key = field_name.upper() + result[upper_key] = str(field_value) if field_value is not None else '' + return result + + @classmethod + def validate_params(cls, params: Dict) -> ValidationResult: + parameters = inspect.signature(cls.__init__).parameters + missing = [] + keys = params.keys() + expected_keys = { + name.upper() + for name, value in parameters.items() + if name != 'self' and value.default == inspect._empty + } + optional_keys = { + name.upper() + for name, value in parameters.items() + if name != 'self' and value.default != inspect._empty + } + missing = expected_keys - keys + extra = keys - expected_keys - optional_keys + print('HEREC', params, parameters.items()) + return ValidationResult(missing == set() and extra == set(), missing, extra) + + +@dataclass +class MirageEnvConfig(BaseEnvConfig): + mirage_contracts: str + enforce_btrfs: str = '' + + +@dataclass +class MirageBootEnvConfig(BaseEnvConfig): + manager_contracts: str + ima_contracts: str + enforce_btrfs: str = '' + + +@dataclass +class 
SkaleEnvConfig(BaseEnvConfig): + manager_contracts: str + ima_contracts: str + docker_lvmpy_stream: str + monitoring_containers: str = '' + telegraf: str = '' + influx_token: str = '' + influx_url: str = '' + tg_api_key: str = '' + tg_chat_id: str = '' + disable_dry_run: str = '' + default_gas_limit: str = '' + default_gas_price_wei: str = '' + + +@dataclass +class SyncEnvConfig(BaseEnvConfig): + manager_contracts: str + schain_name: str = '' + ima_contracts: str = '' + enforce_btrfs: str = '' + + CORE_REQUIRED_PARAMS: Dict[str, str] = { 'CONTAINER_CONFIGS_STREAM': '', 'ENDPOINT': '', @@ -90,34 +178,57 @@ def get_validated_env_config( node_type: NodeType, env_filepath: str = SKALE_DIR_ENV_FILEPATH, is_mirage_boot: bool = False, -) -> Dict[str, str]: - load_env_file(env_filepath) - params = build_env_params(node_type=node_type, is_mirage_boot=is_mirage_boot) - populate_env_params(params) - validate_env_params(params=params) - return params +) -> BaseEnvConfig: + params = parse_env_file(env_filepath) + EnvType = get_env_class(node_type, is_mirage_boot) + _, missing_params, extra_params = EnvType.validate_params(params) + + if len(missing_params) > 0: + error_exit(f'Missing required parameters: {missing_params}') + + if len(extra_params) > 0: + error_exit(f'Extra parameters: {extra_params}') + + validate_env_type(env_type=params['ENV_TYPE']) + params = to_lower_keys(params) + env = EnvType(**params) + + if node_type == NodeType.MIRAGE and not is_mirage_boot: + contract_alias_or_address = env.mirage_contracts + else: + contract_alias_or_address = params.get('MANAGER_CONTRACTS', '') + contract_alias_or_address = env.manager_contracts + validate_env_alias_or_address(contract_alias_or_address, ContractType.MANAGER, env.endpoint) + if 'IMA_CONTRACTS' in params: + validate_env_alias_or_address(env.ima_contracts, ContractType.IMA, env.endpoint) -def load_env_file(env_filepath: str) -> None: - if not load_dotenv(dotenv_path=env_filepath): + return env + + +def 
to_lower_keys(params: Dict[str, str]) -> Dict[str, str]: + return {key.lower(): value for key, value in params.items()} + + +def parse_env_file(env_filepath: str) -> Dict: + if not os.path.isfile(env_filepath): error_exit(f'Failed to load environment from {env_filepath}') + return DotEnv(env_filepath).dict() -def build_env_params( +def get_env_class( node_type: NodeType, is_mirage_boot: bool = False, -) -> Dict[str, str]: +) -> type[BaseEnvConfig]: if node_type == NodeType.MIRAGE and is_mirage_boot: - params = REQUIRED_PARAMS_MIRAGE_BOOT.copy() + env_type = MirageBootEnvConfig elif node_type == NodeType.MIRAGE: - params = REQUIRED_PARAMS_MIRAGE.copy() + env_type = MirageEnvConfig elif node_type == NodeType.SYNC: - params = REQUIRED_PARAMS_SYNC.copy() + env_type = SyncEnvConfig else: - params = REQUIRED_PARAMS_SKALE.copy() - - params.update(OPTIONAL_PARAMS) - return params + env_type = SkaleEnvConfig + return env_type def populate_env_params(params: Dict[str, str]) -> None: @@ -127,20 +238,6 @@ def populate_env_params(params: Dict[str, str]) -> None: params[key] = str(env_value) -def validate_env_params( - params: Dict[str, str], -) -> None: - missing = absent_required_params(params) - if missing: - error_exit(f'Missing required parameters: {missing}') - validate_env_type(env_type=params['ENV_TYPE']) - endpoint = params['ENDPOINT'] - validate_env_alias_or_address(params['MANAGER_CONTRACTS'], ContractType.MANAGER, endpoint) - - if 'IMA_CONTRACTS' in params.keys(): - validate_env_alias_or_address(params['IMA_CONTRACTS'], ContractType.IMA, endpoint) - - def validate_env_type(env_type: str) -> None: if env_type not in ALLOWED_ENV_TYPES: error_exit(f'Allowed ENV_TYPE values are {ALLOWED_ENV_TYPES}. 
Actual: "{env_type}"') diff --git a/node_cli/core/node.py b/node_cli/core/node.py index 1d4b49d7..1c4c77d7 100644 --- a/node_cli/core/node.py +++ b/node_cli/core/node.py @@ -233,7 +233,7 @@ def compose_node_env( is_mirage_boot: bool = False, ) -> dict[str, str]: if env_filepath is not None: - env_params = get_validated_env_config( + env_config = get_validated_env_config( node_type=node_type, env_filepath=env_filepath, is_mirage_boot=is_mirage_boot, @@ -241,7 +241,7 @@ def compose_node_env( if save: save_env_params(env_filepath) else: - env_params = get_validated_env_config( + env_config = get_validated_env_config( node_type=node_type, env_filepath=INIT_ENV_FILEPATH, is_mirage_boot=is_mirage_boot, @@ -257,7 +257,7 @@ def compose_node_env( 'SCHAINS_MNT_DIR': mnt_dir, 'FILESTORAGE_MAPPING': FILESTORAGE_MAPPING, 'SKALE_LIB_PATH': SKALE_STATE_DIR, - **env_params, + **env_config.to_env(), } if inited_node and not node_type == NodeType.SYNC: @@ -502,8 +502,8 @@ def run_checks( return if disk is None: - env = get_validated_env_config(node_type=node_type) - disk = env['DISK_MOUNTPOINT'] + env_config = get_validated_env_config(node_type=node_type) + disk = env_config.disk_mountpoint failed_checks = run_host_checks(disk, node_type, network, container_config_path) if not failed_checks: print('Requirements checking successfully finished!') diff --git a/node_cli/core/resources.py b/node_cli/core/resources.py index 669f8d0f..782c0ea7 100644 --- a/node_cli/core/resources.py +++ b/node_cli/core/resources.py @@ -102,12 +102,10 @@ def generate_resource_allocation_config( logger.debug(msg) print(msg) return - env_params = get_validated_env_config(node_type=node_type, env_filepath=env_file) - if env_params is None: - return + env_config = get_validated_env_config(node_type=node_type, env_filepath=env_file) logger.info('Generating resource allocation file ...') try: - update_resource_allocation(env_params['ENV_TYPE']) + update_resource_allocation(env_config.env_type) except Exception as 
e: logger.exception(e) print("Can't generate resource allocation file, check out CLI logs") diff --git a/node_cli/core/schains.py b/node_cli/core/schains.py index 957e02df..111a37df 100644 --- a/node_cli/core/schains.py +++ b/node_cli/core/schains.py @@ -191,7 +191,7 @@ def restore_schain_from_snapshot( ) -> None: if env_type is None: env_config = get_validated_env_config(node_type=node_type) - env_type = env_config['ENV_TYPE'] + env_type = env_config.env_type ensure_schain_volume(schain, schain_type, env_type) block_number = get_block_number_from_path(snapshot_path) if block_number == -1: diff --git a/tests/configs/configs_env_validate_test.py b/tests/configs/configs_env_validate_test.py index 03c955e9..e3d3dad0 100644 --- a/tests/configs/configs_env_validate_test.py +++ b/tests/configs/configs_env_validate_test.py @@ -5,16 +5,16 @@ import mock from node_cli.configs.env import ( + SkaleEnvConfig, + SyncEnvConfig, + MirageEnvConfig, absent_required_params, - load_env_file, - build_env_params, + get_env_class, populate_env_params, get_validated_env_config, validate_env_params, validate_env_type, ALLOWED_ENV_TYPES, - REQUIRED_PARAMS_SKALE, - REQUIRED_PARAMS_SYNC, REQUIRED_PARAMS_MIRAGE_BOOT, REQUIRED_PARAMS_MIRAGE, OPTIONAL_PARAMS, @@ -55,12 +55,6 @@ def test_absent_required_params_returns_missing_keys(): assert 'MONITORING_CONTAINERS' not in missing -def test_load_env_file_nonexistent(): - with pytest.raises(SystemExit) as excinfo: - load_env_file('nonexistent.env') - assert excinfo.value.code == CLIExitCodes.FAILURE.value - - def test_populate_env_params_updates_from_environ(monkeypatch): params = {'FOO': ''} monkeypatch.setenv('FOO', 'bar') @@ -69,45 +63,18 @@ def test_populate_env_params_updates_from_environ(monkeypatch): @pytest.mark.parametrize( - 'node_type, is_mirage_boot, expected_keys, unexpected_keys', + 'node_type, is_mirage_boot, expected_type', [ - ( - NodeType.REGULAR, - False, - REQUIRED_PARAMS_SKALE.keys(), - {'SCHAIN_NAME'}, - ), - ( - 
NodeType.SYNC, - False, - REQUIRED_PARAMS_SYNC.keys(), - set(), - ), - ( - NodeType.MIRAGE, - True, - REQUIRED_PARAMS_MIRAGE_BOOT.keys(), - {'DOCKER_LVMPY_STREAM', 'SCHAIN_NAME'}, - ), - ( - NodeType.MIRAGE, - False, - REQUIRED_PARAMS_MIRAGE.keys(), - {'IMA_CONTRACTS', 'DOCKER_LVMPY_STREAM', 'SCHAIN_NAME'}, - ), + (NodeType.REGULAR, False, SkaleEnvConfig), + (NodeType.SYNC, False, SyncEnvConfig), + (NodeType.MIRAGE, True, SkaleEnvConfig), + (NodeType.MIRAGE, False, MirageEnvConfig), ], ids=['regular', 'sync', 'mirage_boot', 'mirage_regular'], ) -def test_build_env_params_keys(node_type, is_mirage_boot, expected_keys, unexpected_keys): - params = build_env_params(node_type=node_type, is_mirage_boot=is_mirage_boot) - param_keys = set(params.keys()) - - all_expected = set(expected_keys) | set(OPTIONAL_PARAMS.keys()) - missing_expected = all_expected - param_keys - assert not missing_expected, f'Missing expected keys: {missing_expected}' - - found_unexpected = set(unexpected_keys) & param_keys - assert not found_unexpected, f'Found unexpected keys: {found_unexpected}' +def test_build_env_params_keys(node_type, is_mirage_boot, expected_type): + env_type = get_env_class(node_type=node_type, is_mirage_boot=is_mirage_boot) + assert env_type == expected_type @pytest.mark.parametrize( @@ -318,9 +285,10 @@ def test_get_validated_env_config_mirage_success( ) assert config is not None - assert set(config.keys()) == set(expected_config.keys()) + plain_config = config.to_env() + assert set(plain_config.keys()) == set(expected_config.keys()) for key in expected_config: - assert config[key] == expected_config[key] + assert plain_config[key] == expected_config[key] for key in {**required_keys_dict, **OPTIONAL_PARAMS}: monkeypatch.delenv(key, raising=False) From ed389fea8251339351d96c9973ed066531d3863e Mon Sep 17 00:00:00 2001 From: badrogger Date: Fri, 13 Jun 2025 18:20:38 +0100 Subject: [PATCH 093/332] Rename EnvType to UserConfig --- node_cli/configs/env.py | 51 ++++----- 
node_cli/core/node.py | 10 +- node_cli/core/resources.py | 6 +- node_cli/core/schains.py | 6 +- tests/configs/configs_env_validate_test.py | 122 +++++---------------- 5 files changed, 63 insertions(+), 132 deletions(-) diff --git a/node_cli/configs/env.py b/node_cli/configs/env.py index 0a12ddb3..0d95407c 100644 --- a/node_cli/configs/env.py +++ b/node_cli/configs/env.py @@ -43,7 +43,7 @@ class ValidationResult(NamedTuple): @dataclass(kw_only=True) -class BaseEnvConfig(ABC): +class BaseUserConfig(ABC): container_configs_stream: str endpoint: str sgx_server_url: str @@ -84,20 +84,20 @@ def validate_params(cls, params: Dict) -> ValidationResult: @dataclass -class MirageEnvConfig(BaseEnvConfig): +class MirageUserConfig(BaseUserConfig): mirage_contracts: str enforce_btrfs: str = '' @dataclass -class MirageBootEnvConfig(BaseEnvConfig): +class MirageBootUserConfig(BaseUserConfig): manager_contracts: str ima_contracts: str enforce_btrfs: str = '' @dataclass -class SkaleEnvConfig(BaseEnvConfig): +class SkaleUserConfig(BaseUserConfig): manager_contracts: str ima_contracts: str docker_lvmpy_stream: str @@ -113,7 +113,7 @@ class SkaleEnvConfig(BaseEnvConfig): @dataclass -class SyncEnvConfig(BaseEnvConfig): +class SyncUserConfig(BaseUserConfig): manager_contracts: str schain_name: str = '' ima_contracts: str = '' @@ -174,14 +174,14 @@ def absent_required_params(params: Dict[str, str]) -> List[str]: return [key for key in params if key not in OPTIONAL_PARAMS and not params[key]] -def get_validated_env_config( +def get_validated_user_config( node_type: NodeType, env_filepath: str = SKALE_DIR_ENV_FILEPATH, is_mirage_boot: bool = False, -) -> BaseEnvConfig: +) -> BaseUserConfig: params = parse_env_file(env_filepath) - EnvType = get_env_class(node_type, is_mirage_boot) - _, missing_params, extra_params = EnvType.validate_params(params) + UserConfigType = get_user_config_type(node_type, is_mirage_boot) + _, missing_params, extra_params = UserConfigType.validate_params(params) if 
len(missing_params) > 0: error_exit(f'Missing required parameters: {missing_params}') @@ -191,19 +191,19 @@ def get_validated_env_config( validate_env_type(env_type=params['ENV_TYPE']) params = to_lower_keys(params) - env = EnvType(**params) + user_config = UserConfigType(**params) if node_type == NodeType.MIRAGE and not is_mirage_boot: - contract_alias_or_address = env.mirage_contracts + contract_alias_or_address = user_config.mirage_contracts else: contract_alias_or_address = params.get('MANAGER_CONTRACTS', '') - contract_alias_or_address = env.manager_contracts - validate_env_alias_or_address(contract_alias_or_address, ContractType.MANAGER, env.endpoint) + contract_alias_or_address = user_config.manager_contracts + validate_env_alias_or_address(contract_alias_or_address, ContractType.MANAGER, user_config.endpoint) if 'IMA_CONTRACTS' in params: - validate_env_alias_or_address(env.ima_contracts, ContractType.IMA, env.endpoint) + validate_env_alias_or_address(user_config.ima_contracts, ContractType.IMA, user_config.endpoint) - return env + return user_config def to_lower_keys(params: Dict[str, str]) -> Dict[str, str]: @@ -216,26 +216,19 @@ def parse_env_file(env_filepath: str) -> Dict: return DotEnv(env_filepath).dict() -def get_env_class( +def get_user_config_type( node_type: NodeType, is_mirage_boot: bool = False, -) -> type[BaseEnvConfig]: +) -> type[BaseUserConfig]: if node_type == NodeType.MIRAGE and is_mirage_boot: - env_type = MirageBootEnvConfig + user_config_type = MirageBootUserConfig elif node_type == NodeType.MIRAGE: - env_type = MirageEnvConfig + user_config_type = MirageUserConfig elif node_type == NodeType.SYNC: - env_type = SyncEnvConfig + user_config_type = SyncUserConfig else: - env_type = SkaleEnvConfig - return env_type - - -def populate_env_params(params: Dict[str, str]) -> None: - for key in params: - env_value = os.getenv(key) - if env_value is not None: - params[key] = str(env_value) + user_config_type = SkaleUserConfig + return 
user_config_type def validate_env_type(env_type: str) -> None: diff --git a/node_cli/core/node.py b/node_cli/core/node.py index 1c4c77d7..43d66ffd 100644 --- a/node_cli/core/node.py +++ b/node_cli/core/node.py @@ -42,7 +42,7 @@ TM_INIT_TIMEOUT, ) from node_cli.cli import __version__ -from node_cli.configs.env import get_validated_env_config, SKALE_DIR_ENV_FILEPATH +from node_cli.configs.env import get_validated_user_config, SKALE_DIR_ENV_FILEPATH from node_cli.configs.cli_logger import LOG_DATA_PATH as CLI_LOG_DATA_PATH from node_cli.core.host import is_node_inited, save_env_params, get_flask_secret_key @@ -233,7 +233,7 @@ def compose_node_env( is_mirage_boot: bool = False, ) -> dict[str, str]: if env_filepath is not None: - env_config = get_validated_env_config( + user_config = get_validated_user_config( node_type=node_type, env_filepath=env_filepath, is_mirage_boot=is_mirage_boot, @@ -241,7 +241,7 @@ def compose_node_env( if save: save_env_params(env_filepath) else: - env_config = get_validated_env_config( + user_config = get_validated_user_config( node_type=node_type, env_filepath=INIT_ENV_FILEPATH, is_mirage_boot=is_mirage_boot, @@ -257,7 +257,7 @@ def compose_node_env( 'SCHAINS_MNT_DIR': mnt_dir, 'FILESTORAGE_MAPPING': FILESTORAGE_MAPPING, 'SKALE_LIB_PATH': SKALE_STATE_DIR, - **env_config.to_env(), + **user_config.to_env(), } if inited_node and not node_type == NodeType.SYNC: @@ -502,7 +502,7 @@ def run_checks( return if disk is None: - env_config = get_validated_env_config(node_type=node_type) + env_config = get_validated_user_config(node_type=node_type) disk = env_config.disk_mountpoint failed_checks = run_host_checks(disk, node_type, network, container_config_path) if not failed_checks: diff --git a/node_cli/core/resources.py b/node_cli/core/resources.py index 782c0ea7..f0a1d4ea 100644 --- a/node_cli/core/resources.py +++ b/node_cli/core/resources.py @@ -24,7 +24,7 @@ import psutil -from node_cli.configs.env import get_validated_env_config +from 
node_cli.configs.env import get_validated_user_config from node_cli.utils.docker_utils import ensure_volume from node_cli.utils.schain_types import SchainTypes from node_cli.utils.helper import write_json, read_json, run_cmd, safe_load_yml @@ -102,10 +102,10 @@ def generate_resource_allocation_config( logger.debug(msg) print(msg) return - env_config = get_validated_env_config(node_type=node_type, env_filepath=env_file) + user_config = get_validated_user_config(node_type=node_type, env_filepath=env_file) logger.info('Generating resource allocation file ...') try: - update_resource_allocation(env_config.env_type) + update_resource_allocation(user_config.env_type) except Exception as e: logger.exception(e) print("Can't generate resource allocation file, check out CLI logs") diff --git a/node_cli/core/schains.py b/node_cli/core/schains.py index 111a37df..b5269304 100644 --- a/node_cli/core/schains.py +++ b/node_cli/core/schains.py @@ -15,7 +15,7 @@ SCHAIN_NODE_DATA_PATH, SCHAINS_MNT_DIR_SINGLE_CHAIN, ) -from node_cli.configs.env import get_validated_env_config +from node_cli.configs.env import get_validated_user_config from node_cli.utils.helper import get_request, error_exit, safe_load_yml from node_cli.utils.exit_codes import CLIExitCodes @@ -190,8 +190,8 @@ def restore_schain_from_snapshot( schain_type: str = 'medium', ) -> None: if env_type is None: - env_config = get_validated_env_config(node_type=node_type) - env_type = env_config.env_type + user_config = get_validated_user_config(node_type=node_type) + env_type = user_config.env_type ensure_schain_volume(schain, schain_type, env_type) block_number = get_block_number_from_path(snapshot_path) if block_number == -1: diff --git a/tests/configs/configs_env_validate_test.py b/tests/configs/configs_env_validate_test.py index e3d3dad0..5ac014c1 100644 --- a/tests/configs/configs_env_validate_test.py +++ b/tests/configs/configs_env_validate_test.py @@ -1,33 +1,32 @@ import os from typing import Optional + +import mock 
import pytest import requests -import mock -from node_cli.configs.env import ( - SkaleEnvConfig, - SyncEnvConfig, - MirageEnvConfig, - absent_required_params, - get_env_class, - populate_env_params, - get_validated_env_config, - validate_env_params, - validate_env_type, - ALLOWED_ENV_TYPES, - REQUIRED_PARAMS_MIRAGE_BOOT, - REQUIRED_PARAMS_MIRAGE, - OPTIONAL_PARAMS, -) from node_cli.configs.alias_address_validation import ( - validate_env_alias_or_address, - validate_contract_address, - validate_contract_alias, + ContractType, get_chain_id, get_network_metadata, - ContractType, + validate_contract_address, + validate_contract_alias, + validate_env_alias_or_address, +) +from node_cli.configs.env import ( + ALLOWED_ENV_TYPES, + OPTIONAL_PARAMS, + REQUIRED_PARAMS_MIRAGE, + REQUIRED_PARAMS_MIRAGE_BOOT, + MirageBootUserConfig, + MirageUserConfig, + SkaleUserConfig, + SyncUserConfig, + absent_required_params, + get_user_config_type, + get_validated_user_config, + validate_env_type, ) -from node_cli.utils.exit_codes import CLIExitCodes from node_cli.utils.node_type import NodeType ENDPOINT = 'http://localhost:8545' @@ -42,38 +41,18 @@ def json(self): return self._json_data -def test_absent_required_params_returns_missing_keys(): - params = { - 'A': '', - 'B': 'value', - 'C': '', - 'MONITORING_CONTAINERS': 'optional', - } - missing = absent_required_params(params) - assert 'A' in missing - assert 'C' in missing - assert 'MONITORING_CONTAINERS' not in missing - - -def test_populate_env_params_updates_from_environ(monkeypatch): - params = {'FOO': ''} - monkeypatch.setenv('FOO', 'bar') - populate_env_params(params) - assert params['FOO'] == 'bar' - - @pytest.mark.parametrize( 'node_type, is_mirage_boot, expected_type', [ - (NodeType.REGULAR, False, SkaleEnvConfig), - (NodeType.SYNC, False, SyncEnvConfig), - (NodeType.MIRAGE, True, SkaleEnvConfig), - (NodeType.MIRAGE, False, MirageEnvConfig), + (NodeType.REGULAR, False, SkaleUserConfig), + (NodeType.SYNC, False, 
SyncUserConfig), + (NodeType.MIRAGE, True, MirageBootUserConfig), + (NodeType.MIRAGE, False, MirageUserConfig), ], ids=['regular', 'sync', 'mirage_boot', 'mirage_regular'], ) def test_build_env_params_keys(node_type, is_mirage_boot, expected_type): - env_type = get_env_class(node_type=node_type, is_mirage_boot=is_mirage_boot) + env_type = get_user_config_type(node_type=node_type, is_mirage_boot=is_mirage_boot) assert env_type == expected_type @@ -190,47 +169,6 @@ def test_validate_env_alias_or_address_with_alias(requests_mock): validate_env_alias_or_address('test-alias', ContractType.IMA, ENDPOINT) -@pytest.mark.parametrize('env_type', ALLOWED_ENV_TYPES) -@pytest.mark.parametrize( - 'required_params, key_to_remove, should_fail', - [ - (REQUIRED_PARAMS_MIRAGE_BOOT, None, False), - (REQUIRED_PARAMS_MIRAGE, None, False), - (REQUIRED_PARAMS_MIRAGE_BOOT, 'IMA_CONTRACTS', True), - (REQUIRED_PARAMS_MIRAGE_BOOT, 'FILEBEAT_HOST', True), - (REQUIRED_PARAMS_MIRAGE, 'FILEBEAT_HOST', True), - ], - ids=[ - 'mirage_boot', - 'mirage_regular', - 'mirage_boot_missing_ima', - 'mirage_boot_missing_filebeat', - 'mirage_regular_missing_filebeat', - ], -) -@mock.patch('node_cli.configs.env.validate_env_alias_or_address') -@mock.patch('node_cli.configs.env.validate_env_type') -def test_validate_env_params_mirage( - mock_validate_type, - mock_validate_alias, - required_params, - key_to_remove, - should_fail, - env_type, -): - params = {k: f'{k}_val' for k in required_params} - params['ENV_TYPE'] = env_type - - if key_to_remove: - params[key_to_remove] = '' - - if should_fail: - with pytest.raises(SystemExit): - validate_env_params(params=params) - else: - validate_env_params(params=params) - - @pytest.mark.parametrize( 'node_type, is_boot, required_keys_dict', [ @@ -280,12 +218,12 @@ def test_get_validated_env_config_mirage_success( with mock.patch('node_cli.configs.alias_address_validation.requests.post') as mock_post: mock_post.return_value = FakeResponse(200, {'result': '0x123'}) - 
config = get_validated_env_config( + user_config = get_validated_user_config( node_type=node_type, env_filepath=str(env_file), is_mirage_boot=is_boot ) - assert config is not None - plain_config = config.to_env() + assert user_config is not None + plain_config = user_config.to_env() assert set(plain_config.keys()) == set(expected_config.keys()) for key in expected_config: assert plain_config[key] == expected_config[key] @@ -296,7 +234,7 @@ def test_get_validated_env_config_mirage_success( def test_get_validated_env_config_missing_file(): with pytest.raises(SystemExit): - get_validated_env_config(env_filepath='nonexistent.env', node_type=NodeType.REGULAR) + get_validated_user_config(env_filepath='nonexistent.env', node_type=NodeType.REGULAR) def test_get_validated_env_config_unreadable_file(tmp_path): @@ -306,6 +244,6 @@ def test_get_validated_env_config_unreadable_file(tmp_path): try: os.chmod(env_file, 0o000) with pytest.raises(PermissionError): - get_validated_env_config(env_filepath=str(env_file), node_type=NodeType.REGULAR) + get_validated_user_config(env_filepath=str(env_file), node_type=NodeType.REGULAR) finally: os.chmod(env_file, original_mode) From bd5cd2113f5f2285141e003ace568d4ffb5948e3 Mon Sep 17 00:00:00 2001 From: badrogger Date: Mon, 16 Jun 2025 17:09:23 +0100 Subject: [PATCH 094/332] Add UserConfig --- node_cli/cli/node.py | 2 +- node_cli/configs/alias_address_validation.py | 2 +- node_cli/configs/{env.py => user.py} | 87 +++++--------------- node_cli/core/host.py | 2 +- node_cli/core/node.py | 4 +- node_cli/core/resources.py | 2 +- node_cli/core/schains.py | 2 +- node_cli/operations/base.py | 4 +- tests/cli/node_test.py | 67 ++++++--------- tests/cli/resources_allocation_test.py | 35 ++++---- tests/cli/sync_node_test.py | 33 ++++---- tests/configs/configs_env_validate_test.py | 75 +---------------- tests/conftest.py | 85 +++++++++++++++++++ tests/core/core_node_test.py | 86 +++++++++---------- tests/test-env | 15 ---- 15 files changed, 222 
insertions(+), 279 deletions(-) rename node_cli/configs/{env.py => user.py} (70%) delete mode 100644 tests/test-env diff --git a/node_cli/cli/node.py b/node_cli/cli/node.py index db4d6fa5..61d71af7 100644 --- a/node_cli/cli/node.py +++ b/node_cli/cli/node.py @@ -37,7 +37,7 @@ run_checks, ) from node_cli.configs import DEFAULT_NODE_BASE_PORT -from node_cli.configs.env import ALLOWED_ENV_TYPES +from node_cli.configs.user import ALLOWED_ENV_TYPES from node_cli.utils.decorators import check_inited from node_cli.utils.helper import abort_if_false, streamed_cmd, IP_TYPE from node_cli.utils.texts import safe_load_texts diff --git a/node_cli/configs/alias_address_validation.py b/node_cli/configs/alias_address_validation.py index e0dcaa5e..ffdbb2ee 100644 --- a/node_cli/configs/alias_address_validation.py +++ b/node_cli/configs/alias_address_validation.py @@ -38,7 +38,7 @@ class ContractType(Enum): MANAGER = 'skale-manager' -def validate_env_alias_or_address( +def validate_alias_or_address( alias_or_address: str, contract_type: ContractType, endpoint: str ) -> None: if is_contract_address(alias_or_address): diff --git a/node_cli/configs/env.py b/node_cli/configs/user.py similarity index 70% rename from node_cli/configs/env.py rename to node_cli/configs/user.py index 0d95407c..9015d41f 100644 --- a/node_cli/configs/env.py +++ b/node_cli/configs/user.py @@ -19,14 +19,14 @@ import inspect import os -from typing import Dict, List, NamedTuple +from typing import Dict, NamedTuple from dataclasses import dataclass from abc import ABC from dotenv.main import DotEnv from node_cli.configs import SKALE_DIR, CONTAINER_CONFIG_PATH -from node_cli.configs.alias_address_validation import validate_env_alias_or_address, ContractType +from node_cli.configs.alias_address_validation import validate_alias_or_address, ContractType from node_cli.utils.node_type import NodeType from node_cli.utils.helper import error_exit @@ -46,7 +46,6 @@ class ValidationResult(NamedTuple): class 
BaseUserConfig(ABC): container_configs_stream: str endpoint: str - sgx_server_url: str env_type: str filebeat_host: str disk_mountpoint: str @@ -79,13 +78,13 @@ def validate_params(cls, params: Dict) -> ValidationResult: } missing = expected_keys - keys extra = keys - expected_keys - optional_keys - print('HEREC', params, parameters.items()) return ValidationResult(missing == set() and extra == set(), missing, extra) @dataclass class MirageUserConfig(BaseUserConfig): mirage_contracts: str + sgx_server_url: str enforce_btrfs: str = '' @@ -93,6 +92,7 @@ class MirageUserConfig(BaseUserConfig): class MirageBootUserConfig(BaseUserConfig): manager_contracts: str ima_contracts: str + sgx_server_url: str enforce_btrfs: str = '' @@ -101,6 +101,7 @@ class SkaleUserConfig(BaseUserConfig): manager_contracts: str ima_contracts: str docker_lvmpy_stream: str + sgx_server_url: str monitoring_containers: str = '' telegraf: str = '' influx_token: str = '' @@ -120,68 +121,14 @@ class SyncUserConfig(BaseUserConfig): enforce_btrfs: str = '' -CORE_REQUIRED_PARAMS: Dict[str, str] = { - 'CONTAINER_CONFIGS_STREAM': '', - 'ENDPOINT': '', - 'MANAGER_CONTRACTS': '', - 'DISK_MOUNTPOINT': '', - 'SGX_SERVER_URL': '', - 'ENV_TYPE': '', -} - -REQUIRED_PARAMS_SKALE: Dict[str, str] = { - **CORE_REQUIRED_PARAMS, - 'IMA_CONTRACTS': '', - 'DOCKER_LVMPY_STREAM': '', - 'FILEBEAT_HOST': '', -} - -REQUIRED_PARAMS_MIRAGE_BOOT: Dict[str, str] = { - **CORE_REQUIRED_PARAMS, - 'IMA_CONTRACTS': '', - 'FILEBEAT_HOST': '', -} -REQUIRED_PARAMS_MIRAGE: Dict[str, str] = { - **CORE_REQUIRED_PARAMS, - 'FILEBEAT_HOST': '', -} - -REQUIRED_PARAMS_SYNC: Dict[str, str] = { - **CORE_REQUIRED_PARAMS, - 'SCHAIN_NAME': '', - 'IMA_CONTRACTS': '', - 'DOCKER_LVMPY_STREAM': '', -} - -OPTIONAL_PARAMS: Dict[str, str] = { - 'MONITORING_CONTAINERS': '', - 'TELEGRAF': '', - 'INFLUX_TOKEN': '', - 'INFLUX_URL': '', - 'TG_API_KEY': '', - 'TG_CHAT_ID': '', - 'CONTAINER_CONFIGS_DIR': '', - 'DISABLE_DRY_RUN': '', - 'DEFAULT_GAS_LIMIT': '', - 
'DEFAULT_GAS_PRICE_WEI': '', - 'SKIP_DOCKER_CONFIG': '', - 'ENFORCE_BTRFS': '', - 'SKIP_DOCKER_CLEANUP': '', -} - - -def absent_required_params(params: Dict[str, str]) -> List[str]: - return [key for key in params if key not in OPTIONAL_PARAMS and not params[key]] - - def get_validated_user_config( node_type: NodeType, env_filepath: str = SKALE_DIR_ENV_FILEPATH, is_mirage_boot: bool = False, ) -> BaseUserConfig: params = parse_env_file(env_filepath) - UserConfigType = get_user_config_type(node_type, is_mirage_boot) - _, missing_params, extra_params = UserConfigType.validate_params(params) + user_config_type = get_user_config_type(node_type, is_mirage_boot) + _, missing_params, extra_params = user_config_type.validate_params(params) if len(missing_params) > 0: error_exit(f'Missing required parameters: {missing_params}') @@ -189,21 +136,25 @@ def get_validated_user_config( if len(extra_params) > 0: error_exit(f'Extra parameters: {extra_params}') - validate_env_type(env_type=params['ENV_TYPE']) params = to_lower_keys(params) - user_config = UserConfigType(**params) + user_config = user_config_type(**params) + validate_user_config(user_config) - if node_type == NodeType.MIRAGE and not is_mirage_boot: + return user_config + + +def validate_user_config(user_config: BaseUserConfig) -> None: + validate_env_type(env_type=user_config.env_type) + + if isinstance(user_config, MirageUserConfig): contract_alias_or_address = user_config.mirage_contracts else: - contract_alias_or_address = params.get('MANAGER_CONTRACTS', '') contract_alias_or_address = user_config.manager_contracts - validate_env_alias_or_address(contract_alias_or_address, ContractType.MANAGER, user_config.endpoint) - if 'IMA_CONTRACTS' in params: - validate_env_alias_or_address(user_config.ima_contracts, ContractType.IMA, user_config.endpoint) + validate_alias_or_address(contract_alias_or_address, ContractType.MANAGER, user_config.endpoint) - return user_config + if isinstance(user_config, (SkaleUserConfig, 
MirageBootUserConfig)): + validate_alias_or_address(user_config.ima_contracts, ContractType.IMA, user_config.endpoint) def to_lower_keys(params: Dict[str, str]) -> Dict[str, str]: diff --git a/node_cli/core/host.py b/node_cli/core/host.py index 7c9b2df6..8c684455 100644 --- a/node_cli/core/host.py +++ b/node_cli/core/host.py @@ -50,7 +50,7 @@ NGINX_CONFIG_FILEPATH, ) from node_cli.configs.cli_logger import LOG_DATA_PATH -from node_cli.configs.env import SKALE_DIR_ENV_FILEPATH, CONFIGS_ENV_FILEPATH +from node_cli.configs.user import SKALE_DIR_ENV_FILEPATH, CONFIGS_ENV_FILEPATH from node_cli.core.nftables import NFTablesManager from node_cli.utils.helper import safe_mkdir diff --git a/node_cli/core/node.py b/node_cli/core/node.py index 43d66ffd..58b7e383 100644 --- a/node_cli/core/node.py +++ b/node_cli/core/node.py @@ -42,7 +42,7 @@ TM_INIT_TIMEOUT, ) from node_cli.cli import __version__ -from node_cli.configs.env import get_validated_user_config, SKALE_DIR_ENV_FILEPATH +from node_cli.configs.user import get_validated_user_config, SKALE_DIR_ENV_FILEPATH from node_cli.configs.cli_logger import LOG_DATA_PATH as CLI_LOG_DATA_PATH from node_cli.core.host import is_node_inited, save_env_params, get_flask_secret_key @@ -173,7 +173,7 @@ def restore(backup_path, env_filepath, node_type: NodeType, no_snapshot=False, c restored_ok = restore_op(env, backup_path, node_type=node_type, config_only=config_only) if not restored_ok: error_exit('Restore operation failed', exit_code=CLIExitCodes.OPERATION_EXECUTION_ERROR) - time.sleep(RESTORE_SLEEP_TIMEOUT) + # time.sleep(RESTORE_SLEEP_TIMEOUT) logger.info('Generating resource allocation file ...') update_resource_allocation(env['ENV_TYPE']) print('Node is restored from backup') diff --git a/node_cli/core/resources.py b/node_cli/core/resources.py index f0a1d4ea..216aa657 100644 --- a/node_cli/core/resources.py +++ b/node_cli/core/resources.py @@ -24,7 +24,7 @@ import psutil -from node_cli.configs.env import get_validated_user_config 
+from node_cli.configs.user import get_validated_user_config from node_cli.utils.docker_utils import ensure_volume from node_cli.utils.schain_types import SchainTypes from node_cli.utils.helper import write_json, read_json, run_cmd, safe_load_yml diff --git a/node_cli/core/schains.py b/node_cli/core/schains.py index b5269304..9fd126c3 100644 --- a/node_cli/core/schains.py +++ b/node_cli/core/schains.py @@ -15,7 +15,7 @@ SCHAIN_NODE_DATA_PATH, SCHAINS_MNT_DIR_SINGLE_CHAIN, ) -from node_cli.configs.env import get_validated_user_config +from node_cli.configs.user import get_validated_user_config from node_cli.utils.helper import get_request, error_exit, safe_load_yml from node_cli.utils.exit_codes import CLIExitCodes diff --git a/node_cli/operations/base.py b/node_cli/operations/base.py index 456b4359..1f1ce73e 100644 --- a/node_cli/operations/base.py +++ b/node_cli/operations/base.py @@ -339,7 +339,7 @@ def init_sync( meta_manager.update_meta( VERSION, env['CONTAINER_CONFIGS_STREAM'], - env['DOCKER_LVMPY_STREAM'], + None, distro.id(), distro.version(), ) @@ -449,8 +449,6 @@ def restore(env, backup_path, node_type: NodeType, config_only=False): distro.id(), distro.version(), ) - update_resource_allocation(env_type=env['ENV_TYPE']) - if not config_only: compose_up(env=env, node_type=node_type) diff --git a/tests/cli/node_test.py b/tests/cli/node_test.py index 85163c3a..67323207 100644 --- a/tests/cli/node_test.py +++ b/tests/cli/node_test.py @@ -18,13 +18,16 @@ # along with this program. If not, see . 
import pathlib - import mock from unittest.mock import MagicMock, patch + import requests import logging +import pytest -from node_cli.configs import SKALE_DIR, G_CONF_HOME +from node_cli.utils.node_type import NodeType + +from node_cli.configs import INIT_ENV_FILEPATH, SKALE_DIR, G_CONF_HOME from node_cli.cli.node import ( node_info, register_node, @@ -321,14 +324,20 @@ def test_backup(): assert 'Backup archive succesfully created ' in result.output -def test_restore(mocked_g_config): +@pytest.mark.parametrize("node_type,test_user_conf", [ + (NodeType.REGULAR, "regular_user_conf"), + (NodeType.MIRAGE, "mirage_user_conf"), + (NodeType.SYNC, "sync_user_conf") +]) +def test_restore(request, node_type, test_user_conf, mocked_g_config, tmp_path): pathlib.Path(SKALE_DIR).mkdir(parents=True, exist_ok=True) - result = run_command(backup_node, ['/tmp']) + result = run_command(backup_node, [tmp_path]) backup_path = result.output.replace('Backup archive successfully created: ', '').replace( '\n', '' ) with ( + patch('node_cli.cli.node.TYPE', node_type), patch('node_cli.core.node.restore_op', MagicMock()) as mock_restore_op, patch('subprocess.run', new=subprocess_run_mock), patch('node_cli.core.resources.get_disk_size', return_value=BIG_DISK_SIZE), @@ -337,44 +346,20 @@ def test_restore(mocked_g_config): 'node_cli.core.node.CliMetaManager.get_meta_info', return_value=CliMeta(version='2.4.0', config_stream='3.0.2'), ), - # patch( - # 'node_cli.core.node.get_meta_info', - # return_value=CliMeta(version='2.4.0', config_stream='3.0.2'), - # ), patch('node_cli.operations.base.configure_nftables'), - patch('node_cli.configs.env.validate_env_params'), + patch('node_cli.configs.user.validate_alias_or_address'), ): - result = run_command(restore_node, [backup_path, './tests/test-env']) + user_conf_path = request.getfixturevalue(test_user_conf).as_posix() + + result = run_command(restore_node, [backup_path, user_conf_path]) assert result.exit_code == 0 assert 'Node is restored from 
backup\n' in result.output # noqa + assert mock_restore_op.call_args[0][0].get('BACKUP_RUN') == 'True' - assert mock_restore_op.call_args[0][0].get('BACKUP_RUN') == 'True' - - -def test_restore_no_snapshot(mocked_g_config): - pathlib.Path(SKALE_DIR).mkdir(parents=True, exist_ok=True) - result = run_command(backup_node, ['/tmp']) - backup_path = result.output.replace('Backup archive successfully created: ', '').replace( - '\n', '' - ) - - with ( - patch('node_cli.core.node.restore_op', MagicMock()) as mock_restore_op, - patch('subprocess.run', new=subprocess_run_mock), - patch('node_cli.core.resources.get_disk_size', return_value=BIG_DISK_SIZE), - patch('node_cli.utils.decorators.is_node_inited', return_value=False), - patch( - 'node_cli.core.node.CliMetaManager.get_meta_info', - return_value=CliMeta(version='2.4.0', config_stream='3.0.2'), - ), - patch('node_cli.operations.base.configure_nftables'), - patch('node_cli.configs.env.validate_env_params'), - ): - result = run_command(restore_node, [backup_path, './tests/test-env', '--no-snapshot']) + result = run_command(restore_node, [backup_path, user_conf_path, '--no-snapshot']) assert result.exit_code == 0 assert 'Node is restored from backup\n' in result.output # noqa - - assert mock_restore_op.call_args[0][0].get('BACKUP_RUN') is None + assert mock_restore_op.call_args[0][0].get('BACKUP_RUN') is None def test_maintenance_on(): @@ -401,13 +386,15 @@ def test_maintenance_off(mocked_g_config): ) -def test_turn_off_maintenance_on(mocked_g_config): +def test_turn_off_maintenance_on(mocked_g_config, regular_user_conf): resp_mock = response_mock(requests.codes.ok, {'status': 'ok', 'payload': None}) with ( mock.patch('subprocess.run', new=subprocess_run_mock), + mock.patch('node_cli.core.node.INIT_ENV_FILEPATH', regular_user_conf.as_posix()), mock.patch('node_cli.core.node.turn_off_op'), mock.patch('node_cli.utils.decorators.is_node_inited', return_value=True), - patch('node_cli.configs.env.validate_env_params'), + 
mock.patch('node_cli.configs.user.validate_alias_or_address'), + mock.patch('node_cli.cli.node.TYPE', NodeType.REGULAR) ): result = run_command_mock( 'node_cli.utils.helper.requests.post', @@ -431,7 +418,7 @@ def test_turn_off_maintenance_on(mocked_g_config): assert result.exit_code == CLIExitCodes.UNSAFE_UPDATE -def test_turn_on_maintenance_off(mocked_g_config): +def test_turn_on_maintenance_off(mocked_g_config, regular_user_conf): resp_mock = response_mock(requests.codes.ok, {'status': 'ok', 'payload': None}) with ( mock.patch('subprocess.run', new=subprocess_run_mock), @@ -439,13 +426,13 @@ def test_turn_on_maintenance_off(mocked_g_config): mock.patch('node_cli.core.node.turn_on_op'), mock.patch('node_cli.core.node.is_base_containers_alive'), mock.patch('node_cli.utils.decorators.is_node_inited', return_value=True), - patch('node_cli.configs.env.validate_env_params'), + patch('node_cli.configs.user.validate_alias_or_address'), ): result = run_command_mock( 'node_cli.utils.helper.requests.post', resp_mock, _turn_on, - ['./tests/test-env', '--maintenance-off', '--sync-schains', '--yes'], + [regular_user_conf.as_posix(), '--maintenance-off', '--sync-schains', '--yes'], ) assert result.exit_code == 0 diff --git a/tests/cli/resources_allocation_test.py b/tests/cli/resources_allocation_test.py index b317aad6..bdb8d8f0 100644 --- a/tests/cli/resources_allocation_test.py +++ b/tests/cli/resources_allocation_test.py @@ -17,22 +17,20 @@ # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see . 
-import os import json -import mock -import requests +import os +import mock import pytest +import requests -from node_cli.configs.resource_allocation import RESOURCE_ALLOCATION_FILEPATH, NODE_DATA_PATH +from node_cli.cli.resources_allocation import generate, show +from node_cli.configs.resource_allocation import NODE_DATA_PATH, RESOURCE_ALLOCATION_FILEPATH from node_cli.utils.helper import safe_mkdir, write_json +from node_cli.utils.node_type import NodeType from tests.helper import response_mock, run_command_mock - -from node_cli.cli.resources_allocation import show, generate - from tests.resources_test import BIG_DISK_SIZE - TEST_CONFIG = {'test': 1} @@ -51,15 +49,18 @@ def test_show(resource_alloc_config): assert result.exit_code == 0 -def test_generate(): +def test_generate(regular_user_conf): safe_mkdir(NODE_DATA_PATH) resp_mock = response_mock(requests.codes.created) with ( mock.patch('node_cli.core.resources.get_disk_size', return_value=BIG_DISK_SIZE), - mock.patch('node_cli.configs.env.validate_env_params'), + mock.patch('node_cli.configs.user.validate_alias_or_address'), ): result = run_command_mock( - 'node_cli.utils.helper.post_request', resp_mock, generate, ['./tests/test-env', '--yes'] + 'node_cli.utils.helper.post_request', + resp_mock, + generate, + [regular_user_conf.as_posix(), '--yes'] ) assert result.output == ( f'Resource allocation file generated: {RESOURCE_ALLOCATION_FILEPATH}\n' @@ -67,14 +68,18 @@ def test_generate(): assert result.exit_code == 0 -def test_generate_already_exists(resource_alloc_config): +def test_generate_already_exists(regular_user_conf, resource_alloc_config): resp_mock = response_mock(requests.codes.created) with ( mock.patch('node_cli.core.resources.get_disk_size', return_value=BIG_DISK_SIZE), - mock.patch('node_cli.configs.env.validate_env_params'), + mock.patch('node_cli.cli.node.TYPE', NodeType.REGULAR), + mock.patch('node_cli.configs.user.validate_alias_or_address'), ): result = run_command_mock( - 
'node_cli.utils.helper.post_request', resp_mock, generate, ['./tests/test-env', '--yes'] + 'node_cli.utils.helper.post_request', + resp_mock, + generate, + [regular_user_conf.as_posix(), '--yes'] ) assert result.output == 'Resource allocation file already exists\n' assert result.exit_code == 0 @@ -83,7 +88,7 @@ def test_generate_already_exists(resource_alloc_config): 'node_cli.utils.helper.post_request', resp_mock, generate, - ['./tests/test-env', '--yes', '--force'], + [regular_user_conf.as_posix(), '--yes', '--force'] ) assert result.output == ( f'Resource allocation file generated: {RESOURCE_ALLOCATION_FILEPATH}\n' diff --git a/tests/cli/sync_node_test.py b/tests/cli/sync_node_test.py index 27ecbb52..486b30e2 100644 --- a/tests/cli/sync_node_test.py +++ b/tests/cli/sync_node_test.py @@ -17,26 +17,26 @@ # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see . +import logging import pathlib import mock -import logging -from node_cli.configs import SKALE_DIR, NODE_DATA_PATH +from node_cli.cli.sync_node import _cleanup_sync, _init_sync, _update_sync +from node_cli.configs import NODE_DATA_PATH, SKALE_DIR from node_cli.core.node_options import NodeOptions -from node_cli.cli.sync_node import _init_sync, _update_sync, _cleanup_sync -from node_cli.utils.meta import CliMeta from node_cli.utils.helper import init_default_logger - +from node_cli.utils.meta import CliMeta +from node_cli.utils.node_type import NodeType +from tests.conftest import set_env_var from tests.helper import run_command, subprocess_run_mock from tests.resources_test import BIG_DISK_SIZE -from tests.conftest import set_env_var logger = logging.getLogger(__name__) init_default_logger() -def test_init_sync(mocked_g_config, clean_node_options): +def test_init_sync(mocked_g_config, clean_node_options, sync_user_conf): pathlib.Path(SKALE_DIR).mkdir(parents=True, exist_ok=True) with ( mock.patch('subprocess.run', new=subprocess_run_mock), @@ -45,9 
+45,9 @@ def test_init_sync(mocked_g_config, clean_node_options): mock.patch('node_cli.core.resources.get_disk_size', return_value=BIG_DISK_SIZE), mock.patch('node_cli.operations.base.configure_nftables'), mock.patch('node_cli.utils.decorators.is_node_inited', return_value=False), - mock.patch('node_cli.configs.env.validate_env_params'), + mock.patch('node_cli.configs.user.validate_alias_or_address'), ): - result = run_command(_init_sync, ['./tests/test-env']) + result = run_command(_init_sync, [sync_user_conf.as_posix()]) node_options = NodeOptions() assert not node_options.archive @@ -56,8 +56,7 @@ def test_init_sync(mocked_g_config, clean_node_options): assert result.exit_code == 0 - -def test_init_sync_archive(mocked_g_config, clean_node_options): +def test_init_sync_archive(mocked_g_config, clean_node_options, sync_user_conf): pathlib.Path(NODE_DATA_PATH).mkdir(parents=True, exist_ok=True) with ( mock.patch('node_cli.core.node.is_base_containers_alive', return_value=True), @@ -77,9 +76,10 @@ def test_init_sync_archive(mocked_g_config, clean_node_options): mock.patch('node_cli.core.resources.get_disk_size', return_value=BIG_DISK_SIZE), mock.patch('node_cli.operations.base.configure_nftables'), mock.patch('node_cli.utils.decorators.is_node_inited', return_value=False), - mock.patch('node_cli.configs.env.validate_env_params'), + mock.patch('node_cli.configs.user.validate_alias_or_address'), + mock.patch('node_cli.cli.node.TYPE', NodeType.SYNC) ): - result = run_command(_init_sync, ['./tests/test-env', '--archive']) + result = run_command(_init_sync, [sync_user_conf.as_posix(), '--archive']) node_options = NodeOptions() assert node_options.archive @@ -105,8 +105,7 @@ def test_init_archive_indexer_fail(mocked_g_config, clean_node_options): assert result.exit_code == 1 assert 'Cannot use both' in result.output - -def test_update_sync(mocked_g_config): +def test_update_sync(sync_user_conf, mocked_g_config): pathlib.Path(SKALE_DIR).mkdir(parents=True, exist_ok=True) 
with ( @@ -120,9 +119,9 @@ def test_update_sync(mocked_g_config): 'node_cli.core.node.CliMetaManager.get_meta_info', return_value=CliMeta(version='2.6.0', config_stream='3.0.2'), ), - mock.patch('node_cli.configs.env.validate_env_params'), + mock.patch('node_cli.configs.user.validate_alias_or_address'), ): - result = run_command(_update_sync, ['./tests/test-env', '--yes']) + result = run_command(_update_sync, [sync_user_conf.as_posix(), '--yes']) assert result.exit_code == 0 diff --git a/tests/configs/configs_env_validate_test.py b/tests/configs/configs_env_validate_test.py index 5ac014c1..db0187eb 100644 --- a/tests/configs/configs_env_validate_test.py +++ b/tests/configs/configs_env_validate_test.py @@ -11,18 +11,14 @@ get_network_metadata, validate_contract_address, validate_contract_alias, - validate_env_alias_or_address, + validate_alias_or_address, ) -from node_cli.configs.env import ( +from node_cli.configs.user import ( ALLOWED_ENV_TYPES, - OPTIONAL_PARAMS, - REQUIRED_PARAMS_MIRAGE, - REQUIRED_PARAMS_MIRAGE_BOOT, MirageBootUserConfig, MirageUserConfig, SkaleUserConfig, SyncUserConfig, - absent_required_params, get_user_config_type, get_validated_user_config, validate_env_type, @@ -156,7 +152,7 @@ def test_validate_contract_alias(requests_mock, networks, should_raise): def test_validate_env_alias_or_address_with_address(requests_mock): addr = '0x' + 'b' * 40 requests_mock.post(ENDPOINT, json={'result': '0x1'}) - validate_env_alias_or_address(addr, ContractType.IMA, ENDPOINT) + validate_alias_or_address(addr, ContractType.IMA, ENDPOINT) def test_validate_env_alias_or_address_with_alias(requests_mock): @@ -166,70 +162,7 @@ def test_validate_env_alias_or_address_with_alias(requests_mock): requests_mock.get(metadata_url, json=metadata, status_code=200) alias_url = 'https://raw.githubusercontent.com/skalenetwork/skale-contracts/refs/heads/deployments/mainnet/mainnet-ima/test-alias.json' requests_mock.get(alias_url, status_code=200) - 
validate_env_alias_or_address('test-alias', ContractType.IMA, ENDPOINT) - - -@pytest.mark.parametrize( - 'node_type, is_boot, required_keys_dict', - [ - (NodeType.MIRAGE, True, REQUIRED_PARAMS_MIRAGE_BOOT), - (NodeType.MIRAGE, False, REQUIRED_PARAMS_MIRAGE), - ], - ids=['mirage_boot', 'mirage_regular'], -) -@mock.patch('node_cli.configs.alias_address_validation.validate_env_alias_or_address') -@mock.patch('node_cli.configs.alias_address_validation.get_chain_id', return_value=1) -@mock.patch( - 'node_cli.configs.alias_address_validation.get_network_metadata', - return_value={'networks': [{'chainId': 1, 'path': 'mainnet'}]}, -) -def test_get_validated_env_config_mirage_success( - mock_meta, - mock_chain, - mock_validate_alias, - tmp_path, - monkeypatch, - node_type, - is_boot, - required_keys_dict, -): - env_file = tmp_path / 'mirage.env' - env_content = '' - expected_config = {} - - for key in {**required_keys_dict, **OPTIONAL_PARAMS}: - env_value = f'{key}_value' - if key == 'ENDPOINT': - env_value = ENDPOINT - if key == 'ENV_TYPE': - env_value = 'devnet' - if key == 'MANAGER_CONTRACTS': - env_value = '0x' + '1' * 40 - if key == 'IMA_CONTRACTS': - env_value = '0x' + '2' * 40 - - if key in required_keys_dict: - env_content += f'{key}={env_value}\n' - monkeypatch.setenv(key, env_value) - expected_config[key] = env_value - - env_file.write_text(env_content) - - with mock.patch('node_cli.configs.alias_address_validation.requests.post') as mock_post: - mock_post.return_value = FakeResponse(200, {'result': '0x123'}) - - user_config = get_validated_user_config( - node_type=node_type, env_filepath=str(env_file), is_mirage_boot=is_boot - ) - - assert user_config is not None - plain_config = user_config.to_env() - assert set(plain_config.keys()) == set(expected_config.keys()) - for key in expected_config: - assert plain_config[key] == expected_config[key] - - for key in {**required_keys_dict, **OPTIONAL_PARAMS}: - monkeypatch.delenv(key, raising=False) + 
validate_alias_or_address('test-alias', ContractType.IMA, ENDPOINT) def test_get_validated_env_config_missing_file(): diff --git a/tests/conftest.py b/tests/conftest.py index 369b2374..9cbbde01 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -298,3 +298,88 @@ def set_env_var(name, value): del os.environ[name] else: os.environ[name] = old_value + + +@pytest.fixture +def regular_user_conf(tmp_path): + test_env_path = pathlib.Path(tmp_path / 'test-env') + try: + test_env = """ + ENDPOINT=http://localhost:8545 + CONTAINER_CONFIGS_STREAM='main' + FILEBEAT_HOST=127.0.0.1:3010 + SGX_SERVER_URL=http://127.0.0.1 + DISK_MOUNTPOINT=/dev/sss + DOCKER_LVMPY_STREAM='master' + ENV_TYPE='devnet' + MANAGER_CONTRACTS='test-manager' + IMA_CONTRACTS='test-ima' + """ + with open(test_env_path, 'w') as env_file: + env_file.write(test_env) + yield test_env_path + finally: + test_env_path.unlink() + + +@pytest.fixture +def mirage_user_conf(tmp_path): + test_env_path = pathlib.Path(tmp_path / 'test-env') + try: + test_env = """ + ENDPOINT=http://localhost:8545 + CONTAINER_CONFIGS_STREAM='main' + FILEBEAT_HOST=127.0.0.1:3010 + SGX_SERVER_URL=http://127.0.0.1 + DISK_MOUNTPOINT=/dev/sss + ENV_TYPE='devnet' + ENFORCE_BTRFS=False + MIRAGE_CONTRACTS='test-mirage' + """ + with open(test_env_path, 'w') as env_file: + env_file.write(test_env) + yield test_env_path + finally: + test_env_path.unlink() + + +@pytest.fixture +def mirage_boot_user_conf(tmp_path): + test_env_path = pathlib.Path(tmp_path / 'test-env') + try: + test_env = """ + ENDPOINT=http://localhost:8545 + CONTAINER_CONFIGS_STREAM='main' + FILEBEAT_HOST=127.0.0.1:3010 + SGX_SERVER_URL=http://127.0.0.1 + DISK_MOUNTPOINT=/dev/sss + ENV_TYPE='devnet' + MANAGER_CONTRACTS='test-manager' + IMA_CONTRACTS='test-ima' + """ + with open(test_env_path, 'w') as env_file: + env_file.write(test_env) + yield test_env_path + finally: + test_env_path.unlink() + + +@pytest.fixture +def sync_user_conf(tmp_path): + test_env_path = 
pathlib.Path(tmp_path / 'test-env') + try: + test_env = """ + ENDPOINT=http://localhost:8545 + CONTAINER_CONFIGS_STREAM='main' + FILEBEAT_HOST=127.0.0.1:3010 + DISK_MOUNTPOINT=/dev/sss + ENV_TYPE='devnet' + SCHAIN_NAME='test-schain' + ENFORCE_BTRFS=False + MANAGER_CONTRACTS='test-manager' + """ + with open(test_env_path, 'w') as env_file: + env_file.write(test_env) + yield test_env_path + finally: + test_env_path.unlink() diff --git a/tests/core/core_node_test.py b/tests/core/core_node_test.py index 27c96cf7..29813df5 100644 --- a/tests/core/core_node_test.py +++ b/tests/core/core_node_test.py @@ -11,6 +11,7 @@ from node_cli.configs import NODE_DATA_PATH, SCHAINS_MNT_DIR_REGULAR, SCHAINS_MNT_DIR_SINGLE_CHAIN from node_cli.configs.resource_allocation import RESOURCE_ALLOCATION_FILEPATH +from node_cli.configs.user import SkaleUserConfig from node_cli.core.node import ( get_expected_container_names, is_base_containers_alive, @@ -149,15 +150,15 @@ def test_is_base_containers_alive_empty(node_type, is_boot): @pytest.mark.parametrize( ( - 'node_type, is_boot, inited_node, sync_schains, expected_mnt_dir, ' + 'node_type, test_user_conf, is_boot, inited_node, sync_schains, expected_mnt_dir,' 'expect_flask_key, expect_backup_run' ), [ - (NodeType.REGULAR, False, True, False, SCHAINS_MNT_DIR_REGULAR, True, False), - (NodeType.REGULAR, False, True, True, SCHAINS_MNT_DIR_REGULAR, True, True), - (NodeType.SYNC, False, False, False, SCHAINS_MNT_DIR_SINGLE_CHAIN, False, False), - (NodeType.MIRAGE, True, True, False, SCHAINS_MNT_DIR_SINGLE_CHAIN, True, False), - (NodeType.MIRAGE, False, True, False, SCHAINS_MNT_DIR_SINGLE_CHAIN, True, False), + (NodeType.REGULAR, 'regular_user_conf', False, True, False, SCHAINS_MNT_DIR_REGULAR, True, False), + (NodeType.REGULAR, 'regular_user_conf', False, True, True, SCHAINS_MNT_DIR_REGULAR, True, True), + (NodeType.SYNC, 'sync_user_conf', False, False, False, SCHAINS_MNT_DIR_SINGLE_CHAIN, False, False), + (NodeType.MIRAGE, 
'mirage_boot_user_conf', True, True, False, SCHAINS_MNT_DIR_SINGLE_CHAIN, True, False), + (NodeType.MIRAGE, 'mirage_user_conf', False, True, False, SCHAINS_MNT_DIR_SINGLE_CHAIN, True, False), ], ids=[ 'regular', @@ -167,42 +168,41 @@ def test_is_base_containers_alive_empty(node_type, is_boot): 'mirage_regular', ], ) -@mock.patch('node_cli.core.node.get_validated_env_config') -@mock.patch('node_cli.core.node.save_env_params') -@mock.patch('node_cli.core.node.get_flask_secret_key', return_value='mock_secret') def test_compose_node_env( - mock_get_secret, - mock_save_params, - mock_get_validated, + request, node_type, + test_user_conf, is_boot, inited_node, sync_schains, expected_mnt_dir, expect_flask_key, expect_backup_run, - valid_env_file, - valid_env_params, ): - mock_get_validated.return_value = valid_env_params.copy() - if node_type == NodeType.SYNC: - mock_get_validated.return_value['ENV_TYPE'] = 'devnet' - else: - mock_get_validated.return_value['ENV_TYPE'] = 'mainnet' - - result_env = compose_node_env( - env_filepath=valid_env_file, - inited_node=inited_node, - sync_schains=sync_schains, - node_type=node_type, - is_mirage_boot=is_boot, - save=True, - ) + user_config_path = request.getfixturevalue(test_user_conf) + # mock_get_validated.return_value = valid_env_params.copy() + # if node_type == NodeType.SYNC: + # mock_get_validated.return_value['ENV_TYPE'] = 'devnet' + # else: + # mock_get_validated.return_value['ENV_TYPE'] = 'mainnet' + with ( + mock.patch('node_cli.configs.user.validate_alias_or_address'), + mock.patch('node_cli.core.node.save_env_params'), + mock.patch('node_cli.core.node.get_flask_secret_key', return_value='mock_secret') + ): + result_env = compose_node_env( + env_filepath=user_config_path.as_posix(), + inited_node=inited_node, + sync_schains=sync_schains, + node_type=node_type, + is_mirage_boot=is_boot, + save=True, + ) - mock_save_params.assert_called_once_with(valid_env_file) - mock_get_validated.assert_called_once_with( - 
env_filepath=valid_env_file, node_type=node_type, is_mirage_boot=is_boot - ) + # mock_save_params.assert_called_once_with(user_config_path) + # mock_get_validated.assert_called_once_with( + # env_filepath=valid_env_file, node_type=node_type, is_mirage_boot=is_boot + # ) assert result_env['SCHAINS_MNT_DIR'] == expected_mnt_dir assert ( 'FLASK_SECRET_KEY' in result_env and result_env['FLASK_SECRET_KEY'] is not None @@ -211,7 +211,7 @@ def test_compose_node_env( assert result_env['FLASK_SECRET_KEY'] == 'mock_secret' should_have_backup = sync_schains and node_type != NodeType.SYNC assert ('BACKUP_RUN' in result_env and result_env['BACKUP_RUN'] == 'True') == should_have_backup - assert result_env['ENDPOINT'] == valid_env_params['ENDPOINT'] + # assert result_env['ENDPOINT'] == valid_env_params['ENDPOINT'] @pytest.fixture @@ -274,11 +274,9 @@ def resource_file(): if os.path.exists(RESOURCE_ALLOCATION_FILEPATH): os.remove(RESOURCE_ALLOCATION_FILEPATH) - -def test_init_node(no_resource_file): # todo: write new init node test +def test_init_node(regular_user_conf, no_resource_file): # todo: write new init node test resp_mock = response_mock(requests.codes.created) assert not os.path.isfile(RESOURCE_ALLOCATION_FILEPATH) - env_filepath = './tests/test-env' with ( mock.patch('subprocess.run', new=subprocess_run_mock), mock.patch('node_cli.core.resources.get_disk_size', return_value=BIG_DISK_SIZE), @@ -288,15 +286,13 @@ def test_init_node(no_resource_file): # todo: write new init node test mock.patch('node_cli.core.node.init_op'), mock.patch('node_cli.core.node.is_base_containers_alive', return_value=True), mock.patch('node_cli.utils.helper.post_request', resp_mock), - mock.patch('node_cli.configs.env.validate_env_params'), + mock.patch('node_cli.configs.user.validate_alias_or_address'), ): - init(env_filepath=env_filepath, node_type=NodeType.REGULAR) + init(env_filepath=regular_user_conf.as_posix(), node_type=NodeType.REGULAR) assert os.path.isfile(RESOURCE_ALLOCATION_FILEPATH) 
-@pytest.mark.parametrize('node_type', [NodeType.REGULAR, NodeType.SYNC, NodeType.MIRAGE]) -def test_update_node(node_type, mocked_g_config, resource_file, inited_node): - env_filepath = './tests/test-env' +def test_update_node(regular_user_conf, mocked_g_config, resource_file, inited_node): resp_mock = response_mock(requests.codes.created) os.makedirs(NODE_DATA_PATH, exist_ok=True) with ( @@ -314,12 +310,16 @@ def test_update_node(node_type, mocked_g_config, resource_file, inited_node): 'node_cli.core.node.CliMetaManager.get_meta_info', return_value=CliMeta(version='2.6.0', config_stream='3.0.2'), ), - mock.patch('node_cli.configs.env.validate_env_params'), + mock.patch('node_cli.configs.user.validate_alias_or_address'), ): with mock.patch( 'node_cli.utils.helper.requests.get', return_value=safe_update_api_response() ): # noqa - result = update(env_filepath, pull_config_for_schain=None, node_type=node_type) + result = update( + regular_user_conf.as_posix(), + pull_config_for_schain=None, + node_type=NodeType.REGULAR + ) assert result is None diff --git a/tests/test-env b/tests/test-env deleted file mode 100644 index 7698a8b8..00000000 --- a/tests/test-env +++ /dev/null @@ -1,15 +0,0 @@ -ENDPOINT=http://localhost:8545 -IMA_ENDPOINT=http://127.0.01 -DB_USER=user -DB_PASSWORD=pass -DB_PORT=3307 -CONTAINER_CONFIGS_STREAM='master' -FILEBEAT_HOST=127.0.0.1:3010 -SGX_SERVER_URL=http://127.0.0.1 -DISK_MOUNTPOINT=/dev/sss -DOCKER_LVMPY_STREAM='master' -ENV_TYPE='devnet' -SCHAIN_NAME='test' -ENFORCE_BTRFS=False -MANAGER_CONTRACTS='test-manager' -IMA_CONTRACTS='test-ima' \ No newline at end of file From 40fc1c4537ac4a93b9a40fec3c8cc7b6e22addd8 Mon Sep 17 00:00:00 2001 From: badrogger Date: Mon, 16 Jun 2025 17:49:40 +0100 Subject: [PATCH 095/332] Fix tests --- node_cli/core/node.py | 2 +- tests/cli/node_test.py | 5 +++-- 2 files changed, 4 insertions(+), 3 deletions(-) diff --git a/node_cli/core/node.py b/node_cli/core/node.py index 58b7e383..ca60ba5e 100644 --- 
a/node_cli/core/node.py +++ b/node_cli/core/node.py @@ -173,7 +173,7 @@ def restore(backup_path, env_filepath, node_type: NodeType, no_snapshot=False, c restored_ok = restore_op(env, backup_path, node_type=node_type, config_only=config_only) if not restored_ok: error_exit('Restore operation failed', exit_code=CLIExitCodes.OPERATION_EXECUTION_ERROR) - # time.sleep(RESTORE_SLEEP_TIMEOUT) + time.sleep(RESTORE_SLEEP_TIMEOUT) logger.info('Generating resource allocation file ...') update_resource_allocation(env['ENV_TYPE']) print('Node is restored from backup') diff --git a/tests/cli/node_test.py b/tests/cli/node_test.py index 67323207..a2fb4fa4 100644 --- a/tests/cli/node_test.py +++ b/tests/cli/node_test.py @@ -390,7 +390,7 @@ def test_turn_off_maintenance_on(mocked_g_config, regular_user_conf): resp_mock = response_mock(requests.codes.ok, {'status': 'ok', 'payload': None}) with ( mock.patch('subprocess.run', new=subprocess_run_mock), - mock.patch('node_cli.core.node.INIT_ENV_FILEPATH', regular_user_conf.as_posix()), + mock.patch('node_cli.core.node.SKALE_DIR_ENV_FILEPATH', regular_user_conf.as_posix()), mock.patch('node_cli.core.node.turn_off_op'), mock.patch('node_cli.utils.decorators.is_node_inited', return_value=True), mock.patch('node_cli.configs.user.validate_alias_or_address'), @@ -426,7 +426,8 @@ def test_turn_on_maintenance_off(mocked_g_config, regular_user_conf): mock.patch('node_cli.core.node.turn_on_op'), mock.patch('node_cli.core.node.is_base_containers_alive'), mock.patch('node_cli.utils.decorators.is_node_inited', return_value=True), - patch('node_cli.configs.user.validate_alias_or_address'), + mock.patch('node_cli.configs.user.validate_alias_or_address'), + mock.patch('node_cli.cli.node.TYPE', NodeType.REGULAR) ): result = run_command_mock( 'node_cli.utils.helper.requests.post', From bc8313d2ca502091d4659f624a3597f7d4270be2 Mon Sep 17 00:00:00 2001 From: badrogger Date: Mon, 16 Jun 2025 18:17:33 +0100 Subject: [PATCH 096/332] Fix linter --- 
tests/cli/node_test.py | 24 ++++---- tests/configs/configs_env_validate_test.py | 3 +- tests/core/core_node_test.py | 68 ++++++++++++++++++---- 3 files changed, 68 insertions(+), 27 deletions(-) diff --git a/tests/cli/node_test.py b/tests/cli/node_test.py index a2fb4fa4..fa966858 100644 --- a/tests/cli/node_test.py +++ b/tests/cli/node_test.py @@ -17,34 +17,32 @@ # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see . +import logging import pathlib -import mock from unittest.mock import MagicMock, patch -import requests -import logging +import mock import pytest +import requests -from node_cli.utils.node_type import NodeType - -from node_cli.configs import INIT_ENV_FILEPATH, SKALE_DIR, G_CONF_HOME from node_cli.cli.node import ( + _set_domain_name, + _turn_off, + _turn_on, + backup_node, node_info, register_node, - signature, - backup_node, + remove_node_from_maintenance, restore_node, set_node_in_maintenance, - remove_node_from_maintenance, + signature, version, - _turn_off, - _turn_on, - _set_domain_name, ) +from node_cli.configs import G_CONF_HOME, SKALE_DIR from node_cli.utils.exit_codes import CLIExitCodes from node_cli.utils.helper import init_default_logger from node_cli.utils.meta import CliMeta - +from node_cli.utils.node_type import NodeType from tests.helper import ( response_mock, run_command, diff --git a/tests/configs/configs_env_validate_test.py b/tests/configs/configs_env_validate_test.py index db0187eb..449b9a4e 100644 --- a/tests/configs/configs_env_validate_test.py +++ b/tests/configs/configs_env_validate_test.py @@ -1,7 +1,6 @@ import os from typing import Optional -import mock import pytest import requests @@ -9,9 +8,9 @@ ContractType, get_chain_id, get_network_metadata, + validate_alias_or_address, validate_contract_address, validate_contract_alias, - validate_alias_or_address, ) from node_cli.configs.user import ( ALLOWED_ENV_TYPES, diff --git a/tests/core/core_node_test.py 
b/tests/core/core_node_test.py index 29813df5..e06b88ad 100644 --- a/tests/core/core_node_test.py +++ b/tests/core/core_node_test.py @@ -11,19 +11,17 @@ from node_cli.configs import NODE_DATA_PATH, SCHAINS_MNT_DIR_REGULAR, SCHAINS_MNT_DIR_SINGLE_CHAIN from node_cli.configs.resource_allocation import RESOURCE_ALLOCATION_FILEPATH -from node_cli.configs.user import SkaleUserConfig from node_cli.core.node import ( + compose_node_env, get_expected_container_names, - is_base_containers_alive, init, + is_base_containers_alive, + is_update_safe, pack_dir, update, - is_update_safe, - compose_node_env, ) from node_cli.utils.meta import CliMeta from node_cli.utils.node_type import NodeType - from tests.helper import response_mock, safe_update_api_response, subprocess_run_mock from tests.resources_test import BIG_DISK_SIZE @@ -154,11 +152,56 @@ def test_is_base_containers_alive_empty(node_type, is_boot): 'expect_flask_key, expect_backup_run' ), [ - (NodeType.REGULAR, 'regular_user_conf', False, True, False, SCHAINS_MNT_DIR_REGULAR, True, False), - (NodeType.REGULAR, 'regular_user_conf', False, True, True, SCHAINS_MNT_DIR_REGULAR, True, True), - (NodeType.SYNC, 'sync_user_conf', False, False, False, SCHAINS_MNT_DIR_SINGLE_CHAIN, False, False), - (NodeType.MIRAGE, 'mirage_boot_user_conf', True, True, False, SCHAINS_MNT_DIR_SINGLE_CHAIN, True, False), - (NodeType.MIRAGE, 'mirage_user_conf', False, True, False, SCHAINS_MNT_DIR_SINGLE_CHAIN, True, False), + ( + NodeType.REGULAR, + 'regular_user_conf', + False, + True, + False, + SCHAINS_MNT_DIR_REGULAR, + True, + False, + ), + ( + NodeType.REGULAR, + 'regular_user_conf', + False, + True, + True, + SCHAINS_MNT_DIR_REGULAR, + True, + True, + ), + ( + NodeType.SYNC, + 'sync_user_conf', + False, + False, + False, + SCHAINS_MNT_DIR_SINGLE_CHAIN, + False, + False, + ), + ( + NodeType.MIRAGE, + 'mirage_boot_user_conf', + True, + True, + False, + SCHAINS_MNT_DIR_SINGLE_CHAIN, + True, + False, + ), + ( + NodeType.MIRAGE, + 
'mirage_user_conf', + False, + True, + False, + SCHAINS_MNT_DIR_SINGLE_CHAIN, + True, + False, + ), ], ids=[ 'regular', @@ -188,7 +231,7 @@ def test_compose_node_env( with ( mock.patch('node_cli.configs.user.validate_alias_or_address'), mock.patch('node_cli.core.node.save_env_params'), - mock.patch('node_cli.core.node.get_flask_secret_key', return_value='mock_secret') + mock.patch('node_cli.core.node.get_flask_secret_key', return_value='mock_secret'), ): result_env = compose_node_env( env_filepath=user_config_path.as_posix(), @@ -274,6 +317,7 @@ def resource_file(): if os.path.exists(RESOURCE_ALLOCATION_FILEPATH): os.remove(RESOURCE_ALLOCATION_FILEPATH) + def test_init_node(regular_user_conf, no_resource_file): # todo: write new init node test resp_mock = response_mock(requests.codes.created) assert not os.path.isfile(RESOURCE_ALLOCATION_FILEPATH) @@ -318,7 +362,7 @@ def test_update_node(regular_user_conf, mocked_g_config, resource_file, inited_n result = update( regular_user_conf.as_posix(), pull_config_for_schain=None, - node_type=NodeType.REGULAR + node_type=NodeType.REGULAR, ) assert result is None From be53af5df7a7b56db12a0e761d759504c71ec624 Mon Sep 17 00:00:00 2001 From: badrogger Date: Mon, 16 Jun 2025 18:21:52 +0100 Subject: [PATCH 097/332] Add missing license header. 
Improve corner cases handling --- node_cli/core/schains.py | 40 ++++++++++++++++++++------ node_cli/core/static_config.py | 20 +++++++++++++ node_cli/mirage/record/redis_record.py | 7 ++--- node_cli/utils/texts.py | 10 +++---- 4 files changed, 59 insertions(+), 18 deletions(-) diff --git a/node_cli/core/schains.py b/node_cli/core/schains.py index 957e02df..783722fa 100644 --- a/node_cli/core/schains.py +++ b/node_cli/core/schains.py @@ -1,3 +1,22 @@ +# -*- coding: utf-8 -*- +# +# This file is part of node-cli +# +# Copyright (C) 2025 SKALE Labs +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Affero General Public License for more details. +# +# You should have received a copy of the GNU Affero General Public License +# along with this program. If not, see . 
+ import glob import logging import os @@ -5,31 +24,34 @@ import shutil import time from pathlib import Path - from typing import Dict, Optional +from lvmpy.src.core import mount, volume_mountpoint from node_cli.configs import ( ALLOCATION_FILEPATH, - NODE_CONFIG_PATH, NODE_CLI_STATUS_FILENAME, + NODE_CONFIG_PATH, SCHAIN_NODE_DATA_PATH, SCHAINS_MNT_DIR_SINGLE_CHAIN, ) from node_cli.configs.env import get_validated_env_config - -from node_cli.utils.helper import get_request, error_exit, safe_load_yml +from node_cli.utils.docker_utils import ensure_volume, is_volume_exists from node_cli.utils.exit_codes import CLIExitCodes +from node_cli.utils.helper import ( + error_exit, + get_request, + read_json, + run_cmd, + safe_load_yml, + save_json, +) +from node_cli.utils.node_type import NodeType from node_cli.utils.print_formatters import ( print_dkg_statuses, print_firewall_rules, print_schain_info, print_schains, ) -from node_cli.utils.docker_utils import ensure_volume, is_volume_exists -from node_cli.utils.helper import read_json, run_cmd, save_json -from node_cli.utils.node_type import NodeType -from lvmpy.src.core import mount, volume_mountpoint - logger = logging.getLogger(__name__) diff --git a/node_cli/core/static_config.py b/node_cli/core/static_config.py index d5252687..721251ce 100644 --- a/node_cli/core/static_config.py +++ b/node_cli/core/static_config.py @@ -1,4 +1,24 @@ +# -*- coding: utf-8 -*- +# +# This file is part of node-cli +# +# Copyright (C) 2025 SKALE Labs +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the +# GNU Affero General Public License for more details. +# +# You should have received a copy of the GNU Affero General Public License +# along with this program. If not, see . + import os + import yaml from node_cli.configs import ( diff --git a/node_cli/mirage/record/redis_record.py b/node_cli/mirage/record/redis_record.py index 78f87d94..b71aa9e4 100644 --- a/node_cli/mirage/record/redis_record.py +++ b/node_cli/mirage/record/redis_record.py @@ -18,9 +18,9 @@ # along with this program. If not, see . import abc -from typing import Any -from datetime import datetime from dataclasses import dataclass +from datetime import datetime +from typing import Any import redis @@ -29,6 +29,7 @@ cpool: redis.ConnectionPool = redis.ConnectionPool.from_url(REDIS_URI) rs: redis.Redis = redis.Redis(connection_pool=cpool) + @dataclass class FieldInfo: name: str @@ -77,8 +78,6 @@ def _deserialize_datetime(self, value: str) -> datetime: def _get_field(self, field_name: str): key = self._get_field_key(field_name) value = rs.get(key) - if value is None: - raise ValueError(f"Field '{field_name}' not found in record '{self.name}'") return self._deserialize_field(value, self._record_fields()[field_name].type) def _set_field(self, field_name: str, value) -> None: diff --git a/node_cli/utils/texts.py b/node_cli/utils/texts.py index 718edd75..78102692 100644 --- a/node_cli/utils/texts.py +++ b/node_cli/utils/texts.py @@ -17,13 +17,13 @@ # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see . 
+from typing import Dict + import yaml + from node_cli.configs import TEXT_FILE -def safe_load_texts(): +def safe_load_texts() -> Dict: with open(TEXT_FILE, 'r') as stream: - try: - return yaml.safe_load(stream) - except yaml.YAMLError as exc: - print(exc) + return yaml.safe_load(stream) From 7a816b752f86bf8d0a09007ad4138e2473560dc5 Mon Sep 17 00:00:00 2001 From: badrogger Date: Mon, 16 Jun 2025 18:41:35 +0100 Subject: [PATCH 098/332] Update to helper-scripts with redis --- .github/workflows/test.yml | 4 ++++ helper-scripts | 2 +- scripts/run_redis.sh | 5 ----- scripts/run_tests.sh | 2 -- 4 files changed, 5 insertions(+), 8 deletions(-) delete mode 100755 scripts/run_redis.sh diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 04bde884..026ac8e8 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -71,6 +71,10 @@ jobs: run: | scripts/build.sh test test normal + - name: Run redis + run: | + ./helper-scripts/redis/run.sh + - name: Run tests run: | export PYTHONPATH=${PYTHONPATH}:/usr/lib/python3/dist-packages/ diff --git a/helper-scripts b/helper-scripts index 7c6ccee7..5c5bf3a0 160000 --- a/helper-scripts +++ b/helper-scripts @@ -1 +1 @@ -Subproject commit 7c6ccee7599f30ddec3cf0d7747dd031fd57cb27 +Subproject commit 5c5bf3a09500d605d72b53e2da8e871ffca1403a diff --git a/scripts/run_redis.sh b/scripts/run_redis.sh deleted file mode 100755 index b3591b97..00000000 --- a/scripts/run_redis.sh +++ /dev/null @@ -1,5 +0,0 @@ -set -e - -docker rm -f redis || true -export DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )" -docker run -v $DIR/../tests/redis-conf:/config --network=host --name=redis -d redis:6.0-alpine diff --git a/scripts/run_tests.sh b/scripts/run_tests.sh index c85201fc..8c5d7fd2 100755 --- a/scripts/run_tests.sh +++ b/scripts/run_tests.sh @@ -3,8 +3,6 @@ DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )" PROJECT_DIR=$(dirname $DIR) -bash scripts/run_redis.sh - 
LVMPY_LOG_DIR="$PROJECT_DIR/tests/" \ HIDE_STREAM_LOG=true \ TEST_HOME_DIR="$PROJECT_DIR/tests/" \ From 6caf68b7d7a8fb7df020eaaa30141f4798a7fbf9 Mon Sep 17 00:00:00 2001 From: badrogger Date: Mon, 16 Jun 2025 18:48:13 +0100 Subject: [PATCH 099/332] Fix run_tests.sh script --- scripts/run_tests.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/run_tests.sh b/scripts/run_tests.sh index 8c5d7fd2..97676b9a 100755 --- a/scripts/run_tests.sh +++ b/scripts/run_tests.sh @@ -8,4 +8,4 @@ LVMPY_LOG_DIR="$PROJECT_DIR/tests/" \ TEST_HOME_DIR="$PROJECT_DIR/tests/" \ GLOBAL_SKALE_DIR="$PROJECT_DIR/tests/etc/skale" \ DOTENV_FILEPATH='tests/test-env' \ - py.test --cov=$PROJECT_DIR/ --ignore=tests/core/nftables_test.py --ignore=tests/core/migration_test.py tests/mirage $@ + py.test --cov=$PROJECT_DIR/ --ignore=tests/core/nftables_test.py --ignore=tests/core/migration_test.py tests $@ From c3c4320e132dc1813f64405f8b652a2019bc0416 Mon Sep 17 00:00:00 2001 From: badrogger Date: Mon, 16 Jun 2025 19:47:20 +0100 Subject: [PATCH 100/332] Fix test_assert_no_containers --- tests/core/host/docker_config_test.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/core/host/docker_config_test.py b/tests/core/host/docker_config_test.py index 4d87cb54..9f53828b 100644 --- a/tests/core/host/docker_config_test.py +++ b/tests/core/host/docker_config_test.py @@ -131,7 +131,7 @@ def container(dclient): def test_assert_no_containers(): - assert_no_containers(ignore=('ganache',)) + assert_no_containers(ignore=('redis',)) def test_assert_no_containers_failed(container): From 7299104a9997a0cd532af09757c00a3e9952aadc Mon Sep 17 00:00:00 2001 From: badrogger Date: Tue, 17 Jun 2025 12:24:21 +0100 Subject: [PATCH 101/332] Rename ENDPOINT to BOOT_ENDPOINT for mirage --- node_cli/configs/user.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/node_cli/configs/user.py b/node_cli/configs/user.py index 9015d41f..f97e4ec0 100644 --- 
a/node_cli/configs/user.py +++ b/node_cli/configs/user.py @@ -45,7 +45,6 @@ class ValidationResult(NamedTuple): @dataclass(kw_only=True) class BaseUserConfig(ABC): container_configs_stream: str - endpoint: str env_type: str filebeat_host: str disk_mountpoint: str @@ -84,12 +83,14 @@ def validate_params(cls, params: Dict) -> ValidationResult: @dataclass class MirageUserConfig(BaseUserConfig): mirage_contracts: str + boot_endpoint: str sgx_server_url: str enforce_btrfs: str = '' @dataclass class MirageBootUserConfig(BaseUserConfig): + endpoint: str manager_contracts: str ima_contracts: str sgx_server_url: str @@ -98,6 +99,7 @@ class MirageBootUserConfig(BaseUserConfig): @dataclass class SkaleUserConfig(BaseUserConfig): + endpoint: str manager_contracts: str ima_contracts: str docker_lvmpy_stream: str @@ -115,6 +117,7 @@ class SkaleUserConfig(BaseUserConfig): @dataclass class SyncUserConfig(BaseUserConfig): + endpoint: str manager_contracts: str schain_name: str = '' ima_contracts: str = '' From 53c6017fc15ed63f6a8aed820819eaa966cd9484 Mon Sep 17 00:00:00 2001 From: badrogger Date: Tue, 17 Jun 2025 12:43:42 +0100 Subject: [PATCH 102/332] Fix tests --- tests/conftest.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/conftest.py b/tests/conftest.py index 9cbbde01..42b7fccc 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -327,7 +327,7 @@ def mirage_user_conf(tmp_path): test_env_path = pathlib.Path(tmp_path / 'test-env') try: test_env = """ - ENDPOINT=http://localhost:8545 + BOOT_ENDPOINT=http://localhost:8545 CONTAINER_CONFIGS_STREAM='main' FILEBEAT_HOST=127.0.0.1:3010 SGX_SERVER_URL=http://127.0.0.1 From 58d6bdbd1187c45c692b1a8c32a60fb5be1abe07 Mon Sep 17 00:00:00 2001 From: badrogger Date: Tue, 17 Jun 2025 13:02:13 +0100 Subject: [PATCH 103/332] Fix validation --- node_cli/configs/user.py | 18 ++++++++++-------- tests/core/core_node_test.py | 11 +---------- 2 files changed, 11 insertions(+), 18 deletions(-) diff --git 
a/node_cli/configs/user.py b/node_cli/configs/user.py index f97e4ec0..4b386896 100644 --- a/node_cli/configs/user.py +++ b/node_cli/configs/user.py @@ -19,16 +19,16 @@ import inspect import os -from typing import Dict, NamedTuple -from dataclasses import dataclass from abc import ABC +from dataclasses import dataclass +from typing import Dict, NamedTuple from dotenv.main import DotEnv -from node_cli.configs import SKALE_DIR, CONTAINER_CONFIG_PATH -from node_cli.configs.alias_address_validation import validate_alias_or_address, ContractType -from node_cli.utils.node_type import NodeType +from node_cli.configs import CONTAINER_CONFIG_PATH, SKALE_DIR +from node_cli.configs.alias_address_validation import ContractType, validate_alias_or_address from node_cli.utils.helper import error_exit +from node_cli.utils.node_type import NodeType SKALE_DIR_ENV_FILEPATH = os.path.join(SKALE_DIR, '.env') CONFIGS_ENV_FILEPATH = os.path.join(CONTAINER_CONFIG_PATH, '.env') @@ -149,15 +149,17 @@ def get_validated_user_config( def validate_user_config(user_config: BaseUserConfig) -> None: validate_env_type(env_type=user_config.env_type) - if isinstance(user_config, MirageUserConfig): + if isinstance(user_config, MirageUserConfig): contract_alias_or_address = user_config.mirage_contracts + endpoint = user_config.boot_endpoint else: contract_alias_or_address = user_config.manager_contracts + endpoint = user_config.endpoint - validate_alias_or_address(contract_alias_or_address, ContractType.MANAGER, user_config.endpoint) + validate_alias_or_address(contract_alias_or_address, ContractType.MANAGER, endpoint) if isinstance(user_config, (SkaleUserConfig, MirageBootUserConfig)): - validate_alias_or_address(user_config.ima_contracts, ContractType.IMA, user_config.endpoint) + validate_alias_or_address(user_config.ima_contracts, ContractType.IMA, endpoint) def to_lower_keys(params: Dict[str, str]) -> Dict[str, str]: diff --git a/tests/core/core_node_test.py b/tests/core/core_node_test.py index 
e06b88ad..fb1484d6 100644 --- a/tests/core/core_node_test.py +++ b/tests/core/core_node_test.py @@ -223,11 +223,7 @@ def test_compose_node_env( expect_backup_run, ): user_config_path = request.getfixturevalue(test_user_conf) - # mock_get_validated.return_value = valid_env_params.copy() - # if node_type == NodeType.SYNC: - # mock_get_validated.return_value['ENV_TYPE'] = 'devnet' - # else: - # mock_get_validated.return_value['ENV_TYPE'] = 'mainnet' + with ( mock.patch('node_cli.configs.user.validate_alias_or_address'), mock.patch('node_cli.core.node.save_env_params'), @@ -242,10 +238,6 @@ def test_compose_node_env( save=True, ) - # mock_save_params.assert_called_once_with(user_config_path) - # mock_get_validated.assert_called_once_with( - # env_filepath=valid_env_file, node_type=node_type, is_mirage_boot=is_boot - # ) assert result_env['SCHAINS_MNT_DIR'] == expected_mnt_dir assert ( 'FLASK_SECRET_KEY' in result_env and result_env['FLASK_SECRET_KEY'] is not None @@ -254,7 +246,6 @@ def test_compose_node_env( assert result_env['FLASK_SECRET_KEY'] == 'mock_secret' should_have_backup = sync_schains and node_type != NodeType.SYNC assert ('BACKUP_RUN' in result_env and result_env['BACKUP_RUN'] == 'True') == should_have_backup - # assert result_env['ENDPOINT'] == valid_env_params['ENDPOINT'] @pytest.fixture From a405ab618197d20dd3b52df83696a37f0ca50357 Mon Sep 17 00:00:00 2001 From: badrogger Date: Tue, 17 Jun 2025 16:41:33 +0100 Subject: [PATCH 104/332] Use proper CliMetaManager for mirage operations --- node_cli/configs/user.py | 21 ++++++++-------- node_cli/core/mirage_node.py | 8 +++--- node_cli/operations/__init__.py | 3 +-- node_cli/operations/base.py | 44 +-------------------------------- node_cli/operations/mirage.py | 2 +- node_cli/utils/meta.py | 3 +-- 6 files changed, 18 insertions(+), 63 deletions(-) diff --git a/node_cli/configs/user.py b/node_cli/configs/user.py index 4b386896..7ffc338a 100644 --- a/node_cli/configs/user.py +++ b/node_cli/configs/user.py @@ 
-130,8 +130,8 @@ def get_validated_user_config( is_mirage_boot: bool = False, ) -> BaseUserConfig: params = parse_env_file(env_filepath) - user_config_type = get_user_config_type(node_type, is_mirage_boot) - _, missing_params, extra_params = user_config_type.validate_params(params) + user_config_class = get_user_config_class(node_type, is_mirage_boot) + _, missing_params, extra_params = user_config_class.validate_params(params) if len(missing_params) > 0: error_exit(f'Missing required parameters: {missing_params}') @@ -140,7 +140,7 @@ def get_validated_user_config( error_exit(f'Extra parameters: {extra_params}') params = to_lower_keys(params) - user_config = user_config_type(**params) + user_config = user_config_class(**params) validate_user_config(user_config) return user_config @@ -155,8 +155,7 @@ def validate_user_config(user_config: BaseUserConfig) -> None: else: contract_alias_or_address = user_config.manager_contracts endpoint = user_config.endpoint - - validate_alias_or_address(contract_alias_or_address, ContractType.MANAGER, endpoint) + validate_alias_or_address(contract_alias_or_address, ContractType.MANAGER, endpoint) if isinstance(user_config, (SkaleUserConfig, MirageBootUserConfig)): validate_alias_or_address(user_config.ima_contracts, ContractType.IMA, endpoint) @@ -172,19 +171,19 @@ def parse_env_file(env_filepath: str) -> Dict: return DotEnv(env_filepath).dict() -def get_user_config_type( +def get_user_config_class( node_type: NodeType, is_mirage_boot: bool = False, ) -> type[BaseUserConfig]: if node_type == NodeType.MIRAGE and is_mirage_boot: - user_config_type = MirageBootUserConfig + user_config_class = MirageBootUserConfig elif node_type == NodeType.MIRAGE: - user_config_type = MirageUserConfig + user_config_class = MirageUserConfig elif node_type == NodeType.SYNC: - user_config_type = SyncUserConfig + user_config_class = SyncUserConfig else: - user_config_type = SkaleUserConfig - return user_config_type + user_config_class = SkaleUserConfig + 
return user_config_class def validate_env_type(env_type: str) -> None: diff --git a/node_cli/core/mirage_node.py b/node_cli/core/mirage_node.py index 2eff8c22..5cde18e4 100644 --- a/node_cli/core/mirage_node.py +++ b/node_cli/core/mirage_node.py @@ -21,10 +21,10 @@ import logging import time -from node_cli.configs import SKALE_DIR, RESTORE_SLEEP_TIMEOUT +from node_cli.configs import RESTORE_SLEEP_TIMEOUT, SKALE_DIR from node_cli.core.host import save_env_params from node_cli.core.node import compose_node_env, is_base_containers_alive -from node_cli.operations import update_mirage_op, restore_mirage_op, MirageUpdateType +from node_cli.operations import MirageUpdateType, restore_mirage_op, update_mirage_op from node_cli.utils.decorators import check_inited, check_not_inited, check_user from node_cli.utils.exit_codes import CLIExitCodes from node_cli.utils.helper import error_exit @@ -54,7 +54,7 @@ def restore_mirage(backup_path, env_filepath, config_only=False): def migrate_from_boot( env_filepath: str, ) -> None: - logger.info('Node update started') + logger.info('Migrating from boot to mirage node...') env = compose_node_env( env_filepath, inited_node=True, @@ -67,4 +67,4 @@ def migrate_from_boot( print_node_cmd_error() return else: - logger.info('Mirgration from boot to mirage completed successfully') + logger.info('Migration from boot to mirage completed successfully') diff --git a/node_cli/operations/__init__.py b/node_cli/operations/__init__.py index a0cafed8..1c907b9f 100644 --- a/node_cli/operations/__init__.py +++ b/node_cli/operations/__init__.py @@ -22,8 +22,7 @@ init as init_op, init_sync as init_sync_op, init_mirage_boot as init_mirage_boot_op, - migrate_mirage_boot as migrate_mirage_boot_op, - update_mirage_boot as update_mirage_boot_op, + update_mirage_boot as update_miage_boot_op, update_sync as update_sync_op, turn_off as turn_off_op, turn_on as turn_on_op, diff --git a/node_cli/operations/base.py b/node_cli/operations/base.py index 
1f1ce73e..d4dd4381 100644 --- a/node_cli/operations/base.py +++ b/node_cli/operations/base.py @@ -150,47 +150,6 @@ def update(env_filepath: str, env: Dict, node_type: NodeType) -> bool: return True -@checked_host -def migrate_mirage_boot(env_filepath: str, env: Dict) -> bool: - compose_rm(node_type=NodeType.MIRAGE, env=env) - cleanup_volume_artifacts(env['DISK_MOUNTPOINT']) - - sync_skale_node() - ensure_btrfs_kernel_module_autoloaded() - - if env.get('SKIP_DOCKER_CONFIG') != 'True': - configure_docker() - - enable_monitoring = str_to_bool(env.get('MONITORING_CONTAINERS', 'False')) - configure_nftables(enable_monitoring=enable_monitoring) - - generate_nginx_config() - - prepare_host(env_filepath, env['ENV_TYPE']) - - meta_manager = CliMetaManager() - current_stream = meta_manager.get_meta_info().config_stream - skip_cleanup = env.get('SKIP_DOCKER_CLEANUP') == 'True' - if not skip_cleanup and current_stream != env['CONTAINER_CONFIGS_STREAM']: - logger.info( - 'Stream version was changed from %s to %s', - current_stream, - env['CONTAINER_CONFIGS_STREAM'], - ) - docker_cleanup() - - meta_manager.update_meta( - VERSION, - env['CONTAINER_CONFIGS_STREAM'], - None, - distro.id(), - distro.version(), - ) - update_images(env=env, node_type=NodeType.MIRAGE) - compose_up(env=env, node_type=NodeType.MIRAGE) - return True - - @checked_host def update_mirage_boot(env_filepath: str, env: Dict) -> bool: compose_rm(node_type=NodeType.MIRAGE, env=env) @@ -288,11 +247,10 @@ def init_mirage_boot(env_filepath: str, env: dict) -> None: generate_nginx_config() prepare_block_device(env['DISK_MOUNTPOINT'], force=env['ENFORCE_BTRFS'] == 'True') - meta_manager = CliMetaManager() + meta_manager = MirageCliMetaManager() meta_manager.update_meta( VERSION, env['CONTAINER_CONFIGS_STREAM'], - None, distro.id(), distro.version(), ) diff --git a/node_cli/operations/mirage.py b/node_cli/operations/mirage.py index 4504c9d3..128b1c69 100644 --- a/node_cli/operations/mirage.py +++ 
b/node_cli/operations/mirage.py @@ -81,7 +81,7 @@ def update_mirage(env_filepath: str, env: dict, update_type: MirageUpdateType) - ) docker_cleanup() - MirageCliMetaManager().update_meta( + meta_manager.update_meta( VERSION, env['CONTAINER_CONFIGS_STREAM'], distro.id(), diff --git a/node_cli/utils/meta.py b/node_cli/utils/meta.py index 4b82c300..556f5002 100644 --- a/node_cli/utils/meta.py +++ b/node_cli/utils/meta.py @@ -2,7 +2,6 @@ import json import os from dataclasses import dataclass -from typing import Optional from node_cli.configs import META_FILEPATH @@ -107,7 +106,7 @@ def update_meta( self, version: str, config_stream: str, - docker_lvmpy_stream: Optional[str], + docker_lvmpy_stream: str | None, os_id: str, os_version: str, ) -> None: From 9d533bd539fb324159f0df3c899eb1ed8298acb4 Mon Sep 17 00:00:00 2001 From: badrogger Date: Tue, 17 Jun 2025 17:19:18 +0100 Subject: [PATCH 105/332] Fix import --- node_cli/core/mirage_boot.py | 3 +-- node_cli/operations/__init__.py | 2 +- 2 files changed, 2 insertions(+), 3 deletions(-) diff --git a/node_cli/core/mirage_boot.py b/node_cli/core/mirage_boot.py index 5d16affa..2299bb1a 100644 --- a/node_cli/core/mirage_boot.py +++ b/node_cli/core/mirage_boot.py @@ -23,8 +23,7 @@ from node_cli.configs import TM_INIT_TIMEOUT from node_cli.core.node import compose_node_env, is_base_containers_alive -from node_cli.operations import init_mirage_boot_op, migrate_mirage_boot_op, update_mirage_boot_op -from node_cli.utils.decorators import check_not_inited, check_inited, check_user +from node_cli.operations import init_mirage_boot_op, update_mirage_boot_op from node_cli.utils.exit_codes import CLIExitCodes from node_cli.utils.helper import error_exit from node_cli.utils.node_type import NodeType diff --git a/node_cli/operations/__init__.py b/node_cli/operations/__init__.py index 1c907b9f..4756db58 100644 --- a/node_cli/operations/__init__.py +++ b/node_cli/operations/__init__.py @@ -22,7 +22,7 @@ init as init_op, init_sync as 
init_sync_op, init_mirage_boot as init_mirage_boot_op, - update_mirage_boot as update_miage_boot_op, + update_mirage_boot as update_mirage_boot_op, update_sync as update_sync_op, turn_off as turn_off_op, turn_on as turn_on_op, From f7856b0d5dfcac6428bd67478b9ab0677e829b95 Mon Sep 17 00:00:00 2001 From: badrogger Date: Tue, 17 Jun 2025 17:25:20 +0100 Subject: [PATCH 106/332] Fix imports --- node_cli/core/mirage_boot.py | 1 + 1 file changed, 1 insertion(+) diff --git a/node_cli/core/mirage_boot.py b/node_cli/core/mirage_boot.py index 2299bb1a..09e3679a 100644 --- a/node_cli/core/mirage_boot.py +++ b/node_cli/core/mirage_boot.py @@ -24,6 +24,7 @@ from node_cli.configs import TM_INIT_TIMEOUT from node_cli.core.node import compose_node_env, is_base_containers_alive from node_cli.operations import init_mirage_boot_op, update_mirage_boot_op +from node_cli.utils.decorators import check_not_inited, check_inited, check_user from node_cli.utils.exit_codes import CLIExitCodes from node_cli.utils.helper import error_exit from node_cli.utils.node_type import NodeType From 91bdb51fed72a63d5c149543a33c8555bdff5a9d Mon Sep 17 00:00:00 2001 From: badrogger Date: Tue, 17 Jun 2025 17:38:32 +0100 Subject: [PATCH 107/332] Remove unused code --- node_cli/cli/mirage_boot.py | 17 +---------------- node_cli/core/mirage_boot.py | 23 ----------------------- 2 files changed, 1 insertion(+), 39 deletions(-) diff --git a/node_cli/cli/mirage_boot.py b/node_cli/cli/mirage_boot.py index 4c783393..b7f122b9 100644 --- a/node_cli/cli/mirage_boot.py +++ b/node_cli/cli/mirage_boot.py @@ -20,7 +20,7 @@ import click from node_cli.core.node import get_node_signature, register_node as register, get_node_info -from node_cli.core.mirage_boot import init, migrate, update +from node_cli.core.mirage_boot import init, update from node_cli.configs import DEFAULT_NODE_BASE_PORT from node_cli.utils.helper import streamed_cmd, IP_TYPE, error_exit, abort_if_false @@ -76,21 +76,6 @@ def signature_boot(validator_id): 
print(f'Signature: {res}') -@boot.command('migrate', help='Migrate mirage node from Mirage Boot Phase to Mirage Main Phase.') -@click.option( - '--yes', - is_flag=True, - callback=abort_if_false, - expose_value=False, - prompt='Are you sure you want to mirage node from Mirage Boot Phase to Mirage Main Phase?', -) -@click.option('--pull-config', 'pull_config_for_schain', hidden=True, type=str) -@click.argument('env_file') -@streamed_cmd -def migrate_boot(env_file, pull_config_for_schain): - migrate(env_file, pull_config_for_schain) - - @boot.command('update', help='Update Mirage node from .env file') @click.option( '--yes', diff --git a/node_cli/core/mirage_boot.py b/node_cli/core/mirage_boot.py index 09e3679a..5d6586b9 100644 --- a/node_cli/core/mirage_boot.py +++ b/node_cli/core/mirage_boot.py @@ -50,29 +50,6 @@ def init(env_filepath: str) -> None: logger.info('Init mirage procedure finished') -@check_inited -@check_user -def migrate(env_filepath: str, pull_config_for_schain: str) -> None: - logger.info('Mirage node migration started') - env = compose_node_env( - env_filepath, - inited_node=True, - sync_schains=False, - pull_config_for_schain=pull_config_for_schain, - node_type=NodeType.MIRAGE, - ) - migrate_ok = migrate_mirage_boot_op(env_filepath, env) - if migrate_ok: - logger.info('Waiting for containers initialization') - time.sleep(TM_INIT_TIMEOUT) - alive = is_base_containers_alive(node_type=NodeType.MIRAGE) - if not migrate_ok or not alive: - print_node_cmd_error() - return - else: - logger.info('Node migration from Mirage Boot to Mirage Main finished successfully!') - - @check_inited @check_user def update(env_filepath: str, pull_config_for_schain: str) -> None: From 7271ace6359a11f8f024033b9c9bec7d4352f580 Mon Sep 17 00:00:00 2001 From: badrogger Date: Tue, 17 Jun 2025 18:38:43 +0100 Subject: [PATCH 108/332] Fix tests --- tests/cli/mirage_cli_test.py | 20 ------------ tests/configs/configs_env_validate_test.py | 4 +-- tests/core/core_mirage_test.py | 37 
+--------------------- 3 files changed, 3 insertions(+), 58 deletions(-) diff --git a/tests/cli/mirage_cli_test.py b/tests/cli/mirage_cli_test.py index e8400219..5072bd40 100644 --- a/tests/cli/mirage_cli_test.py +++ b/tests/cli/mirage_cli_test.py @@ -15,7 +15,6 @@ init_boot, register_boot, signature_boot, - migrate_boot, ) @@ -150,25 +149,6 @@ def test_mirage_boot_init(mock_init_core, valid_env_file): mock_init_core.assert_called_once_with(valid_env_file) -@mock.patch('node_cli.cli.mirage_boot.migrate') -def test_mirage_boot_migrate(mock_migrate_core, valid_env_file): - runner = CliRunner() - result = runner.invoke(migrate_boot, ['--yes', valid_env_file]) - - assert result.exit_code == 0, f'Output: {result.output}\nException: {result.exception}' - mock_migrate_core.assert_called_once_with(valid_env_file, None) - - -@mock.patch('node_cli.cli.mirage_boot.migrate') -def test_mirage_boot_migrate_pull_config(mock_migrate_core, valid_env_file): - runner = CliRunner() - schain_name = 'my-schain-config' - result = runner.invoke(migrate_boot, ['--yes', '--pull-config', schain_name, valid_env_file]) - - assert result.exit_code == 0, f'Output: {result.output}\nException: {result.exception}' - mock_migrate_core.assert_called_once_with(valid_env_file, schain_name) - - @mock.patch('node_cli.cli.mirage_node.migrate_from_boot') def test_mirage_node_migrate(mock_migrate_core, valid_env_file): runner = CliRunner() diff --git a/tests/configs/configs_env_validate_test.py b/tests/configs/configs_env_validate_test.py index 449b9a4e..a9703b38 100644 --- a/tests/configs/configs_env_validate_test.py +++ b/tests/configs/configs_env_validate_test.py @@ -18,7 +18,7 @@ MirageUserConfig, SkaleUserConfig, SyncUserConfig, - get_user_config_type, + get_user_config_class, get_validated_user_config, validate_env_type, ) @@ -47,7 +47,7 @@ def json(self): ids=['regular', 'sync', 'mirage_boot', 'mirage_regular'], ) def test_build_env_params_keys(node_type, is_mirage_boot, expected_type): - env_type = 
get_user_config_type(node_type=node_type, is_mirage_boot=is_mirage_boot) + env_type = get_user_config_class(node_type=node_type, is_mirage_boot=is_mirage_boot) assert env_type == expected_type diff --git a/tests/core/core_mirage_test.py b/tests/core/core_mirage_test.py index 171d4fed..e4e0519c 100644 --- a/tests/core/core_mirage_test.py +++ b/tests/core/core_mirage_test.py @@ -1,7 +1,7 @@ from unittest import mock from node_cli.configs import SKALE_DIR -from node_cli.core.mirage_boot import init as init_boot, migrate, update +from node_cli.core.mirage_boot import init as init_boot, update from node_cli.core.mirage_node import restore_mirage, migrate_from_boot from node_cli.operations.mirage import MirageUpdateType from node_cli.utils.node_type import NodeType @@ -60,41 +60,6 @@ def test_init_mirage_boot( mock_is_alive.assert_called_once_with(node_type=NodeType.MIRAGE, is_mirage_boot=True) -@mock.patch('node_cli.utils.decorators.is_user_valid', return_value=True) -@mock.patch('node_cli.core.mirage_boot.is_base_containers_alive', return_value=True) -@mock.patch('node_cli.core.mirage_boot.time.sleep') -@mock.patch('node_cli.core.mirage_boot.migrate_mirage_boot_op') -@mock.patch('node_cli.core.mirage_boot.compose_node_env') -def test_migrate_mirage_boot( - mock_compose_env, - mock_migrate_op, - mock_sleep, - mock_is_alive, - mock_is_user_valid, - valid_env_file, - inited_node, - resource_alloc, - meta_file_v3, -): - mock_env = {'ENV_TYPE': 'devnet'} - mock_compose_env.return_value = mock_env - mock_migrate_op.return_value = True - pull_config_for_schain = 'mirage' - - migrate(valid_env_file, pull_config_for_schain) - - mock_compose_env.assert_called_once_with( - valid_env_file, - inited_node=True, - sync_schains=False, - pull_config_for_schain=pull_config_for_schain, - node_type=NodeType.MIRAGE, - ) - mock_migrate_op.assert_called_once_with(valid_env_file, mock_env) - mock_sleep.assert_called_once() - mock_is_alive.assert_called_once_with(node_type=NodeType.MIRAGE) - - 
@mock.patch('node_cli.utils.decorators.is_user_valid', return_value=True) @mock.patch('node_cli.core.mirage_boot.is_base_containers_alive', return_value=True) @mock.patch('node_cli.core.mirage_boot.time.sleep') From 1a95aff810c3e13118922960de0b848b6c098bf5 Mon Sep 17 00:00:00 2001 From: badrogger Date: Tue, 17 Jun 2025 19:45:29 +0100 Subject: [PATCH 109/332] Remove unnecessary code --- node_cli/configs/user.py | 9 ++------- 1 file changed, 2 insertions(+), 7 deletions(-) diff --git a/node_cli/configs/user.py b/node_cli/configs/user.py index 7ffc338a..dc393e53 100644 --- a/node_cli/configs/user.py +++ b/node_cli/configs/user.py @@ -149,13 +149,8 @@ def get_validated_user_config( def validate_user_config(user_config: BaseUserConfig) -> None: validate_env_type(env_type=user_config.env_type) - if isinstance(user_config, MirageUserConfig): - contract_alias_or_address = user_config.mirage_contracts - endpoint = user_config.boot_endpoint - else: - contract_alias_or_address = user_config.manager_contracts - endpoint = user_config.endpoint - validate_alias_or_address(contract_alias_or_address, ContractType.MANAGER, endpoint) + if not isinstance(user_config, MirageUserConfig): + validate_alias_or_address(user_config.manager_contracts, ContractType.MANAGER, user_config.endpoint) if isinstance(user_config, (SkaleUserConfig, MirageBootUserConfig)): validate_alias_or_address(user_config.ima_contracts, ContractType.IMA, endpoint) From 364d48cf41063cd6a732bf8166d8ed191acd5616 Mon Sep 17 00:00:00 2001 From: badrogger Date: Wed, 18 Jun 2025 20:31:50 +0100 Subject: [PATCH 110/332] Add firewall migration from boot --- node_cli/configs/user.py | 6 +++-- node_cli/core/mirage_node.py | 2 +- node_cli/migrations/mirage/from_boot.py | 34 +++++++++++++++++++++++++ node_cli/operations/mirage.py | 8 +++++- 4 files changed, 46 insertions(+), 4 deletions(-) create mode 100644 node_cli/migrations/mirage/from_boot.py diff --git a/node_cli/configs/user.py b/node_cli/configs/user.py index 
dc393e53..f1d07cc6 100644 --- a/node_cli/configs/user.py +++ b/node_cli/configs/user.py @@ -150,10 +150,12 @@ def validate_user_config(user_config: BaseUserConfig) -> None: validate_env_type(env_type=user_config.env_type) if not isinstance(user_config, MirageUserConfig): - validate_alias_or_address(user_config.manager_contracts, ContractType.MANAGER, user_config.endpoint) + validate_alias_or_address( + user_config.manager_contracts, ContractType.MANAGER, user_config.endpoint + ) if isinstance(user_config, (SkaleUserConfig, MirageBootUserConfig)): - validate_alias_or_address(user_config.ima_contracts, ContractType.IMA, endpoint) + validate_alias_or_address(user_config.ima_contracts, ContractType.IMA, user_config.endpoint) def to_lower_keys(params: Dict[str, str]) -> Dict[str, str]: diff --git a/node_cli/core/mirage_node.py b/node_cli/core/mirage_node.py index 5cde18e4..5096aebf 100644 --- a/node_cli/core/mirage_node.py +++ b/node_cli/core/mirage_node.py @@ -61,7 +61,7 @@ def migrate_from_boot( sync_schains=False, node_type=NodeType.MIRAGE, ) - migrate_ok = update_mirage_op(env_filepath, env, update_type=MirageUpdateType.INFRA_ONLY) + migrate_ok = update_mirage_op(env_filepath, env, update_type=MirageUpdateType.FROM_BOOT) alive = is_base_containers_alive(node_type=NodeType.MIRAGE) if not migrate_ok or not alive: print_node_cmd_error() diff --git a/node_cli/migrations/mirage/from_boot.py b/node_cli/migrations/mirage/from_boot.py new file mode 100644 index 00000000..a795f971 --- /dev/null +++ b/node_cli/migrations/mirage/from_boot.py @@ -0,0 +1,34 @@ +import glob +import logging +import os +from pathlib import Path + +from node_cli.utils.helper import run_cmd + +logger = logging.getLogger(__name__) + +NFT_CHAIN_BASE_PATH = '/etc/nft.conf.d/skale/chains' +NFT_COMMITTEE_SCOPE_CHAIN_PATH = 'mirage-committee.conf' + + +class NoLegacyNFTChainConfigError(Exception): + pass + + +def move_chain_to_new_name() -> None: + after_boot_chain_path = 
glob.glob(os.path.join(NFT_CHAIN_BASE_PATH, '*'))[0] + new_path = os.path.join(NFT_CHAIN_BASE_PATH, NFT_COMMITTEE_SCOPE_CHAIN_PATH) + if Path(after_boot_chain_path).exists(): + raise NoLegacyNFTChainConfigError(f'File {after_boot_chain_path} does not exists') + Path(after_boot_chain_path).rename(Path(new_path)) + + +def reload_nft(): + run_cmd(['nft', '-f', '/etc/nftables.conf']) + + +def migrate_nftables_from_boot(): + logger.info('Starting nftables migration from boot') + move_chain_to_new_name() + logger.info('Reloading nftables rules') + reload_nft() diff --git a/node_cli/operations/mirage.py b/node_cli/operations/mirage.py index 128b1c69..84b2843c 100644 --- a/node_cli/operations/mirage.py +++ b/node_cli/operations/mirage.py @@ -30,6 +30,7 @@ from node_cli.core.host import ensure_btrfs_kernel_module_autoloaded, link_env_file, prepare_host from node_cli.core.nftables import configure_nftables from node_cli.core.nginx import generate_nginx_config +from node_cli.migrations.mirage.from_boot import migrate_nftables_from_boot from node_cli.operations.base import checked_host from node_cli.operations.common import unpack_backup_archive from node_cli.operations.config_repo import ( @@ -53,13 +54,15 @@ class MirageUpdateType(Enum): REGULAR = 'regular' INFRA_ONLY = 'infra_only' + FROM_BOOT = 'from_boot' @checked_host def update_mirage(env_filepath: str, env: dict, update_type: MirageUpdateType) -> bool: compose_rm(node_type=NodeType.MIRAGE, env=env) - if update_type != MirageUpdateType.INFRA_ONLY: + if update_type in (MirageUpdateType.INFRA_ONLY, MirageUpdateType.FROM_BOOT): remove_dynamic_containers() + sync_skale_node() ensure_btrfs_kernel_module_autoloaded() @@ -87,6 +90,9 @@ def update_mirage(env_filepath: str, env: dict, update_type: MirageUpdateType) - distro.id(), distro.version(), ) + + if update_type == MirageUpdateType.FROM_BOOT: + migrate_nftables_from_boot() update_images(env=env, node_type=NodeType.MIRAGE) compose_up(env=env, node_type=NodeType.MIRAGE) 
return True From a1c51b65824f4fc813fffa1709c4ac2ba4fcee59 Mon Sep 17 00:00:00 2001 From: badrogger Date: Fri, 20 Jun 2025 16:42:07 +0100 Subject: [PATCH 111/332] Fix firewall migrate --- node_cli/migrations/mirage/from_boot.py | 39 ++++++++++++++++++++----- node_cli/operations/mirage.py | 2 +- 2 files changed, 33 insertions(+), 8 deletions(-) diff --git a/node_cli/migrations/mirage/from_boot.py b/node_cli/migrations/mirage/from_boot.py index a795f971..04578e68 100644 --- a/node_cli/migrations/mirage/from_boot.py +++ b/node_cli/migrations/mirage/from_boot.py @@ -3,24 +3,47 @@ import os from pathlib import Path +from node_cli.core.docker_config import restart_docker_service from node_cli.utils.helper import run_cmd logger = logging.getLogger(__name__) NFT_CHAIN_BASE_PATH = '/etc/nft.conf.d/skale/chains' -NFT_COMMITTEE_SCOPE_CHAIN_PATH = 'mirage-committee.conf' +NFT_COMMITTEE_SCOPE_CHAIN_NAME = 'mirage-committee' class NoLegacyNFTChainConfigError(Exception): pass -def move_chain_to_new_name() -> None: +def rename_chain_file(old_filepath: str, new_filepath: str) -> None: + old_path = Path(old_filepath) + new_path = Path(new_filepath) + if not old_path.exists(): + raise NoLegacyNFTChainConfigError(f'File {old_filepath} does not exists') + old_path.rename(Path(new_path)) + + +def rename_chain_in_config(config_path: str, old_chain_name: str, new_chain_name: str) -> None: + content = '' + with open(config_path, 'r') as f: + content = f.read() + + updated_content = content.replace(old_chain_name, new_chain_name) + + with open(config_path, 'w') as f: + f.write(updated_content) + + +def migrate_nft_chain() -> None: after_boot_chain_path = glob.glob(os.path.join(NFT_CHAIN_BASE_PATH, '*'))[0] - new_path = os.path.join(NFT_CHAIN_BASE_PATH, NFT_COMMITTEE_SCOPE_CHAIN_PATH) - if Path(after_boot_chain_path).exists(): - raise NoLegacyNFTChainConfigError(f'File {after_boot_chain_path} does not exists') - Path(after_boot_chain_path).rename(Path(new_path)) + old_chain_name = 
Path(after_boot_chain_path).name.removesuffix('.conf') + new_chain_name = NFT_COMMITTEE_SCOPE_CHAIN_NAME + rename_chain_in_config(after_boot_chain_path, old_chain_name, new_chain_name) + after_migration_chain_path = os.path.join( + NFT_CHAIN_BASE_PATH, f'{NFT_COMMITTEE_SCOPE_CHAIN_NAME}.conf' + ) + rename_chain_file(after_boot_chain_path, after_migration_chain_path) def reload_nft(): @@ -29,6 +52,8 @@ def reload_nft(): def migrate_nftables_from_boot(): logger.info('Starting nftables migration from boot') - move_chain_to_new_name() + migrate_nft_chain() logger.info('Reloading nftables rules') reload_nft() + logger.info('Restart docker service') + restart_docker_service() diff --git a/node_cli/operations/mirage.py b/node_cli/operations/mirage.py index 84b2843c..ed3dce78 100644 --- a/node_cli/operations/mirage.py +++ b/node_cli/operations/mirage.py @@ -60,7 +60,7 @@ class MirageUpdateType(Enum): @checked_host def update_mirage(env_filepath: str, env: dict, update_type: MirageUpdateType) -> bool: compose_rm(node_type=NodeType.MIRAGE, env=env) - if update_type in (MirageUpdateType.INFRA_ONLY, MirageUpdateType.FROM_BOOT): + if update_type not in (MirageUpdateType.INFRA_ONLY, MirageUpdateType.FROM_BOOT): remove_dynamic_containers() sync_skale_node() From 0eae0362605ac170a729eaa36630f1277f7c36c7 Mon Sep 17 00:00:00 2001 From: badrogger Date: Mon, 23 Jun 2025 12:59:14 +0100 Subject: [PATCH 112/332] Fix tests --- node_cli/cli/mirage_node.py | 16 ++++++++---- tests/mirage/mirage_node_test.py | 42 ++------------------------------ 2 files changed, 13 insertions(+), 45 deletions(-) diff --git a/node_cli/cli/mirage_node.py b/node_cli/cli/mirage_node.py index d87a9cbe..e961f719 100644 --- a/node_cli/cli/mirage_node.py +++ b/node_cli/cli/mirage_node.py @@ -19,14 +19,14 @@ import click -from node_cli.core.node import get_node_signature, backup, get_node_info +from node_cli.core.node import backup, get_node_info, get_node_signature from node_cli.mirage.mirage_node import 
migrate_from_boot, request_repair, restore_mirage -from node_cli.utils.helper import error_exit, streamed_cmd, abort_if_false -from node_cli.utils.helper import URL_TYPE +from node_cli.utils.helper import URL_TYPE, abort_if_false, error_exit, streamed_cmd from node_cli.utils.texts import safe_load_texts TEXTS = safe_load_texts() + @click.group() def mirage_node_cli(): pass @@ -111,8 +111,7 @@ def restore_node(backup_path, env_file, config_only): @streamed_cmd def migrate_node(env_filepath: str) -> None: migrate_from_boot(env_filepath=env_filepath) - prompt=TEXTS['mirage']['node']['repair']['warning'], -) + @node.command('repair', help='Toggle mirage chain repair mode') @click.option( @@ -122,5 +121,12 @@ def migrate_node(env_filepath: str) -> None: hidden=True, help=TEXTS['mirage']['node']['repair']['snapshot_from'], ) +@click.option( + '--yes', + is_flag=True, + callback=abort_if_false, + expose_value=False, + prompt=TEXTS['mirage']['node']['repair']['warning'], +) def repair(snapshot_from: str = '') -> None: request_repair(snapshot_from=snapshot_from) diff --git a/tests/mirage/mirage_node_test.py b/tests/mirage/mirage_node_test.py index d7b75c78..6dbb2a7f 100644 --- a/tests/mirage/mirage_node_test.py +++ b/tests/mirage/mirage_node_test.py @@ -3,12 +3,9 @@ import freezegun from node_cli.configs import SKALE_DIR -from node_cli.core.mirage_boot import init as init_boot -from node_cli.core.mirage_boot import update -from node_cli.core.mirage_node import migrate_from_boot, restore_mirage from node_cli.mirage.mirage_boot import init as init_boot -from node_cli.mirage.mirage_boot import migrate, update -from node_cli.mirage.mirage_node import request_repair, restore_mirage +from node_cli.mirage.mirage_boot import update +from node_cli.mirage.mirage_node import migrate_from_boot, request_repair, restore_mirage from node_cli.operations.mirage import MirageUpdateType from node_cli.utils.node_type import NodeType from tests.helper import CURRENT_DATETIME, CURRENT_TIMESTAMP 
@@ -67,41 +64,6 @@ def test_init_mirage_boot( mock_is_alive.assert_called_once_with(node_type=NodeType.MIRAGE, is_mirage_boot=True) -@mock.patch('node_cli.utils.decorators.is_user_valid', return_value=True) -@mock.patch('node_cli.core.mirage_boot.is_base_containers_alive', return_value=True) -@mock.patch('node_cli.core.mirage_boot.time.sleep') -@mock.patch('node_cli.core.mirage_boot.update_mirage_boot_op') -@mock.patch('node_cli.core.mirage_boot.compose_node_env') -def test_migrate_mirage_boot( - mock_compose_env, - mock_migrate_op, - mock_sleep, - mock_is_alive, - mock_is_user_valid, - valid_env_file, - inited_node, - resource_alloc, - meta_file_v3, -): - mock_env = {'ENV_TYPE': 'devnet'} - mock_compose_env.return_value = mock_env - mock_migrate_op.return_value = True - pull_config_for_schain = 'mirage' - - migrate(valid_env_file, pull_config_for_schain) - - mock_compose_env.assert_called_once_with( - valid_env_file, - inited_node=True, - sync_schains=False, - pull_config_for_schain=pull_config_for_schain, - node_type=NodeType.MIRAGE, - ) - mock_migrate_op.assert_called_once_with(valid_env_file, mock_env) - mock_sleep.assert_called_once() - mock_is_alive.assert_called_once_with(node_type=NodeType.MIRAGE) - - @mock.patch('node_cli.utils.decorators.is_user_valid', return_value=True) @mock.patch('node_cli.mirage.mirage_boot.is_base_containers_alive', return_value=True) @mock.patch('node_cli.mirage.mirage_boot.time.sleep') From 88471934395318db0b1658b21544dbf71e360748 Mon Sep 17 00:00:00 2001 From: badrogger Date: Mon, 23 Jun 2025 13:22:12 +0100 Subject: [PATCH 113/332] Fix tests --- tests/conftest.py | 14 +++++++------- tests/mirage/mirage_node_test.py | 6 +++--- 2 files changed, 10 insertions(+), 10 deletions(-) diff --git a/tests/conftest.py b/tests/conftest.py index 9a81fdc6..c6f71fac 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -19,9 +19,9 @@ import json import os -import tempfile import pathlib import shutil +import tempfile from contextlib import 
contextmanager import docker @@ -34,18 +34,17 @@ GLOBAL_SKALE_CONF_FILEPATH, GLOBAL_SKALE_DIR, META_FILEPATH, + NGINX_CONFIG_FILEPATH, NGINX_CONTAINER_NAME, + REDIS_URI, REMOVED_CONTAINERS_FOLDER_PATH, SCHAIN_NODE_DATA_PATH, - NGINX_CONFIG_FILEPATH, - REDIS_URI, ) from node_cli.configs.node_options import NODE_OPTIONS_FILEPATH -from node_cli.configs.ssl import SSL_FOLDER_PATH from node_cli.configs.resource_allocation import RESOURCE_ALLOCATION_FILEPATH +from node_cli.configs.ssl import SSL_FOLDER_PATH from node_cli.utils.docker_utils import docker_client from node_cli.utils.global_config import generate_g_config_file - from tests.helper import TEST_META_V1, TEST_META_V2, TEST_META_V3, TEST_SCHAINS_MNT_DIR_SINGLE_CHAIN @@ -385,8 +384,9 @@ def sync_user_conf(tmp_path): yield test_env_path finally: test_env_path.unlink() - - + + +@pytest.fixture def redis_client(): cpool = redis.ConnectionPool.from_url(REDIS_URI) return redis.Redis(connection_pool=cpool) diff --git a/tests/mirage/mirage_node_test.py b/tests/mirage/mirage_node_test.py index 6dbb2a7f..b81b91ef 100644 --- a/tests/mirage/mirage_node_test.py +++ b/tests/mirage/mirage_node_test.py @@ -100,8 +100,8 @@ def test_update_mirage_boot( mock_is_alive.assert_called_once_with(node_type=NodeType.MIRAGE, is_mirage_boot=True) -@mock.patch('node_cli.core.mirage_node.update_mirage_op') -@mock.patch('node_cli.core.mirage_node.compose_node_env') +@mock.patch('node_cli.mirage.mirage_node.update_mirage_op') +@mock.patch('node_cli.mirage.mirage_node.compose_node_env') @mock.patch('node_cli.utils.decorators.is_user_valid', return_value=True) def test_migrate_from_boot( mock_is_user_valid, @@ -125,7 +125,7 @@ def test_migrate_from_boot( node_type=NodeType.MIRAGE, ) mock_migrate_op.assert_called_once_with( - valid_env_file, mock_env, update_type=MirageUpdateType.INFRA_ONLY + valid_env_file, mock_env, update_type=MirageUpdateType.FROM_BOOT ) From 036d884695247f472fb0cd2456f9dacae0840a3c Mon Sep 17 00:00:00 2001 From: Dmytro Date: 
Mon, 23 Jun 2025 13:54:44 +0100 Subject: [PATCH 114/332] Add migrate for mirage chain record --- node_cli/core/static_config.py | 6 ++++++ node_cli/mirage/mirage_node.py | 10 +++------- node_cli/mirage/record/chain_record.py | 21 ++++++++++++++++++++- node_cli/operations/mirage.py | 3 +++ setup.py | 2 +- tests/cli/node_test.py | 17 ++++++++++------- tests/cli/resources_allocation_test.py | 6 +++--- tests/cli/sync_node_test.py | 4 +++- 8 files changed, 49 insertions(+), 20 deletions(-) diff --git a/node_cli/core/static_config.py b/node_cli/core/static_config.py index 721251ce..4bc84f42 100644 --- a/node_cli/core/static_config.py +++ b/node_cli/core/static_config.py @@ -44,3 +44,9 @@ def get_static_params( with open(static_params_filepath) as requirements_file: ydata = yaml.load(requirements_file, Loader=yaml.Loader) return ydata['envs'][env_type] + + +def get_mirage_chain_name(env: dict) -> str: + node_type = NodeType.MIRAGE + params = get_static_params(node_type, env['ENV_TYPE']) + return params['info']['chain_name'] diff --git a/node_cli/mirage/mirage_node.py b/node_cli/mirage/mirage_node.py index 31f229e8..44ba9c0a 100644 --- a/node_cli/mirage/mirage_node.py +++ b/node_cli/mirage/mirage_node.py @@ -25,8 +25,7 @@ from node_cli.configs.user import SKALE_DIR_ENV_FILEPATH from node_cli.core.host import save_env_params from node_cli.core.node import compose_node_env, is_base_containers_alive -from node_cli.core.static_config import get_static_params -from node_cli.mirage.record.chain_record import ChainRecord +from node_cli.mirage.record.chain_record import get_mirage_chain_record from node_cli.operations import MirageUpdateType, restore_mirage_op, update_mirage_op from node_cli.utils.decorators import check_inited, check_not_inited, check_user from node_cli.utils.exit_codes import CLIExitCodes @@ -76,11 +75,8 @@ def migrate_from_boot( def request_repair(snapshot_from: str = '') -> None: - node_type = NodeType.MIRAGE - env = compose_node_env(SKALE_DIR_ENV_FILEPATH, 
save=False, node_type=node_type) - params = get_static_params(node_type, env['ENV_TYPE']) - record = ChainRecord(params['info']['chain_name']) + env = compose_node_env(SKALE_DIR_ENV_FILEPATH, save=False, node_type=NodeType.MIRAGE) + record = get_mirage_chain_record(env) record.set_repair_ts(int(time.time())) record.set_snapshot_from(snapshot_from) - print(TEXTS['mirage']['node']['repair']['repair_requested']) diff --git a/node_cli/mirage/record/chain_record.py b/node_cli/mirage/record/chain_record.py index 34968be4..36e0dbeb 100644 --- a/node_cli/mirage/record/chain_record.py +++ b/node_cli/mirage/record/chain_record.py @@ -18,11 +18,11 @@ # along with this program. If not, see . - import logging from typing import cast from datetime import datetime +from node_cli.core.static_config import get_mirage_chain_name from node_cli.mirage.record.redis_record import FlatRedisRecord, FieldInfo logger = logging.getLogger(__name__) @@ -30,6 +30,7 @@ CHAIN_RECORD_FIELDS: dict[str, FieldInfo] = { 'name': FieldInfo('name', str, ''), + 'config_version': FieldInfo('config_version', str, '0.0.0'), 'repair_date': FieldInfo('repair_date', datetime, datetime.fromtimestamp(0)), 'repair_ts': FieldInfo('repair_ts', int, None), 'snapshot_from': FieldInfo('snapshot_from', str, None), @@ -40,6 +41,10 @@ class ChainRecord(FlatRedisRecord): def _record_fields(self) -> dict[str, FieldInfo]: return CHAIN_RECORD_FIELDS + @property + def config_version(self) -> str: + return cast(str, self._get_field('config_version')) + @property def repair_date(self) -> datetime: return cast(datetime, self._get_field('repair_date')) @@ -52,6 +57,9 @@ def snapshot_from(self) -> str | None: def repair_ts(self) -> int | None: return cast(int | None, self._get_field('repair_ts')) + def set_config_version(self, version: str) -> None: + self._set_field('config_version', version) + def set_repair_date(self, date: datetime) -> None: self._set_field('repair_date', date) @@ -60,3 +68,14 @@ def set_snapshot_from(self, 
value: str | None) -> None: def set_repair_ts(self, value: int | None) -> None: self._set_field('repair_ts', value) + + +def get_mirage_chain_record(env: dict) -> ChainRecord: + return ChainRecord(get_mirage_chain_name(env)) + + +def migrate_chain_record(env: dict) -> None: + version = env['CONTAINER_CONFIGS_STREAM'] + logger.info('Migrating mirage chain record, setting config version to %s', version) + record = get_mirage_chain_record(env) + record.set_config_version(version) diff --git a/node_cli/operations/mirage.py b/node_cli/operations/mirage.py index ed3dce78..4329dd99 100644 --- a/node_cli/operations/mirage.py +++ b/node_cli/operations/mirage.py @@ -31,6 +31,7 @@ from node_cli.core.nftables import configure_nftables from node_cli.core.nginx import generate_nginx_config from node_cli.migrations.mirage.from_boot import migrate_nftables_from_boot +from node_cli.mirage.record.chain_record import migrate_chain_record from node_cli.operations.base import checked_host from node_cli.operations.common import unpack_backup_archive from node_cli.operations.config_repo import ( @@ -93,6 +94,8 @@ def update_mirage(env_filepath: str, env: dict, update_type: MirageUpdateType) - if update_type == MirageUpdateType.FROM_BOOT: migrate_nftables_from_boot() + migrate_chain_record(env) + update_images(env=env, node_type=NodeType.MIRAGE) compose_up(env=env, node_type=NodeType.MIRAGE) return True diff --git a/setup.py b/setup.py index ad03a8b4..5ea3e6ce 100644 --- a/setup.py +++ b/setup.py @@ -71,7 +71,7 @@ def find_version(*file_paths): 'sh==1.14.2', 'python-crontab==2.6.0', 'requests-mock==1.12.1', - 'redis==6.2.0' + 'redis==6.2.0', ], python_requires='>=3.8,<4', extras_require=extras_require, diff --git a/tests/cli/node_test.py b/tests/cli/node_test.py index fa966858..60344b71 100644 --- a/tests/cli/node_test.py +++ b/tests/cli/node_test.py @@ -322,11 +322,14 @@ def test_backup(): assert 'Backup archive succesfully created ' in result.output 
-@pytest.mark.parametrize("node_type,test_user_conf", [ - (NodeType.REGULAR, "regular_user_conf"), - (NodeType.MIRAGE, "mirage_user_conf"), - (NodeType.SYNC, "sync_user_conf") -]) +@pytest.mark.parametrize( + 'node_type,test_user_conf', + [ + (NodeType.REGULAR, 'regular_user_conf'), + (NodeType.MIRAGE, 'mirage_user_conf'), + (NodeType.SYNC, 'sync_user_conf'), + ], +) def test_restore(request, node_type, test_user_conf, mocked_g_config, tmp_path): pathlib.Path(SKALE_DIR).mkdir(parents=True, exist_ok=True) result = run_command(backup_node, [tmp_path]) @@ -392,7 +395,7 @@ def test_turn_off_maintenance_on(mocked_g_config, regular_user_conf): mock.patch('node_cli.core.node.turn_off_op'), mock.patch('node_cli.utils.decorators.is_node_inited', return_value=True), mock.patch('node_cli.configs.user.validate_alias_or_address'), - mock.patch('node_cli.cli.node.TYPE', NodeType.REGULAR) + mock.patch('node_cli.cli.node.TYPE', NodeType.REGULAR), ): result = run_command_mock( 'node_cli.utils.helper.requests.post', @@ -425,7 +428,7 @@ def test_turn_on_maintenance_off(mocked_g_config, regular_user_conf): mock.patch('node_cli.core.node.is_base_containers_alive'), mock.patch('node_cli.utils.decorators.is_node_inited', return_value=True), mock.patch('node_cli.configs.user.validate_alias_or_address'), - mock.patch('node_cli.cli.node.TYPE', NodeType.REGULAR) + mock.patch('node_cli.cli.node.TYPE', NodeType.REGULAR), ): result = run_command_mock( 'node_cli.utils.helper.requests.post', diff --git a/tests/cli/resources_allocation_test.py b/tests/cli/resources_allocation_test.py index bdb8d8f0..4f59daea 100644 --- a/tests/cli/resources_allocation_test.py +++ b/tests/cli/resources_allocation_test.py @@ -60,7 +60,7 @@ def test_generate(regular_user_conf): 'node_cli.utils.helper.post_request', resp_mock, generate, - [regular_user_conf.as_posix(), '--yes'] + [regular_user_conf.as_posix(), '--yes'], ) assert result.output == ( f'Resource allocation file generated: {RESOURCE_ALLOCATION_FILEPATH}\n' 
@@ -79,7 +79,7 @@ def test_generate_already_exists(regular_user_conf, resource_alloc_config): 'node_cli.utils.helper.post_request', resp_mock, generate, - [regular_user_conf.as_posix(), '--yes'] + [regular_user_conf.as_posix(), '--yes'], ) assert result.output == 'Resource allocation file already exists\n' assert result.exit_code == 0 @@ -88,7 +88,7 @@ def test_generate_already_exists(regular_user_conf, resource_alloc_config): 'node_cli.utils.helper.post_request', resp_mock, generate, - [regular_user_conf.as_posix(), '--yes', '--force'] + [regular_user_conf.as_posix(), '--yes', '--force'], ) assert result.output == ( f'Resource allocation file generated: {RESOURCE_ALLOCATION_FILEPATH}\n' diff --git a/tests/cli/sync_node_test.py b/tests/cli/sync_node_test.py index 486b30e2..014510e1 100644 --- a/tests/cli/sync_node_test.py +++ b/tests/cli/sync_node_test.py @@ -56,6 +56,7 @@ def test_init_sync(mocked_g_config, clean_node_options, sync_user_conf): assert result.exit_code == 0 + def test_init_sync_archive(mocked_g_config, clean_node_options, sync_user_conf): pathlib.Path(NODE_DATA_PATH).mkdir(parents=True, exist_ok=True) with ( @@ -77,7 +78,7 @@ def test_init_sync_archive(mocked_g_config, clean_node_options, sync_user_conf): mock.patch('node_cli.operations.base.configure_nftables'), mock.patch('node_cli.utils.decorators.is_node_inited', return_value=False), mock.patch('node_cli.configs.user.validate_alias_or_address'), - mock.patch('node_cli.cli.node.TYPE', NodeType.SYNC) + mock.patch('node_cli.cli.node.TYPE', NodeType.SYNC), ): result = run_command(_init_sync, [sync_user_conf.as_posix(), '--archive']) node_options = NodeOptions() @@ -105,6 +106,7 @@ def test_init_archive_indexer_fail(mocked_g_config, clean_node_options): assert result.exit_code == 1 assert 'Cannot use both' in result.output + def test_update_sync(sync_user_conf, mocked_g_config): pathlib.Path(SKALE_DIR).mkdir(parents=True, exist_ok=True) From 27e5e48f18e9e26a858c029ccd350d74073040f9 Mon Sep 17 
00:00:00 2001 From: Dmytro Date: Mon, 23 Jun 2025 17:13:10 +0100 Subject: [PATCH 115/332] Add redis service startup during mirage node update --- node_cli/operations/mirage.py | 12 +++++++++++- node_cli/utils/docker_utils.py | 36 +++++++++++++++++++++++++++------- 2 files changed, 40 insertions(+), 8 deletions(-) diff --git a/node_cli/operations/mirage.py b/node_cli/operations/mirage.py index 4329dd99..91b81a16 100644 --- a/node_cli/operations/mirage.py +++ b/node_cli/operations/mirage.py @@ -17,6 +17,7 @@ # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see . +import time import logging from enum import Enum @@ -39,11 +40,14 @@ update_images, ) from node_cli.utils.docker_utils import ( + REDIS_SERVICE_DICT, + REDIS_START_TIMEOUT, NodeType, compose_rm, compose_up, docker_cleanup, remove_dynamic_containers, + wait_for_container, ) from node_cli.utils.helper import str_to_bool from node_cli.utils.meta import MirageCliMetaManager @@ -94,9 +98,15 @@ def update_mirage(env_filepath: str, env: dict, update_type: MirageUpdateType) - if update_type == MirageUpdateType.FROM_BOOT: migrate_nftables_from_boot() - migrate_chain_record(env) update_images(env=env, node_type=NodeType.MIRAGE) + + compose_up(env=env, node_type=NodeType.MIRAGE, services=list(REDIS_SERVICE_DICT)) + wait_for_container(REDIS_SERVICE_DICT['redis']) + time.sleep(REDIS_START_TIMEOUT) + if update_type == MirageUpdateType.FROM_BOOT: + migrate_chain_record(env) + compose_up(env=env, node_type=NodeType.MIRAGE) return True diff --git a/node_cli/utils/docker_utils.py b/node_cli/utils/docker_utils.py index d6438253..8bf37e78 100644 --- a/node_cli/utils/docker_utils.py +++ b/node_cli/utils/docker_utils.py @@ -17,15 +17,17 @@ # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see . 
+import os import io +import time import itertools -import os import logging from typing import Optional import docker from docker.client import DockerClient from docker.models.containers import Container +from docker.errors import NotFound from node_cli.utils.helper import run_cmd, str_to_bool from node_cli.configs import ( @@ -44,8 +46,10 @@ SCHAIN_REMOVE_TIMEOUT = 300 IMA_REMOVE_TIMEOUT = 20 TELEGRAF_REMOVE_TIMEOUT = 20 +REDIS_START_TIMEOUT = 10 + +REDIS_SERVICE_DICT = {'redis': 'skale_redis'} -# Services have format : CORE_COMMON_COMPOSE_SERVICES = { 'transaction-manager': 'skale_transaction-manager', 'redis': 'skale_redis', @@ -245,7 +249,7 @@ def is_volume_exists(name: str, dutils=None): dutils = dutils or docker_client() try: dutils.volumes.get(name) - except docker.errors.NotFound: + except NotFound: return False return True @@ -300,7 +304,7 @@ def get_compose_services(node_type: NodeType) -> list[str]: return result -def get_up_compose_cmd(node_type: NodeType, services: Optional[list[str]] = None) -> tuple: +def get_up_compose_cmd(node_type: NodeType, services: list[str] | None = None) -> tuple: compose_path = get_compose_path(node_type) if services is None: @@ -309,7 +313,9 @@ def get_up_compose_cmd(node_type: NodeType, services: Optional[list[str]] = None return ('docker', 'compose', '-f', compose_path, 'up', '-d', *services) -def compose_up(env, node_type: NodeType, is_mirage_boot: bool = False): +def compose_up( + env, node_type: NodeType, is_mirage_boot: bool = False, services: list[str] | None = None +): if node_type == NodeType.SYNC: logger.info('Running containers for sync node') run_cmd(cmd=get_up_compose_cmd(node_type=NodeType.SYNC), env=env) @@ -322,7 +328,7 @@ def compose_up(env, node_type: NodeType, is_mirage_boot: bool = False): logger.info('Running mirage base set of containers') if not is_mirage_boot: logger.debug('Launching mirage containers with env %s', env) - run_cmd(cmd=get_up_compose_cmd(node_type=NodeType.MIRAGE), env=env) + 
run_cmd(cmd=get_up_compose_cmd(node_type=NodeType.MIRAGE, services=services), env=env) else: logger.debug('Launching mirage boot containers with env %s', env) run_cmd( @@ -385,7 +391,7 @@ def is_container_running(name: str, dclient: Optional[DockerClient] = None) -> b try: container = dc.containers.get(name) return container.status == 'running' - except docker.errors.NotFound: + except NotFound: return False @@ -421,3 +427,19 @@ def docker_cleanup(dclient=None, ignore=None): system_prune() except Exception as e: logger.warning('Image cleanup errored with %s', e) + + +def wait_for_container(container_name: str, attempts: int = 10, interval: int = 3) -> bool: + logger.info('Waiting for container %s to be up', container_name) + dc = docker_client() + + for i in range(attempts): + try: + container = dc.containers.get(container_name) + if container.status == 'running': + logger.info('Container %s is up', container_name) + return True + except NotFound: + logger.warning('Container %s not found, retrying...', container_name) + time.sleep(interval) + return False From 48ddb439c2584812e3a7f9c68ec59f8a0766c187 Mon Sep 17 00:00:00 2001 From: Dmytro Date: Mon, 23 Jun 2025 19:00:40 +0100 Subject: [PATCH 116/332] Fix repair test --- tests/mirage/mirage_node_test.py | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/tests/mirage/mirage_node_test.py b/tests/mirage/mirage_node_test.py index b81b91ef..0b6ed316 100644 --- a/tests/mirage/mirage_node_test.py +++ b/tests/mirage/mirage_node_test.py @@ -131,10 +131,8 @@ def test_migrate_from_boot( @freezegun.freeze_time(CURRENT_DATETIME) @mock.patch('node_cli.mirage.mirage_node.compose_node_env', return_value={'ENV_TYPE': 'devnet'}) -@mock.patch( - 'node_cli.mirage.mirage_node.get_static_params', return_value={'info': {'chain_name': 'test'}} -) -def test_mirage_repair(compsoe_node_env_mock, get_statis_params_mock, redis_client, inited_node): +@mock.patch('node_cli.mirage.record.chain_record.get_mirage_chain_name', 
return_value='test') +def test_mirage_repair(compose_node_env_mock, get_static_params_mock, redis_client, inited_node): request_repair() assert redis_client.get('test_repair_ts') == f'{CURRENT_TIMESTAMP}'.encode('utf-8') assert redis_client.get('test_snapshot_from') == b'' From 77342770f2ac27ee730e5ec6db4884c02d09e58d Mon Sep 17 00:00:00 2001 From: badrogger Date: Mon, 23 Jun 2025 20:03:00 +0100 Subject: [PATCH 117/332] Add mirage cleanup command --- node_cli/cli/mirage_node.py | 20 +++- node_cli/core/docker_config.py | 59 +++++++++++- node_cli/core/schains.py | 19 +++- node_cli/mirage/mirage_node.py | 23 ++++- node_cli/operations/__init__.py | 6 ++ node_cli/operations/base.py | 26 ++++-- node_cli/operations/common.py | 9 +- node_cli/operations/mirage.py | 158 ++++++++++++++++++++++++++++++++ tests/core/core_schains_test.py | 9 +- 9 files changed, 294 insertions(+), 35 deletions(-) create mode 100644 node_cli/operations/mirage.py diff --git a/node_cli/cli/mirage_node.py b/node_cli/cli/mirage_node.py index af4d8455..65f4d9bd 100644 --- a/node_cli/cli/mirage_node.py +++ b/node_cli/cli/mirage_node.py @@ -19,9 +19,10 @@ import click -from node_cli.core.node import get_node_signature, backup, get_node_info -from node_cli.mirage.mirage_node import restore_mirage, request_repair -from node_cli.utils.helper import error_exit, streamed_cmd, abort_if_false, URL_TYPE +from node_cli.core.node import backup, get_node_info, get_node_signature +from node_cli.mirage.mirage_node import cleanup as mirage_cleanup +from node_cli.mirage.mirage_node import request_repair, restore_mirage +from node_cli.utils.helper import URL_TYPE, abort_if_false, error_exit, streamed_cmd from node_cli.utils.texts import safe_load_texts TEXTS = safe_load_texts() @@ -116,3 +117,16 @@ def restore_node(backup_path, env_file, config_only): ) def repair(snapshot_from: str = '') -> None: request_repair(snapshot_from=snapshot_from) + + +@node.command('cleanup', help='Cleanup Mirage node.') +@click.option( + 
'--yes', + is_flag=True, + callback=abort_if_false, + expose_value=False, + prompt='Are you sure you want to cleanup Mirage node?', +) +@streamed_cmd +def cleanup_node(): + mirage_cleanup() diff --git a/node_cli/core/docker_config.py b/node_cli/core/docker_config.py index d0f07051..15d6d9de 100644 --- a/node_cli/core/docker_config.py +++ b/node_cli/core/docker_config.py @@ -1,5 +1,5 @@ -import grp import enum +import grp import json import logging import os @@ -8,19 +8,17 @@ import typing from typing import Optional, Tuple - from node_cli.configs import ( - DOCKER_DEAMON_CONFIG_PATH, DOCKER_DAEMON_HOSTS, + DOCKER_DEAMON_CONFIG_PATH, DOCKER_SERVICE_CONFIG_DIR, DOCKER_SERVICE_CONFIG_PATH, DOCKER_SOCKET_PATH, NODE_DOCKER_CONFIG_PATH, SKALE_RUN_DIR, ) -from node_cli.utils.helper import run_cmd from node_cli.utils.docker_utils import docker_client, get_containers - +from node_cli.utils.helper import run_cmd logger = logging.getLogger(__name__) @@ -184,3 +182,54 @@ def configure_docker() -> None: save_docker_group_id(group_id) logger.info('Docker configuration finished') + + +def remove_docker_service_override_config() -> None: + if os.path.isfile(DOCKER_SERVICE_CONFIG_PATH): + logger.info('Removing docker service override config') + os.remove(DOCKER_SERVICE_CONFIG_PATH) + + +def reset_docker_daemon_config() -> None: + if os.path.isfile(DOCKER_DEAMON_CONFIG_PATH): + logger.info('Resetting docker daemon config') + with open(DOCKER_DEAMON_CONFIG_PATH, 'r') as daemon_config: + config = json.load(daemon_config) + + # Remove the keys we added + config.pop('live-restore', None) + config.pop('hosts', None) + + if config: + # Write back remaining config + with open(DOCKER_DEAMON_CONFIG_PATH, 'w') as daemon_config: + json.dump(config, daemon_config) + else: + # Remove file if empty + os.remove(DOCKER_DEAMON_CONFIG_PATH) + + +def remove_node_docker_config() -> None: + if os.path.isfile(NODE_DOCKER_CONFIG_PATH): + logger.info('Removing node docker config') + 
os.remove(NODE_DOCKER_CONFIG_PATH) + + +def remove_skale_run_dir() -> None: + if os.path.isdir(SKALE_RUN_DIR): + os.rmdir(SKALE_RUN_DIR) + logger.info('Removed SKALE run directory') + + +def cleanup_docker_configuration() -> None: + """Cleanup all skale specific docker configuration files and directories""" + logger.info('Cleaning up docker configuration') + + remove_docker_service_override_config() + reset_docker_daemon_config() + remove_node_docker_config() + remove_skale_run_dir() + restart_docker_service() + wait_for_socket_initialization() + + logger.info('Docker configuration cleanup finished') diff --git a/node_cli/core/schains.py b/node_cli/core/schains.py index 783722fa..f5e47248 100644 --- a/node_cli/core/schains.py +++ b/node_cli/core/schains.py @@ -58,6 +58,12 @@ BLUEPRINT_NAME = 'schains' +class NoDataDirForChainError(Exception): + """Raised when no data directory is found""" + + pass + + def get_schain_firewall_rules(schain: str) -> None: status, payload = get_request( blueprint=BLUEPRINT_NAME, method='firewall-rules', params={'schain_name': schain} @@ -249,8 +255,17 @@ def ensure_schain_volume(schain: str, schain_type: str, env_type: str) -> None: logger.warning('Volume %s already exists', schain) -def cleanup_sync_datadir(schain_name: str, base_path: str = SCHAINS_MNT_DIR_SINGLE_CHAIN) -> None: - base_path = os.path.join(base_path, schain_name) +def cleanup_datadir_for_single_chain_node( + chain_name: str = '', base_path: str = SCHAINS_MNT_DIR_SINGLE_CHAIN +) -> None: + if not chain_name: + folders = [f for f in os.listdir(base_path) if os.path.isdir(os.path.join(base_path, f))] + if not folders: + raise NoDataDirForChainError( + f'No data directory found in {base_path}. Please check the path or specify a chain name.' 
+ ) + chain_name = folders[0] + base_path = os.path.join(base_path, chain_name) regular_folders_pattern = f'{base_path}/[!snapshots]*' logger.info('Removing regular folders') for filepath in glob.glob(regular_folders_pattern): diff --git a/node_cli/mirage/mirage_node.py b/node_cli/mirage/mirage_node.py index e63c0401..0dea243f 100644 --- a/node_cli/mirage/mirage_node.py +++ b/node_cli/mirage/mirage_node.py @@ -21,20 +21,23 @@ import logging import time -from node_cli.configs import SKALE_DIR, RESTORE_SLEEP_TIMEOUT +from node_cli.configs import RESTORE_SLEEP_TIMEOUT, SKALE_DIR from node_cli.configs.env import SKALE_DIR_ENV_FILEPATH -from node_cli.core.node import compose_node_env +from node_cli.core.docker_config import cleanup_docker_configuration from node_cli.core.host import save_env_params +from node_cli.core.node import compose_node_env from node_cli.core.static_config import get_static_params from node_cli.mirage.record.chain_record import ChainRecord -from node_cli.operations import restore_mirage_op -from node_cli.utils.decorators import check_inited, check_not_inited +from node_cli.operations import ( + cleanup_mirage_op, + restore_mirage_op, +) +from node_cli.utils.decorators import check_inited, check_not_inited, check_user from node_cli.utils.exit_codes import CLIExitCodes from node_cli.utils.helper import error_exit from node_cli.utils.node_type import NodeType from node_cli.utils.texts import safe_load_texts - logger = logging.getLogger(__name__) TEXTS = safe_load_texts() @@ -64,3 +67,13 @@ def request_repair(snapshot_from: str = '') -> None: record.set_snapshot_from(snapshot_from) print(TEXTS['mirage']['node']['repair']['repair_requested']) + + +@check_inited +@check_user +def cleanup() -> None: + env = compose_node_env(SKALE_DIR_ENV_FILEPATH, save=False, node_type=NodeType.MIRAGE) + chain_name = env['SCHAIN_NAME'] + cleanup_mirage_op(env, chain_name) + logger.info('Mirage node was cleaned up, all containers and data removed') + 
cleanup_docker_configuration() diff --git a/node_cli/operations/__init__.py b/node_cli/operations/__init__.py index 159c0e16..f4f67b2c 100644 --- a/node_cli/operations/__init__.py +++ b/node_cli/operations/__init__.py @@ -32,3 +32,9 @@ cleanup_sync as cleanup_sync_op, configure_nftables, ) +from node_cli.operations.mirage import ( # noqa + update_mirage as update_mirage_op, + MirageUpdateType, + restore_mirage as restore_mirage_op, + cleanup as cleanup_mirage_op, +) diff --git a/node_cli/operations/base.py b/node_cli/operations/base.py index ccc29a1b..4e0845f0 100644 --- a/node_cli/operations/base.py +++ b/node_cli/operations/base.py @@ -17,20 +17,22 @@ # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see . -import time - -import distro import functools import logging +import time from typing import Dict, Optional +import distro + +from node_cli.cli.info import TYPE, VERSION from node_cli.configs import ( CONTAINER_CONFIG_PATH, CONTAINER_CONFIG_TMP_PATH, - SKALE_DIR, GLOBAL_SKALE_DIR, + SKALE_DIR, ) -from node_cli.core.checks import CheckType, run_checks as run_host_checks +from node_cli.core.checks import CheckType +from node_cli.core.checks import run_checks as run_host_checks from node_cli.core.docker_config import configure_docker from node_cli.core.host import ( ensure_btrfs_kernel_module_autoloaded, @@ -40,12 +42,11 @@ from node_cli.core.nftables import configure_nftables from node_cli.core.nginx import generate_nginx_config from node_cli.core.node_options import NodeOptions -from node_cli.core.resources import update_resource_allocation, init_shared_space_volume +from node_cli.core.resources import init_shared_space_volume, update_resource_allocation from node_cli.core.schains import ( + cleanup_datadir_for_single_chain_node, update_node_cli_schain_status, - cleanup_sync_datadir, ) -from node_cli.cli.info import VERSION, TYPE from node_cli.operations.common import configure_filebeat, 
configure_flask, unpack_backup_archive from node_cli.operations.docker_lvmpy import lvmpy_install from node_cli.operations.skale_node import ( @@ -53,6 +54,7 @@ sync_skale_node, update_images, ) +from node_cli.operations.docker_lvmpy import lvmpy_install from node_cli.operations.volume import ( cleanup_volume_artifacts, ensure_filestorage_mapping, @@ -64,12 +66,16 @@ docker_cleanup, remove_dynamic_containers, ) +<<<<<<< Updated upstream from node_cli.utils.helper import str_to_bool, rm_dir from node_cli.utils.meta import get_meta_info, update_meta +======= +from node_cli.utils.helper import rm_dir, str_to_bool +from node_cli.utils.meta import CliMetaManager, MirageCliMetaManager +>>>>>>> Stashed changes from node_cli.utils.node_type import NodeType from node_cli.utils.print_formatters import print_failed_requirements_checks - logger = logging.getLogger(__name__) @@ -508,6 +514,6 @@ def restore_mirage(env, backup_path, config_only=False): def cleanup_sync(env, schain_name: str) -> None: turn_off(env, node_type=NodeType.SYNC) - cleanup_sync_datadir(schain_name=schain_name) + cleanup_datadir_for_single_chain_node(schain_name=schain_name) rm_dir(GLOBAL_SKALE_DIR) rm_dir(SKALE_DIR) diff --git a/node_cli/operations/common.py b/node_cli/operations/common.py index 7e484cdf..7c876fa8 100644 --- a/node_cli/operations/common.py +++ b/node_cli/operations/common.py @@ -17,19 +17,18 @@ # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see . 
+import logging import os +import secrets +import shutil import stat import tarfile -import logging -import shutil -import secrets - from shutil import copyfile from node_cli.configs import ( - G_CONF_HOME, FILEBEAT_CONFIG_PATH, FLASK_SECRET_KEY_FILE, + G_CONF_HOME, SRC_FILEBEAT_CONFIG_PATH, ) diff --git a/node_cli/operations/mirage.py b/node_cli/operations/mirage.py new file mode 100644 index 00000000..1f59af01 --- /dev/null +++ b/node_cli/operations/mirage.py @@ -0,0 +1,158 @@ +# -*- coding: utf-8 -*- +# +# This file is part of node-cli +# +# Copyright (C) 2021-Present SKALE Labs +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Affero General Public License for more details. +# +# You should have received a copy of the GNU Affero General Public License +# along with this program. If not, see . 
+ +import logging +from enum import Enum + +import distro + +from node_cli.cli.info import TYPE, VERSION +from node_cli.configs import ( + CONTAINER_CONFIG_PATH, + GLOBAL_SKALE_DIR, + SKALE_DIR, +) +from node_cli.core.checks import CheckType +from node_cli.core.checks import run_checks as run_host_checks +from node_cli.core.docker_config import cleanup_docker_configuration, configure_docker +from node_cli.core.host import ensure_btrfs_kernel_module_autoloaded, link_env_file, prepare_host +from node_cli.core.nftables import configure_nftables +from node_cli.core.nginx import generate_nginx_config +from node_cli.core.schains import cleanup_datadir_for_single_chain_node +from node_cli.migrations.mirage.from_boot import migrate_nftables_from_boot +from node_cli.operations.base import checked_host, turn_off +from node_cli.operations.common import unpack_backup_archive +from node_cli.operations.config_repo import ( + sync_skale_node, + update_images, +) +from node_cli.utils.docker_utils import ( + NodeType, + compose_rm, + compose_up, + docker_cleanup, + remove_dynamic_containers, +) +from node_cli.utils.helper import rm_dir, str_to_bool +from node_cli.utils.meta import MirageCliMetaManager +from node_cli.utils.print_formatters import print_failed_requirements_checks + +logger = logging.getLogger(__name__) + + +class MirageUpdateType(Enum): + REGULAR = 'regular' + INFRA_ONLY = 'infra_only' + FROM_BOOT = 'from_boot' + + +@checked_host +def update_mirage(env_filepath: str, env: dict, update_type: MirageUpdateType) -> bool: + compose_rm(node_type=NodeType.MIRAGE, env=env) + if update_type not in (MirageUpdateType.INFRA_ONLY, MirageUpdateType.FROM_BOOT): + remove_dynamic_containers() + + sync_skale_node() + ensure_btrfs_kernel_module_autoloaded() + + if env.get('SKIP_DOCKER_CONFIG') != 'True': + configure_docker() + + configure_nftables() + generate_nginx_config() + + prepare_host(env_filepath, env['ENV_TYPE'], allocation=True) + meta_manager = MirageCliMetaManager() + 
current_stream = meta_manager.get_meta_info().config_stream + skip_cleanup = env.get('SKIP_DOCKER_CLEANUP') == 'True' + if not skip_cleanup and current_stream != env['CONTAINER_CONFIGS_STREAM']: + logger.info( + 'Stream version was changed from %s to %s', + current_stream, + env['CONTAINER_CONFIGS_STREAM'], + ) + docker_cleanup() + + meta_manager.update_meta( + VERSION, + env['CONTAINER_CONFIGS_STREAM'], + distro.id(), + distro.version(), + ) + + if update_type == MirageUpdateType.FROM_BOOT: + migrate_nftables_from_boot() + update_images(env=env, node_type=NodeType.MIRAGE) + compose_up(env=env, node_type=NodeType.MIRAGE) + return True + + +def restore_mirage(env, backup_path, config_only=False): + unpack_backup_archive(backup_path) + failed_checks = run_host_checks( + env['DISK_MOUNTPOINT'], + TYPE, + env['ENV_TYPE'], + CONTAINER_CONFIG_PATH, + check_type=CheckType.PREINSTALL, + ) + if failed_checks: + print_failed_requirements_checks(failed_checks) + return False + + ensure_btrfs_kernel_module_autoloaded() + + if env.get('SKIP_DOCKER_CONFIG') != 'True': + configure_docker() + + enable_monitoring = str_to_bool(env.get('MONITORING_CONTAINERS', 'False')) + configure_nftables(enable_monitoring=enable_monitoring) + + link_env_file() + + meta_manager = MirageCliMetaManager() + meta_manager.update_meta( + VERSION, + env['CONTAINER_CONFIGS_STREAM'], + distro.id(), + distro.version(), + ) + + if not config_only: + compose_up(env=env, node_type=NodeType.MIRAGE) + + failed_checks = run_host_checks( + env['DISK_MOUNTPOINT'], + TYPE, + env['ENV_TYPE'], + CONTAINER_CONFIG_PATH, + check_type=CheckType.POSTINSTALL, + ) + if failed_checks: + print_failed_requirements_checks(failed_checks) + return False + return True + + +def cleanup(env, chain_name: str) -> None: + turn_off(env, node_type=NodeType.MIRAGE) + cleanup_datadir_for_single_chain_node() + rm_dir(GLOBAL_SKALE_DIR) + rm_dir(SKALE_DIR) + cleanup_docker_configuration() diff --git a/tests/core/core_schains_test.py 
b/tests/core/core_schains_test.py index 4b8e0fcb..c6ce426b 100644 --- a/tests/core/core_schains_test.py +++ b/tests/core/core_schains_test.py @@ -1,13 +1,12 @@ import os -from unittest import mock from pathlib import Path +from unittest import mock import freezegun -from node_cli.core.schains import cleanup_sync_datadir, toggle_schain_repair_mode +from node_cli.core.schains import cleanup_datadir_for_single_chain_node, toggle_schain_repair_mode from node_cli.utils.helper import read_json - -from tests.helper import CURRENT_TIMESTAMP, CURRENT_DATETIME +from tests.helper import CURRENT_DATETIME, CURRENT_TIMESTAMP @freezegun.freeze_time(CURRENT_DATETIME) @@ -82,5 +81,5 @@ def test_cleanup_sync_datadir(tmp_sync_datadir): hash_path.touch() with mock.patch('node_cli.core.schains.rm_btrfs_subvolume'): - cleanup_sync_datadir(schain_name, base_path=tmp_sync_datadir) + cleanup_datadir_for_single_chain_node(schain_name, base_path=tmp_sync_datadir) assert not os.path.isdir(base_folder) From 1caff0d336931ae43b979493911414afdf47ee0c Mon Sep 17 00:00:00 2001 From: badrogger Date: Mon, 23 Jun 2025 20:03:54 +0100 Subject: [PATCH 118/332] Fix conflicts artefacts --- node_cli/operations/base.py | 8 +------- 1 file changed, 1 insertion(+), 7 deletions(-) diff --git a/node_cli/operations/base.py b/node_cli/operations/base.py index 4e0845f0..01158699 100644 --- a/node_cli/operations/base.py +++ b/node_cli/operations/base.py @@ -54,7 +54,6 @@ sync_skale_node, update_images, ) -from node_cli.operations.docker_lvmpy import lvmpy_install from node_cli.operations.volume import ( cleanup_volume_artifacts, ensure_filestorage_mapping, @@ -66,13 +65,8 @@ docker_cleanup, remove_dynamic_containers, ) -<<<<<<< Updated upstream -from node_cli.utils.helper import str_to_bool, rm_dir -from node_cli.utils.meta import get_meta_info, update_meta -======= from node_cli.utils.helper import rm_dir, str_to_bool -from node_cli.utils.meta import CliMetaManager, MirageCliMetaManager ->>>>>>> Stashed changes 
+from node_cli.utils.meta import get_meta_info, update_meta from node_cli.utils.node_type import NodeType from node_cli.utils.print_formatters import print_failed_requirements_checks From 7aa9182d29e799349bffaa4b7ba195d85e956854 Mon Sep 17 00:00:00 2001 From: badrogger Date: Mon, 23 Jun 2025 20:38:27 +0100 Subject: [PATCH 119/332] Fix mirage boot --- node_cli/utils/docker_utils.py | 22 +++++++++++----------- 1 file changed, 11 insertions(+), 11 deletions(-) diff --git a/node_cli/utils/docker_utils.py b/node_cli/utils/docker_utils.py index 8bf37e78..11fe3992 100644 --- a/node_cli/utils/docker_utils.py +++ b/node_cli/utils/docker_utils.py @@ -17,30 +17,29 @@ # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see . -import os import io -import time import itertools import logging +import os +import time from typing import Optional import docker from docker.client import DockerClient -from docker.models.containers import Container from docker.errors import NotFound +from docker.models.containers import Container -from node_cli.utils.helper import run_cmd, str_to_bool from node_cli.configs import ( COMPOSE_PATH, - SYNC_COMPOSE_PATH, MIRAGE_COMPOSE_PATH, + NGINX_CONTAINER_NAME, REMOVED_CONTAINERS_FOLDER_PATH, SGX_CERTIFICATES_DIR_NAME, - NGINX_CONTAINER_NAME, + SYNC_COMPOSE_PATH, ) +from node_cli.utils.helper import run_cmd, str_to_bool from node_cli.utils.node_type import NodeType - logger = logging.getLogger(__name__) SCHAIN_REMOVE_TIMEOUT = 300 @@ -76,6 +75,7 @@ BASE_MIRAGE_BOOT_COMPOSE_SERVICES = { **CORE_MIRAGE_COMPOSE_SERVICES, 'mirage-boot': 'mirage_boot_admin', + 'mirage-boot-api': 'mirage_boot_api', } BASE_SYNC_COMPOSE_SERVICES = { @@ -326,10 +326,7 @@ def compose_up( if node_type == NodeType.MIRAGE: logger.info('Running mirage base set of containers') - if not is_mirage_boot: - logger.debug('Launching mirage containers with env %s', env) - run_cmd(cmd=get_up_compose_cmd(node_type=NodeType.MIRAGE, 
services=services), env=env) - else: + if is_mirage_boot: logger.debug('Launching mirage boot containers with env %s', env) run_cmd( cmd=get_up_compose_cmd( @@ -337,6 +334,9 @@ def compose_up( ), env=env, ) + else: + logger.debug('Launching mirage containers with env %s', env) + run_cmd(cmd=get_up_compose_cmd(node_type=NodeType.MIRAGE, services=services), env=env) else: logger.info('Running skale node base set of containers') logger.debug('Launching skale node containers with env %s', env) From 01c2ccdd86f4270f04311137e9dea94dfc067eee Mon Sep 17 00:00:00 2001 From: badrogger Date: Tue, 24 Jun 2025 10:22:34 +0100 Subject: [PATCH 120/332] Add tests for mirage cleanup --- node_cli/core/schains.py | 3 +- tests/mirage/mirage_node_test.py | 212 ++++++++++++++++++++++++++++++- 2 files changed, 213 insertions(+), 2 deletions(-) diff --git a/node_cli/core/schains.py b/node_cli/core/schains.py index dc95e4c1..f61c29f5 100644 --- a/node_cli/core/schains.py +++ b/node_cli/core/schains.py @@ -262,7 +262,8 @@ def cleanup_datadir_for_single_chain_node( folders = [f for f in os.listdir(base_path) if os.path.isdir(os.path.join(base_path, f))] if not folders: raise NoDataDirForChainError( - f'No data directory found in {base_path}. Please check the path or specify a chain name.' + f'No data directory found in {base_path}. ' + 'Please check the path or specify a chain name.' 
) chain_name = folders[0] base_path = os.path.join(base_path, chain_name) diff --git a/tests/mirage/mirage_node_test.py b/tests/mirage/mirage_node_test.py index 0b6ed316..f8cc3678 100644 --- a/tests/mirage/mirage_node_test.py +++ b/tests/mirage/mirage_node_test.py @@ -1,11 +1,13 @@ from unittest import mock import freezegun +import pytest from node_cli.configs import SKALE_DIR +from node_cli.configs.user import SKALE_DIR_ENV_FILEPATH from node_cli.mirage.mirage_boot import init as init_boot from node_cli.mirage.mirage_boot import update -from node_cli.mirage.mirage_node import migrate_from_boot, request_repair, restore_mirage +from node_cli.mirage.mirage_node import cleanup, migrate_from_boot, request_repair, restore_mirage from node_cli.operations.mirage import MirageUpdateType from node_cli.utils.node_type import NodeType from tests.helper import CURRENT_DATETIME, CURRENT_TIMESTAMP @@ -139,3 +141,211 @@ def test_mirage_repair(compose_node_env_mock, get_static_params_mock, redis_clie request_repair(snapshot_from='127.0.0.1') assert redis_client.get('test_repair_ts') == f'{CURRENT_TIMESTAMP}'.encode('utf-8') assert redis_client.get('test_snapshot_from') == b'127.0.0.1' + + +@mock.patch('node_cli.utils.decorators.is_user_valid', return_value=True) +@mock.patch('node_cli.mirage.mirage_node.cleanup_docker_configuration') +@mock.patch('node_cli.mirage.mirage_node.cleanup_mirage_op') +@mock.patch('node_cli.mirage.mirage_node.compose_node_env') +def test_cleanup_success( + mock_compose_env, + mock_cleanup_mirage_op, + mock_cleanup_docker_config, + mock_is_user_valid, + inited_node, + resource_alloc, + meta_file_v3, +): + mock_env = {'ENV_TYPE': 'devnet', 'SCHAIN_NAME': 'test_chain'} + mock_compose_env.return_value = mock_env + + cleanup() + + mock_compose_env.assert_called_once_with( + SKALE_DIR_ENV_FILEPATH, save=False, node_type=NodeType.MIRAGE + ) + mock_cleanup_mirage_op.assert_called_once_with(mock_env, 'test_chain') + mock_cleanup_docker_config.assert_called_once() 
+ + +@mock.patch('node_cli.utils.decorators.is_user_valid', return_value=True) +@mock.patch('node_cli.mirage.mirage_node.cleanup_docker_configuration') +@mock.patch('node_cli.mirage.mirage_node.cleanup_mirage_op') +@mock.patch('node_cli.mirage.mirage_node.compose_node_env') +def test_cleanup_calls_operations_in_correct_order( + mock_compose_env, + mock_cleanup_mirage_op, + mock_cleanup_docker_config, + mock_is_user_valid, + inited_node, + resource_alloc, + meta_file_v3, +): + from node_cli.mirage.mirage_node import cleanup + + mock_env = {'ENV_TYPE': 'devnet', 'SCHAIN_NAME': 'test_chain'} + mock_compose_env.return_value = mock_env + + # Create a mock manager to track call order + manager = mock.Mock() + manager.attach_mock(mock_compose_env, 'compose_env') + manager.attach_mock(mock_cleanup_mirage_op, 'cleanup_mirage_op') + manager.attach_mock(mock_cleanup_docker_config, 'cleanup_docker_config') + + cleanup() + + # Verify the order of calls + expected_calls = [ + mock.call.compose_env(mock.ANY, save=False, node_type=mock.ANY), + mock.call.cleanup_mirage_op(mock_env, 'test_chain'), + mock.call.cleanup_docker_config(), + ] + manager.assert_has_calls(expected_calls, any_order=False) + + +@mock.patch('node_cli.utils.decorators.is_user_valid', return_value=True) +@mock.patch('node_cli.mirage.mirage_node.cleanup_docker_configuration') +@mock.patch('node_cli.mirage.mirage_node.cleanup_mirage_op') +@mock.patch('node_cli.mirage.mirage_node.compose_node_env') +def test_cleanup_with_different_chain_names( + mock_compose_env, + mock_cleanup_mirage_op, + mock_cleanup_docker_config, + mock_is_user_valid, + inited_node, + resource_alloc, + meta_file_v3, +): + test_cases = [ + 'simple_chain', + 'chain-with-hyphens', + 'chain_with_underscores', + 'ChainWithMixedCase', + ] + + for chain_name in test_cases: + mock_env = {'ENV_TYPE': 'devnet', 'SCHAIN_NAME': chain_name} + mock_compose_env.return_value = mock_env + + cleanup() + + mock_cleanup_mirage_op.assert_called_with(mock_env, 
chain_name) + + +@mock.patch('node_cli.utils.decorators.is_user_valid', return_value=True) +@mock.patch('node_cli.mirage.mirage_node.cleanup_docker_configuration') +@mock.patch( + 'node_cli.mirage.mirage_node.cleanup_mirage_op', side_effect=Exception('Cleanup failed') +) +@mock.patch('node_cli.mirage.mirage_node.compose_node_env') +def test_cleanup_continues_after_mirage_op_error( + mock_compose_env, + mock_cleanup_mirage_op, + mock_cleanup_docker_config, + mock_is_user_valid, + inited_node, + resource_alloc, + meta_file_v3, +): + mock_env = {'ENV_TYPE': 'devnet', 'SCHAIN_NAME': 'test_chain'} + mock_compose_env.return_value = mock_env + + # The function should raise the exception from cleanup_mirage_op + with pytest.raises(Exception, match='Cleanup failed'): + cleanup() + + # But we should still verify the calls were made in order + mock_compose_env.assert_called_once() + mock_cleanup_mirage_op.assert_called_once_with(mock_env, 'test_chain') + # cleanup_docker_configuration should not be called if cleanup_mirage_op fails + mock_cleanup_docker_config.assert_not_called() + + +@mock.patch('node_cli.utils.decorators.is_user_valid', return_value=False) +def test_cleanup_fails_when_user_invalid( + mock_is_user_valid, + inited_node, + resource_alloc, + meta_file_v3, +): + """Test that cleanup fails when user validation fails""" + import pytest + + from node_cli.mirage.mirage_node import cleanup + + with pytest.raises(SystemExit): + cleanup() + + +def test_cleanup_fails_when_not_inited(ensure_meta_removed): + import pytest + + with pytest.raises(SystemExit): + cleanup() + + +@mock.patch('node_cli.utils.decorators.is_user_valid', return_value=True) +@mock.patch('node_cli.mirage.mirage_node.cleanup_docker_configuration') +@mock.patch('node_cli.mirage.mirage_node.cleanup_mirage_op') +@mock.patch('node_cli.mirage.mirage_node.compose_node_env') +@mock.patch('node_cli.mirage.mirage_node.logger') +def test_cleanup_logs_success_message( + mock_logger, + mock_compose_env, + 
mock_cleanup_mirage_op, + mock_cleanup_docker_config, + mock_is_user_valid, + inited_node, + resource_alloc, + meta_file_v3, +): + mock_env = {'ENV_TYPE': 'devnet', 'SCHAIN_NAME': 'test_chain'} + mock_compose_env.return_value = mock_env + + cleanup() + + mock_logger.info.assert_called_once_with( + 'Mirage node was cleaned up, all containers and data removed' + ) + + +@mock.patch('node_cli.utils.decorators.is_user_valid', return_value=True) +@mock.patch('node_cli.mirage.mirage_node.cleanup_docker_configuration') +@mock.patch('node_cli.mirage.mirage_node.cleanup_mirage_op') +@mock.patch('node_cli.mirage.mirage_node.compose_node_env') +def test_cleanup_with_missing_schain_name( + mock_compose_env, + mock_cleanup_mirage_op, + mock_cleanup_docker_config, + mock_is_user_valid, + inited_node, + resource_alloc, + meta_file_v3, +): + mock_env = {'ENV_TYPE': 'devnet'} # Missing SCHAIN_NAME + mock_compose_env.return_value = mock_env + + with pytest.raises(KeyError): + cleanup() + + +@mock.patch('node_cli.utils.decorators.is_user_valid', return_value=True) +@mock.patch('node_cli.mirage.mirage_node.cleanup_docker_configuration') +@mock.patch('node_cli.mirage.mirage_node.cleanup_mirage_op') +@mock.patch('node_cli.mirage.mirage_node.compose_node_env') +def test_cleanup_with_empty_schain_name( + mock_compose_env, + mock_cleanup_mirage_op, + mock_cleanup_docker_config, + mock_is_user_valid, + inited_node, + resource_alloc, + meta_file_v3, +): + mock_env = {'ENV_TYPE': 'devnet', 'SCHAIN_NAME': ''} + mock_compose_env.return_value = mock_env + + cleanup() + + mock_cleanup_mirage_op.assert_called_once_with(mock_env, '') + mock_cleanup_docker_config.assert_called_once() From 47443e620537848df7f374c96042e7775c88c8cd Mon Sep 17 00:00:00 2001 From: Dmytro Date: Wed, 25 Jun 2025 12:43:18 +0100 Subject: [PATCH 121/332] Fix mirage boot api container --- node_cli/utils/docker_utils.py | 11 +++++------ 1 file changed, 5 insertions(+), 6 deletions(-) diff --git a/node_cli/utils/docker_utils.py 
b/node_cli/utils/docker_utils.py index 8bf37e78..0cc5da45 100644 --- a/node_cli/utils/docker_utils.py +++ b/node_cli/utils/docker_utils.py @@ -65,17 +65,16 @@ 'bounty': 'skale_bounty', } -CORE_MIRAGE_COMPOSE_SERVICES = { - **CORE_COMMON_COMPOSE_SERVICES, - 'mirage-api': 'mirage_api', -} BASE_MIRAGE_COMPOSE_SERVICES = { - **CORE_MIRAGE_COMPOSE_SERVICES, + **CORE_COMMON_COMPOSE_SERVICES, 'mirage-admin': 'mirage_admin', + 'mirage-api': 'mirage_api', } + BASE_MIRAGE_BOOT_COMPOSE_SERVICES = { - **CORE_MIRAGE_COMPOSE_SERVICES, + **CORE_COMMON_COMPOSE_SERVICES, 'mirage-boot': 'mirage_boot_admin', + 'mirage-boot-api': 'mirage_boot_api', } BASE_SYNC_COMPOSE_SERVICES = { From f8a8bc619dbd6234d771f62de3d1cc46daf37afd Mon Sep 17 00:00:00 2001 From: badrogger Date: Fri, 27 Jun 2025 15:40:06 +0100 Subject: [PATCH 122/332] Remove unused schain_name --- node_cli/mirage/mirage_node.py | 3 +-- node_cli/operations/mirage.py | 2 +- 2 files changed, 2 insertions(+), 3 deletions(-) diff --git a/node_cli/mirage/mirage_node.py b/node_cli/mirage/mirage_node.py index 1ed7b8ff..34e5aff8 100644 --- a/node_cli/mirage/mirage_node.py +++ b/node_cli/mirage/mirage_node.py @@ -92,7 +92,6 @@ def request_repair(snapshot_from: str = '') -> None: @check_user def cleanup() -> None: env = compose_node_env(SKALE_DIR_ENV_FILEPATH, save=False, node_type=NodeType.MIRAGE) - chain_name = env['SCHAIN_NAME'] - cleanup_mirage_op(env, chain_name) + cleanup_mirage_op(env) logger.info('Mirage node was cleaned up, all containers and data removed') cleanup_docker_configuration() diff --git a/node_cli/operations/mirage.py b/node_cli/operations/mirage.py index 1b1bb336..f45b5a6d 100644 --- a/node_cli/operations/mirage.py +++ b/node_cli/operations/mirage.py @@ -163,7 +163,7 @@ def restore_mirage(env, backup_path, config_only=False): return True -def cleanup(env, chain_name: str) -> None: +def cleanup(env) -> None: turn_off(env, node_type=NodeType.MIRAGE) cleanup_datadir_for_single_chain_node() rm_dir(GLOBAL_SKALE_DIR) 
From 68ed84e0e8a620b57513e89143569363b45ccefb Mon Sep 17 00:00:00 2001 From: badrogger Date: Wed, 2 Jul 2025 16:56:56 +0100 Subject: [PATCH 123/332] Fix typo --- node_cli/configs/__init__.py | 4 ++-- node_cli/core/docker_config.py | 15 ++++++--------- node_cli/core/schains.py | 2 -- 3 files changed, 8 insertions(+), 13 deletions(-) diff --git a/node_cli/configs/__init__.py b/node_cli/configs/__init__.py index be9f8bdd..adfd0e46 100644 --- a/node_cli/configs/__init__.py +++ b/node_cli/configs/__init__.py @@ -19,8 +19,8 @@ import os import sys -from node_cli.utils.global_config import read_g_config +from node_cli.utils.global_config import read_g_config GLOBAL_SKALE_DIR = os.getenv('GLOBAL_SKALE_DIR') or '/etc/skale' GLOBAL_SKALE_CONF_FILENAME = 'conf.json' @@ -146,7 +146,7 @@ def _get_env(): SKALE_NODE_REPO_URL = 'https://github.com/skalenetwork/skale-node.git' DOCKER_LVMPY_REPO_URL = 'https://github.com/skalenetwork/docker-lvmpy.git' -DOCKER_DEAMON_CONFIG_PATH = '/etc/docker/daemon.json' +DOCKER_DAEMON_CONFIG_PATH = '/etc/docker/daemon.json' DOCKER_DAEMON_HOSTS = ('fd://', 'unix:///var/run/skale/docker.sock') DOCKER_SERVICE_CONFIG_DIR = '/etc/systemd/system/docker.service.d' DOCKER_SERVICE_CONFIG_PATH = '/etc/systemd/system/docker.service.d/no-host.conf' diff --git a/node_cli/core/docker_config.py b/node_cli/core/docker_config.py index 15d6d9de..c6102eec 100644 --- a/node_cli/core/docker_config.py +++ b/node_cli/core/docker_config.py @@ -9,8 +9,8 @@ from typing import Optional, Tuple from node_cli.configs import ( + DOCKER_DAEMON_CONFIG_PATH, DOCKER_DAEMON_HOSTS, - DOCKER_DEAMON_CONFIG_PATH, DOCKER_SERVICE_CONFIG_DIR, DOCKER_SERVICE_CONFIG_PATH, DOCKER_SOCKET_PATH, @@ -107,7 +107,7 @@ def ensure_service_overriden_config( def ensure_docker_daemon_config( - daemon_config_path: Path = DOCKER_DEAMON_CONFIG_PATH, daemon_hosts: Path = DOCKER_DAEMON_HOSTS + daemon_config_path: Path = DOCKER_DAEMON_CONFIG_PATH, daemon_hosts: Path = DOCKER_DAEMON_HOSTS ) -> None: 
logger.info('Ensuring docker daemon config') config = {} @@ -191,22 +191,19 @@ def remove_docker_service_override_config() -> None: def reset_docker_daemon_config() -> None: - if os.path.isfile(DOCKER_DEAMON_CONFIG_PATH): + if os.path.isfile(DOCKER_DAEMON_CONFIG_PATH): logger.info('Resetting docker daemon config') - with open(DOCKER_DEAMON_CONFIG_PATH, 'r') as daemon_config: + with open(DOCKER_DAEMON_CONFIG_PATH, 'r') as daemon_config: config = json.load(daemon_config) - # Remove the keys we added config.pop('live-restore', None) config.pop('hosts', None) if config: - # Write back remaining config - with open(DOCKER_DEAMON_CONFIG_PATH, 'w') as daemon_config: + with open(DOCKER_DAEMON_CONFIG_PATH, 'w') as daemon_config: json.dump(config, daemon_config) else: - # Remove file if empty - os.remove(DOCKER_DEAMON_CONFIG_PATH) + os.remove(DOCKER_DAEMON_CONFIG_PATH) def remove_node_docker_config() -> None: diff --git a/node_cli/core/schains.py b/node_cli/core/schains.py index f61c29f5..eb420fdb 100644 --- a/node_cli/core/schains.py +++ b/node_cli/core/schains.py @@ -61,8 +61,6 @@ class NoDataDirForChainError(Exception): """Raised when no data directory is found""" - pass - def get_schain_firewall_rules(schain: str) -> None: status, payload = get_request( From 436f2b19d652844cffdcd48bbe82f26c7820c8b8 Mon Sep 17 00:00:00 2001 From: badrogger Date: Wed, 2 Jul 2025 17:29:27 +0100 Subject: [PATCH 124/332] Fix tests --- tests/mirage/mirage_node_test.py | 90 +++----------------------------- 1 file changed, 7 insertions(+), 83 deletions(-) diff --git a/tests/mirage/mirage_node_test.py b/tests/mirage/mirage_node_test.py index f8cc3678..f4f9d340 100644 --- a/tests/mirage/mirage_node_test.py +++ b/tests/mirage/mirage_node_test.py @@ -156,7 +156,7 @@ def test_cleanup_success( resource_alloc, meta_file_v3, ): - mock_env = {'ENV_TYPE': 'devnet', 'SCHAIN_NAME': 'test_chain'} + mock_env = {'ENV_TYPE': 'devnet'} mock_compose_env.return_value = mock_env cleanup() @@ -164,7 +164,7 @@ def 
test_cleanup_success( mock_compose_env.assert_called_once_with( SKALE_DIR_ENV_FILEPATH, save=False, node_type=NodeType.MIRAGE ) - mock_cleanup_mirage_op.assert_called_once_with(mock_env, 'test_chain') + mock_cleanup_mirage_op.assert_called_once_with(mock_env) mock_cleanup_docker_config.assert_called_once() @@ -183,10 +183,9 @@ def test_cleanup_calls_operations_in_correct_order( ): from node_cli.mirage.mirage_node import cleanup - mock_env = {'ENV_TYPE': 'devnet', 'SCHAIN_NAME': 'test_chain'} + mock_env = {'ENV_TYPE': 'devnet'} mock_compose_env.return_value = mock_env - # Create a mock manager to track call order manager = mock.Mock() manager.attach_mock(mock_compose_env, 'compose_env') manager.attach_mock(mock_cleanup_mirage_op, 'cleanup_mirage_op') @@ -194,44 +193,14 @@ def test_cleanup_calls_operations_in_correct_order( cleanup() - # Verify the order of calls expected_calls = [ mock.call.compose_env(mock.ANY, save=False, node_type=mock.ANY), - mock.call.cleanup_mirage_op(mock_env, 'test_chain'), + mock.call.cleanup_mirage_op(mock_env), mock.call.cleanup_docker_config(), ] manager.assert_has_calls(expected_calls, any_order=False) -@mock.patch('node_cli.utils.decorators.is_user_valid', return_value=True) -@mock.patch('node_cli.mirage.mirage_node.cleanup_docker_configuration') -@mock.patch('node_cli.mirage.mirage_node.cleanup_mirage_op') -@mock.patch('node_cli.mirage.mirage_node.compose_node_env') -def test_cleanup_with_different_chain_names( - mock_compose_env, - mock_cleanup_mirage_op, - mock_cleanup_docker_config, - mock_is_user_valid, - inited_node, - resource_alloc, - meta_file_v3, -): - test_cases = [ - 'simple_chain', - 'chain-with-hyphens', - 'chain_with_underscores', - 'ChainWithMixedCase', - ] - - for chain_name in test_cases: - mock_env = {'ENV_TYPE': 'devnet', 'SCHAIN_NAME': chain_name} - mock_compose_env.return_value = mock_env - - cleanup() - - mock_cleanup_mirage_op.assert_called_with(mock_env, chain_name) - - 
@mock.patch('node_cli.utils.decorators.is_user_valid', return_value=True) @mock.patch('node_cli.mirage.mirage_node.cleanup_docker_configuration') @mock.patch( @@ -247,17 +216,14 @@ def test_cleanup_continues_after_mirage_op_error( resource_alloc, meta_file_v3, ): - mock_env = {'ENV_TYPE': 'devnet', 'SCHAIN_NAME': 'test_chain'} + mock_env = {'ENV_TYPE': 'devnet'} mock_compose_env.return_value = mock_env - # The function should raise the exception from cleanup_mirage_op with pytest.raises(Exception, match='Cleanup failed'): cleanup() - # But we should still verify the calls were made in order mock_compose_env.assert_called_once() - mock_cleanup_mirage_op.assert_called_once_with(mock_env, 'test_chain') - # cleanup_docker_configuration should not be called if cleanup_mirage_op fails + mock_cleanup_mirage_op.assert_called_once_with(mock_env) mock_cleanup_docker_config.assert_not_called() @@ -299,7 +265,7 @@ def test_cleanup_logs_success_message( resource_alloc, meta_file_v3, ): - mock_env = {'ENV_TYPE': 'devnet', 'SCHAIN_NAME': 'test_chain'} + mock_env = {'ENV_TYPE': 'devnet'} mock_compose_env.return_value = mock_env cleanup() @@ -307,45 +273,3 @@ def test_cleanup_logs_success_message( mock_logger.info.assert_called_once_with( 'Mirage node was cleaned up, all containers and data removed' ) - - -@mock.patch('node_cli.utils.decorators.is_user_valid', return_value=True) -@mock.patch('node_cli.mirage.mirage_node.cleanup_docker_configuration') -@mock.patch('node_cli.mirage.mirage_node.cleanup_mirage_op') -@mock.patch('node_cli.mirage.mirage_node.compose_node_env') -def test_cleanup_with_missing_schain_name( - mock_compose_env, - mock_cleanup_mirage_op, - mock_cleanup_docker_config, - mock_is_user_valid, - inited_node, - resource_alloc, - meta_file_v3, -): - mock_env = {'ENV_TYPE': 'devnet'} # Missing SCHAIN_NAME - mock_compose_env.return_value = mock_env - - with pytest.raises(KeyError): - cleanup() - - -@mock.patch('node_cli.utils.decorators.is_user_valid', 
return_value=True) -@mock.patch('node_cli.mirage.mirage_node.cleanup_docker_configuration') -@mock.patch('node_cli.mirage.mirage_node.cleanup_mirage_op') -@mock.patch('node_cli.mirage.mirage_node.compose_node_env') -def test_cleanup_with_empty_schain_name( - mock_compose_env, - mock_cleanup_mirage_op, - mock_cleanup_docker_config, - mock_is_user_valid, - inited_node, - resource_alloc, - meta_file_v3, -): - mock_env = {'ENV_TYPE': 'devnet', 'SCHAIN_NAME': ''} - mock_compose_env.return_value = mock_env - - cleanup() - - mock_cleanup_mirage_op.assert_called_once_with(mock_env, '') - mock_cleanup_docker_config.assert_called_once() From 0725c6676d8f4802eceb6141ddcd9cbe757dde3d Mon Sep 17 00:00:00 2001 From: badrogger Date: Wed, 2 Jul 2025 18:45:18 +0100 Subject: [PATCH 125/332] Fix cleanup --- node_cli/core/docker_config.py | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/node_cli/core/docker_config.py b/node_cli/core/docker_config.py index c6102eec..67cf3d86 100644 --- a/node_cli/core/docker_config.py +++ b/node_cli/core/docker_config.py @@ -4,6 +4,7 @@ import logging import os import pathlib +import shutil import time import typing from typing import Optional, Tuple @@ -214,7 +215,7 @@ def remove_node_docker_config() -> None: def remove_skale_run_dir() -> None: if os.path.isdir(SKALE_RUN_DIR): - os.rmdir(SKALE_RUN_DIR) + shutil.rmtree(SKALE_RUN_DIR) logger.info('Removed SKALE run directory') @@ -227,6 +228,4 @@ def cleanup_docker_configuration() -> None: remove_node_docker_config() remove_skale_run_dir() restart_docker_service() - wait_for_socket_initialization() - logger.info('Docker configuration cleanup finished') From bbed327f374e4891f601851aadae37c34718ae2b Mon Sep 17 00:00:00 2001 From: badrogger Date: Thu, 3 Jul 2025 21:39:12 +0100 Subject: [PATCH 126/332] Add mirage register command --- node_cli/cli/mirage_node.py | 19 ++++++++++++----- node_cli/core/wallet.py | 4 +++- node_cli/mirage/mirage_node.py | 26 ++++++++++++++++++++++-- 
node_cli/mirage/wallet.py | 37 ++++++++++++++++++++++++++++++++++ text.yml | 7 +++++++ 5 files changed, 85 insertions(+), 8 deletions(-) create mode 100644 node_cli/mirage/wallet.py diff --git a/node_cli/cli/mirage_node.py b/node_cli/cli/mirage_node.py index 4d9c846c..6e9d5e93 100644 --- a/node_cli/cli/mirage_node.py +++ b/node_cli/cli/mirage_node.py @@ -21,8 +21,9 @@ from node_cli.core.node import backup, get_node_info, get_node_signature from node_cli.mirage.mirage_node import cleanup as mirage_cleanup -from node_cli.mirage.mirage_node import migrate_from_boot, request_repair, restore_mirage -from node_cli.utils.helper import URL_TYPE, abort_if_false, error_exit, streamed_cmd +from node_cli.mirage.mirage_node import migrate_from_boot, register, request_repair, restore_mirage +from node_cli.mirage.wallet import get_wallet_info +from node_cli.utils.helper import IP_TYPE, URL_TYPE, abort_if_false, error_exit, streamed_cmd from node_cli.utils.texts import safe_load_texts TEXTS = safe_load_texts() @@ -49,9 +50,11 @@ def init_node(): click.echo("Placeholder: Command 'mirage node init' is not yet implemented.") -@node.command('register', help='Register Mirage node (Placeholder for regular operations).') -def register_node(): - click.echo("Placeholder: Command 'mirage node register' is not yet implemented.") +@node.command('register', help=TEXTS['mirage']['node']['register']['help']) +@click.option('--name', '-n', required=True, help=TEXTS['mirage']['node']['register']['name']) +@click.option('--ip', required=True, type=IP_TYPE, help=TEXTS['mirage']['node']['register']['ip']) +def register_node(name: str, ip: str) -> None: + register(name=name, ip=ip) @node.command('update', help='Update Mirage.') @@ -144,3 +147,9 @@ def repair(snapshot_from: str = '') -> None: @streamed_cmd def cleanup_node(): mirage_cleanup() + + +@node.command('info', help='Get info about MIRAGE node wallet') +@click.option('--format', '-f', type=click.Choice(['json', 'text'])) +def 
wallet_info(format): + get_wallet_info(format) diff --git a/node_cli/core/wallet.py b/node_cli/core/wallet.py index 13f980e0..f4a5db6d 100644 --- a/node_cli/core/wallet.py +++ b/node_cli/core/wallet.py @@ -18,11 +18,13 @@ # along with this program. If not, see . import json +import logging from node_cli.utils.print_formatters import print_wallet_info, TEXTS -from node_cli.utils.helper import error_exit, get_request, post_request, logger +from node_cli.utils.helper import error_exit, get_request, post_request from node_cli.utils.exit_codes import CLIExitCodes +logger = logging.getLogger(__name__) BLUEPRINT_NAME = 'wallet' diff --git a/node_cli/mirage/mirage_node.py b/node_cli/mirage/mirage_node.py index 34e5aff8..3511ac11 100644 --- a/node_cli/mirage/mirage_node.py +++ b/node_cli/mirage/mirage_node.py @@ -24,7 +24,7 @@ from node_cli.configs import RESTORE_SLEEP_TIMEOUT, SKALE_DIR from node_cli.configs.user import SKALE_DIR_ENV_FILEPATH from node_cli.core.docker_config import cleanup_docker_configuration -from node_cli.core.host import save_env_params +from node_cli.core.host import is_node_inited, save_env_params from node_cli.core.node import compose_node_env, is_base_containers_alive from node_cli.mirage.record.chain_record import get_mirage_chain_record from node_cli.operations import ( @@ -35,7 +35,7 @@ ) from node_cli.utils.decorators import check_inited, check_not_inited, check_user from node_cli.utils.exit_codes import CLIExitCodes -from node_cli.utils.helper import error_exit +from node_cli.utils.helper import error_exit, post_request from node_cli.utils.node_type import NodeType from node_cli.utils.print_formatters import print_node_cmd_error from node_cli.utils.texts import safe_load_texts @@ -43,6 +43,8 @@ logger = logging.getLogger(__name__) TEXTS = safe_load_texts() +NODE_BLUEPRINT_NAME = 'mirage-node' + @check_not_inited def restore_mirage(backup_path, env_filepath, config_only=False): @@ -95,3 +97,23 @@ def cleanup() -> None: cleanup_mirage_op(env) 
logger.info('Mirage node was cleaned up, all containers and data removed') cleanup_docker_configuration() + + +@check_inited +@check_user +def register(name: str, ip: str) -> None: + if not is_node_inited(): + print(TEXTS['mirage']['node']['not_inited']) + return + + # todo: add name, ips and port checks + json_data = {'name': name, 'ip': ip} + status, payload = post_request(blueprint=NODE_BLUEPRINT_NAME, method='register', json=json_data) + if status == 'ok': + msg = TEXTS['mirage']['node']['registered'] + logger.info(msg) + print(msg) + else: + error_msg = payload + logger.error(f'Registration error {error_msg}') + error_exit(error_msg, exit_code=CLIExitCodes.BAD_API_RESPONSE) diff --git a/node_cli/mirage/wallet.py b/node_cli/mirage/wallet.py new file mode 100644 index 00000000..62548ca6 --- /dev/null +++ b/node_cli/mirage/wallet.py @@ -0,0 +1,37 @@ +# -*- coding: utf-8 -*- +# +# This file is part of node-cli +# +# Copyright (C) 2025-Present SKALE Labs +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Affero General Public License for more details. +# +# You should have received a copy of the GNU Affero General Public License +# along with this program. If not, see . 
+ +import json + +from node_cli.utils.exit_codes import CLIExitCodes +from node_cli.utils.helper import error_exit, get_request +from node_cli.utils.print_formatters import print_wallet_info + +BLUEPRINT_NAME = 'wallet' + + +def get_wallet_info(_format): + status, payload = get_request(BLUEPRINT_NAME, 'mirage-info') + if status == 'ok': + if _format == 'json': + print(json.dumps(payload)) + else: + print_wallet_info(payload) + else: + error_exit(payload, exit_code=CLIExitCodes.BAD_API_RESPONSE) diff --git a/text.yml b/text.yml index 004dfced..2879b312 100644 --- a/text.yml +++ b/text.yml @@ -84,3 +84,10 @@ mirage: warning: Are you sure you want to repair mirage chain node? In rare cases may cause data loss and require additional maintenance snapshot_from: IP of the node to take snapshot from repair_requested: Repair mode is requested + not_inited: Node should be initialized to proceed with operation + + registered: Node is registered in mirage manager. + register: + help: Register node in mirage manager + name: Name of the node in mirage manager + ip: IP address of the node in mirage manager From 68d11d9c4f5241c7f2c1c5bf2bcbb57128590dd5 Mon Sep 17 00:00:00 2001 From: badrogger Date: Fri, 4 Jul 2025 23:43:45 +0100 Subject: [PATCH 127/332] Add mirage init and update --- node_cli/cli/mirage_node.py | 31 +++++++------ node_cli/mirage/mirage_boot.py | 7 ++- node_cli/mirage/mirage_node.py | 36 +++++++++++++++ node_cli/operations/__init__.py | 2 +- node_cli/operations/mirage.py | 78 ++++++++++++++++++++++++++++++++- 5 files changed, 134 insertions(+), 20 deletions(-) diff --git a/node_cli/cli/mirage_node.py b/node_cli/cli/mirage_node.py index 4d9c846c..9f8e58d3 100644 --- a/node_cli/cli/mirage_node.py +++ b/node_cli/cli/mirage_node.py @@ -21,7 +21,9 @@ from node_cli.core.node import backup, get_node_info, get_node_signature from node_cli.mirage.mirage_node import cleanup as mirage_cleanup +from node_cli.mirage.mirage_node import init as init_mirage from 
node_cli.mirage.mirage_node import migrate_from_boot, request_repair, restore_mirage +from node_cli.mirage.mirage_node import update as update_mirage from node_cli.utils.helper import URL_TYPE, abort_if_false, error_exit, streamed_cmd from node_cli.utils.texts import safe_load_texts @@ -44,17 +46,16 @@ def mirage_node_info(format): get_node_info(format) -@node.command('init', help='Initialize regular Mirage node operations (Placeholder).') -def init_node(): - click.echo("Placeholder: Command 'mirage node init' is not yet implemented.") - - -@node.command('register', help='Register Mirage node (Placeholder for regular operations).') -def register_node(): - click.echo("Placeholder: Command 'mirage node register' is not yet implemented.") +@node.command('init', help='Initialize regular Mirage node') +@click.argument('env_filepath') +@streamed_cmd +def init_node(env_filepath: str): + init_mirage(env_filepath=env_filepath) -@node.command('update', help='Update Mirage.') +@node.command('update', help='Update Mirage node') +@click.argument('env_filepath') +@click.option('--unsafe', 'unsafe_ok', help='Allow unsafe update', hidden=True, is_flag=True) @click.option( '--yes', is_flag=True, @@ -63,11 +64,13 @@ def register_node(): prompt='Are you sure you want to update Mirage node software?', ) @click.option('--pull-config', 'pull_config_for_schain', hidden=True, type=str) -@click.option('--unsafe', 'unsafe_ok', help='Allow unsafe update', hidden=True, is_flag=True) -@click.argument('env_file') -@streamed_cmd -def update_node(env_file, pull_config_for_schain, unsafe_ok): - click.echo("Placeholder: Command 'mirage node update' is not yet implemented.") +def update_node(env_filepath: str, pull_config_for_schain): + update_mirage(env_filepath=env_filepath, pull_config_for_schain=pull_config_for_schain) + + +@node.command('register', help='Register Mirage node (Placeholder for regular operations).') +def register_node(): + click.echo("Placeholder: Command 'mirage node register' 
is not yet implemented.") @node.command('signature', help='Get mirage node signature for a validator ID.') diff --git a/node_cli/mirage/mirage_boot.py b/node_cli/mirage/mirage_boot.py index 5d6586b9..1bdc6f5a 100644 --- a/node_cli/mirage/mirage_boot.py +++ b/node_cli/mirage/mirage_boot.py @@ -23,14 +23,13 @@ from node_cli.configs import TM_INIT_TIMEOUT from node_cli.core.node import compose_node_env, is_base_containers_alive -from node_cli.operations import init_mirage_boot_op, update_mirage_boot_op -from node_cli.utils.decorators import check_not_inited, check_inited, check_user +from node_cli.operations import init_mirage_op, update_mirage_boot_op +from node_cli.utils.decorators import check_inited, check_not_inited, check_user from node_cli.utils.exit_codes import CLIExitCodes from node_cli.utils.helper import error_exit from node_cli.utils.node_type import NodeType from node_cli.utils.print_formatters import print_node_cmd_error - logger = logging.getLogger(__name__) @@ -42,7 +41,7 @@ def init(env_filepath: str) -> None: is_mirage_boot=True, ) - init_mirage_boot_op(env_filepath, env) + init_mirage_op(env_filepath, env, is_boot=True) logger.info('Waiting for mirage containers initialization') time.sleep(TM_INIT_TIMEOUT) if not is_base_containers_alive(node_type=NodeType.MIRAGE, is_mirage_boot=True): diff --git a/node_cli/mirage/mirage_node.py b/node_cli/mirage/mirage_node.py index 34e5aff8..d0c9cad0 100644 --- a/node_cli/mirage/mirage_node.py +++ b/node_cli/mirage/mirage_node.py @@ -30,6 +30,7 @@ from node_cli.operations import ( MirageUpdateType, cleanup_mirage_op, + init_mirage_op, restore_mirage_op, update_mirage_op, ) @@ -80,6 +81,26 @@ def migrate_from_boot( logger.info('Migration from boot to mirage completed successfully') +@check_inited +@check_user +def update(env_filepath: str, pull_config_for_schain: str | None = None) -> None: + logger.info('Updating mirage node...') + env = compose_node_env( + env_filepath, + inited_node=True, + sync_schains=False, 
+ node_type=NodeType.MIRAGE, + pull_config_for_schain=pull_config_for_schain, + ) + update_ok = update_mirage_op(env_filepath, env, update_type=MirageUpdateType.REGULAR) + alive = is_base_containers_alive(node_type=NodeType.MIRAGE) + if not update_ok or not alive: + print_node_cmd_error() + return + else: + logger.info('Mirage update completed successfully') + + def request_repair(snapshot_from: str = '') -> None: env = compose_node_env(SKALE_DIR_ENV_FILEPATH, save=False, node_type=NodeType.MIRAGE) record = get_mirage_chain_record(env) @@ -95,3 +116,18 @@ def cleanup() -> None: cleanup_mirage_op(env) logger.info('Mirage node was cleaned up, all containers and data removed') cleanup_docker_configuration() + + +@check_not_inited +def init(env_filepath: str) -> None: + env = compose_node_env(env_filepath, node_type=NodeType.MIRAGE) + if env is None: + return + save_env_params(env_filepath) + env['SKALE_DIR'] = SKALE_DIR + + init_ok = init_mirage_op(env_filepath, env) + if not init_ok: + error_exit('Init operation failed', exit_code=CLIExitCodes.OPERATION_EXECUTION_ERROR) + time.sleep(RESTORE_SLEEP_TIMEOUT) + print('Mirage node is initialized') diff --git a/node_cli/operations/__init__.py b/node_cli/operations/__init__.py index 44786972..c04452b2 100644 --- a/node_cli/operations/__init__.py +++ b/node_cli/operations/__init__.py @@ -21,7 +21,6 @@ update as update_op, init as init_op, init_sync as init_sync_op, - init_mirage_boot as init_mirage_boot_op, update_mirage_boot as update_mirage_boot_op, update_sync as update_sync_op, turn_off as turn_off_op, @@ -31,6 +30,7 @@ configure_nftables, ) from node_cli.operations.mirage import ( # noqa + init as init_mirage_op, update_mirage as update_mirage_op, MirageUpdateType, restore_mirage as restore_mirage_op, diff --git a/node_cli/operations/mirage.py b/node_cli/operations/mirage.py index f45b5a6d..b30c5038 100644 --- a/node_cli/operations/mirage.py +++ b/node_cli/operations/mirage.py @@ -39,11 +39,12 @@ from 
node_cli.migrations.mirage.from_boot import migrate_nftables_from_boot from node_cli.mirage.record.chain_record import migrate_chain_record from node_cli.operations.base import checked_host, turn_off -from node_cli.operations.common import unpack_backup_archive +from node_cli.operations.common import configure_filebeat, configure_flask, unpack_backup_archive from node_cli.operations.config_repo import ( sync_skale_node, update_images, ) +from node_cli.operations.volume import cleanup_volume_artifacts, prepare_block_device from node_cli.utils.docker_utils import ( REDIS_SERVICE_DICT, REDIS_START_TIMEOUT, @@ -67,6 +68,81 @@ class MirageUpdateType(Enum): FROM_BOOT = 'from_boot' +@checked_host +def init(env_filepath: str, env: dict) -> bool: + sync_skale_node() + ensure_btrfs_kernel_module_autoloaded() + cleanup_volume_artifacts(env['DISK_MOUNTPOINT']) + + if env.get('SKIP_DOCKER_CONFIG') != 'True': + configure_docker() + + configure_nftables() + configure_filebeat() + configure_flask() + generate_nginx_config() + + prepare_host(env_filepath, env_type=env['ENV_TYPE']) + link_env_file() + + prepare_block_device(env['DISK_MOUNTPOINT'], force=env['ENFORCE_BTRFS'] == 'True') + + meta_manager = MirageCliMetaManager() + meta_manager.update_meta( + VERSION, + env['CONTAINER_CONFIGS_STREAM'], + distro.id(), + distro.version(), + ) + update_images(env=env, node_type=NodeType.MIRAGE) + compose_up(env=env, node_type=NodeType.MIRAGE) + wait_for_container(REDIS_SERVICE_DICT['redis']) + time.sleep(REDIS_START_TIMEOUT) + return True + + +@checked_host +def update_mirage_boot(env_filepath: str, env: dict) -> bool: + compose_rm(node_type=NodeType.MIRAGE, env=env) + remove_dynamic_containers() + cleanup_volume_artifacts(env['DISK_MOUNTPOINT']) + + sync_skale_node() + ensure_btrfs_kernel_module_autoloaded() + + if env.get('SKIP_DOCKER_CONFIG') != 'True': + configure_docker() + + enable_monitoring = str_to_bool(env.get('MONITORING_CONTAINERS', 'False')) + 
configure_nftables(enable_monitoring=enable_monitoring) + + generate_nginx_config() + prepare_block_device(env['DISK_MOUNTPOINT'], force=env['ENFORCE_BTRFS'] == 'True') + + prepare_host(env_filepath, env['ENV_TYPE']) + + meta_manager = MirageCliMetaManager() + current_stream = meta_manager.get_meta_info().config_stream + skip_cleanup = env.get('SKIP_DOCKER_CLEANUP') == 'True' + if not skip_cleanup and current_stream != env['CONTAINER_CONFIGS_STREAM']: + logger.info( + 'Stream version was changed from %s to %s', + current_stream, + env['CONTAINER_CONFIGS_STREAM'], + ) + docker_cleanup() + + meta_manager.update_meta( + VERSION, + env['CONTAINER_CONFIGS_STREAM'], + distro.id(), + distro.version(), + ) + update_images(env=env, node_type=NodeType.MIRAGE) + compose_up(env=env, node_type=NodeType.MIRAGE, is_mirage_boot=True) + return True + + @checked_host def update_mirage(env_filepath: str, env: dict, update_type: MirageUpdateType) -> bool: compose_rm(node_type=NodeType.MIRAGE, env=env) From e9ce63ae70ab068cf8d3cb72a95a21f8b0a331cf Mon Sep 17 00:00:00 2001 From: badrogger Date: Sat, 5 Jul 2025 00:00:34 +0100 Subject: [PATCH 128/332] Add streamed cmd to mirage node update --- node_cli/cli/mirage_node.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/node_cli/cli/mirage_node.py b/node_cli/cli/mirage_node.py index 9f8e58d3..269b5283 100644 --- a/node_cli/cli/mirage_node.py +++ b/node_cli/cli/mirage_node.py @@ -55,7 +55,6 @@ def init_node(env_filepath: str): @node.command('update', help='Update Mirage node') @click.argument('env_filepath') -@click.option('--unsafe', 'unsafe_ok', help='Allow unsafe update', hidden=True, is_flag=True) @click.option( '--yes', is_flag=True, @@ -64,6 +63,7 @@ def init_node(env_filepath: str): prompt='Are you sure you want to update Mirage node software?', ) @click.option('--pull-config', 'pull_config_for_schain', hidden=True, type=str) +@streamed_cmd def update_node(env_filepath: str, pull_config_for_schain): 
update_mirage(env_filepath=env_filepath, pull_config_for_schain=pull_config_for_schain) From 71709071be8568668c01e0d0095e6dae0aba8d4b Mon Sep 17 00:00:00 2001 From: badrogger Date: Sat, 5 Jul 2025 16:18:51 +0100 Subject: [PATCH 129/332] Fix routes --- node_cli/configs/routes.py | 2 +- node_cli/mirage/mirage_boot.py | 4 +-- node_cli/operations/__init__.py | 1 + node_cli/utils/helper.py | 47 ++++++++++++++------------------- tests/cli/mirage_cli_test.py | 39 +++++++++------------------ 5 files changed, 36 insertions(+), 57 deletions(-) diff --git a/node_cli/configs/routes.py b/node_cli/configs/routes.py index 967881c3..acdf0118 100644 --- a/node_cli/configs/routes.py +++ b/node_cli/configs/routes.py @@ -19,7 +19,6 @@ import os - CURRENT_API_VERSION = 'v1' API_PREFIX = '/api' @@ -41,6 +40,7 @@ 'schains': ['config', 'list', 'dkg-statuses', 'firewall-rules', 'repair', 'get'], 'ssl': ['status', 'upload'], 'wallet': ['info', 'send-eth'], + 'mirage-node': ['info', 'register'], } } diff --git a/node_cli/mirage/mirage_boot.py b/node_cli/mirage/mirage_boot.py index 1bdc6f5a..31c95cb9 100644 --- a/node_cli/mirage/mirage_boot.py +++ b/node_cli/mirage/mirage_boot.py @@ -23,7 +23,7 @@ from node_cli.configs import TM_INIT_TIMEOUT from node_cli.core.node import compose_node_env, is_base_containers_alive -from node_cli.operations import init_mirage_op, update_mirage_boot_op +from node_cli.operations import init_mirage_boot_op, update_mirage_boot_op from node_cli.utils.decorators import check_inited, check_not_inited, check_user from node_cli.utils.exit_codes import CLIExitCodes from node_cli.utils.helper import error_exit @@ -41,7 +41,7 @@ def init(env_filepath: str) -> None: is_mirage_boot=True, ) - init_mirage_op(env_filepath, env, is_boot=True) + init_mirage_boot_op(env_filepath, env) logger.info('Waiting for mirage containers initialization') time.sleep(TM_INIT_TIMEOUT) if not is_base_containers_alive(node_type=NodeType.MIRAGE, is_mirage_boot=True): diff --git 
a/node_cli/operations/__init__.py b/node_cli/operations/__init__.py index c04452b2..12c9ef63 100644 --- a/node_cli/operations/__init__.py +++ b/node_cli/operations/__init__.py @@ -21,6 +21,7 @@ update as update_op, init as init_op, init_sync as init_sync_op, + init_mirage_boot as init_mirage_boot_op, update_mirage_boot as update_mirage_boot_op, update_sync as update_sync_op, turn_off as turn_off_op, diff --git a/node_cli/utils/helper.py b/node_cli/utils/helper.py index c9b21d9f..32269dfa 100644 --- a/node_cli/utils/helper.py +++ b/node_cli/utils/helper.py @@ -17,58 +17,51 @@ # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see . +import distutils +import distutils.util import ipaddress import json +import logging +import logging.handlers as py_handlers import os import re -import socket -import sys -import uuid -from urllib.parse import urlparse -from typing import Any, Optional, NoReturn - -import yaml import shutil -import requests +import socket import subprocess -import urllib.request - +import sys import urllib.parse +import urllib.request +import uuid from functools import wraps - -import logging from logging import Formatter, StreamHandler -import logging.handlers as py_handlers - -import distutils -import distutils.util +from typing import Any, NoReturn, Optional +from urllib.parse import urlparse import click - +import requests +import yaml from jinja2 import Environment -from node_cli.utils.print_formatters import print_err_response -from node_cli.utils.exit_codes import CLIExitCodes from node_cli.configs import ( ADMIN_HOST, ADMIN_PORT, - HIDE_STREAM_LOG, - GLOBAL_SKALE_DIR, - GLOBAL_SKALE_CONF_FILEPATH, DEFAULT_SSH_PORT, + GLOBAL_SKALE_CONF_FILEPATH, + GLOBAL_SKALE_DIR, + HIDE_STREAM_LOG, ) -from node_cli.configs.routes import get_route -from node_cli.utils.global_config import read_g_config, get_system_user - from node_cli.configs.cli_logger import ( + DEBUG_LOG_FILEPATH, FILE_LOG_FORMAT, 
LOG_BACKUP_COUNT, LOG_FILE_SIZE_BYTES, LOG_FILEPATH, STREAM_LOG_FORMAT, - DEBUG_LOG_FILEPATH, ) - +from node_cli.configs.routes import get_route +from node_cli.utils.exit_codes import CLIExitCodes +from node_cli.utils.global_config import get_system_user, read_g_config +from node_cli.utils.print_formatters import print_err_response logger = logging.getLogger(__name__) diff --git a/tests/cli/mirage_cli_test.py b/tests/cli/mirage_cli_test.py index 5072bd40..bfb11b03 100644 --- a/tests/cli/mirage_cli_test.py +++ b/tests/cli/mirage_cli_test.py @@ -1,21 +1,22 @@ -from click.testing import CliRunner -from unittest import mock import pathlib +from unittest import mock + +from click.testing import CliRunner -from node_cli.cli.mirage_node import ( - restore_node, - backup_node, - signature_node, - init_node as init_node_placeholder, - register_node as register_node_placeholder, - update_node as update_node_placeholder, - migrate_node, -) from node_cli.cli.mirage_boot import ( init_boot, register_boot, signature_boot, ) +from node_cli.cli.mirage_node import ( + backup_node, + migrate_node, + restore_node, + signature_node, +) +from node_cli.cli.mirage_node import ( + init_node as init_node_placeholder, +) @mock.patch('node_cli.cli.mirage_node.restore_mirage') @@ -92,22 +93,6 @@ def test_mirage_node_init_placeholder(): assert "Placeholder: Command 'mirage node init' is not yet implemented." in result.output -def test_mirage_node_register_placeholder(): - runner = CliRunner() - result = runner.invoke(register_node_placeholder, []) - - assert result.exit_code == 0, f'Output: {result.output}\nException: {result.exception}' - assert "Placeholder: Command 'mirage node register' is not yet implemented." 
in result.output - - -def test_mirage_node_update_placeholder(valid_env_file): - runner = CliRunner() - result = runner.invoke(update_node_placeholder, ['--yes', valid_env_file]) - - assert result.exit_code == 0, f'Output: {result.output}\nException: {result.exception}' - assert "Placeholder: Command 'mirage node update' is not yet implemented." in result.output - - @mock.patch('node_cli.cli.mirage_boot.register') def test_mirage_boot_register(mock_register_core): runner = CliRunner() From 8c0aab33be5a6a5dfe82161057ec314ff1ee21d2 Mon Sep 17 00:00:00 2001 From: badrogger Date: Sun, 6 Jul 2025 12:54:50 +0100 Subject: [PATCH 130/332] Fix tests --- node_cli/cli/mirage_node.py | 5 ++--- node_cli/configs/__init__.py | 1 + node_cli/mirage/mirage_node.py | 7 +++---- tests/cli/mirage_cli_test.py | 11 ----------- tests/routes_test.py | 10 ++++++---- 5 files changed, 12 insertions(+), 22 deletions(-) diff --git a/node_cli/cli/mirage_node.py b/node_cli/cli/mirage_node.py index 69163183..656ec907 100644 --- a/node_cli/cli/mirage_node.py +++ b/node_cli/cli/mirage_node.py @@ -56,10 +56,9 @@ def init_node(env_filepath: str): @node.command('register', help=TEXTS['mirage']['node']['register']['help']) -@click.option('--name', '-n', required=True, help=TEXTS['mirage']['node']['register']['name']) @click.option('--ip', required=True, type=IP_TYPE, help=TEXTS['mirage']['node']['register']['ip']) -def register(name: str, ip: str) -> None: - register_mirage(name=name, ip=ip) +def register(ip: str) -> None: + register_mirage(ip=ip) @node.command('update', help='Update Mirage node') diff --git a/node_cli/configs/__init__.py b/node_cli/configs/__init__.py index adfd0e46..a360ab9a 100644 --- a/node_cli/configs/__init__.py +++ b/node_cli/configs/__init__.py @@ -171,3 +171,4 @@ def _get_env(): UFW_IPV6_BEFORE_INPUT_CHAIN = 'ufw6-before-input' REDIS_URI: str = os.getenv('REDIS_URI', 'redis://@127.0.0.1:6379') +DEFAULT_SKALED_BASE_PORT: int = 10000 diff --git a/node_cli/mirage/mirage_node.py 
b/node_cli/mirage/mirage_node.py index 49be9c69..2407a317 100644 --- a/node_cli/mirage/mirage_node.py +++ b/node_cli/mirage/mirage_node.py @@ -21,7 +21,7 @@ import logging import time -from node_cli.configs import RESTORE_SLEEP_TIMEOUT, SKALE_DIR +from node_cli.configs import DEFAULT_SKALED_BASE_PORT, RESTORE_SLEEP_TIMEOUT, SKALE_DIR from node_cli.configs.user import SKALE_DIR_ENV_FILEPATH from node_cli.core.docker_config import cleanup_docker_configuration from node_cli.core.host import is_node_inited, save_env_params @@ -137,13 +137,12 @@ def init(env_filepath: str) -> None: @check_inited @check_user -def register(name: str, ip: str) -> None: +def register(ip: str) -> None: if not is_node_inited(): print(TEXTS['mirage']['node']['not_inited']) return - # todo: add name, ips and port checks - json_data = {'name': name, 'ip': ip} + json_data = {'ip': ip, 'port': DEFAULT_SKALED_BASE_PORT} status, payload = post_request(blueprint=NODE_BLUEPRINT_NAME, method='register', json=json_data) if status == 'ok': msg = TEXTS['mirage']['node']['registered'] diff --git a/tests/cli/mirage_cli_test.py b/tests/cli/mirage_cli_test.py index bfb11b03..d961aa56 100644 --- a/tests/cli/mirage_cli_test.py +++ b/tests/cli/mirage_cli_test.py @@ -14,9 +14,6 @@ restore_node, signature_node, ) -from node_cli.cli.mirage_node import ( - init_node as init_node_placeholder, -) @mock.patch('node_cli.cli.mirage_node.restore_mirage') @@ -85,14 +82,6 @@ def test_mirage_node_signature_error(mock_signature_core): assert error_msg in result.output -def test_mirage_node_init_placeholder(): - runner = CliRunner() - result = runner.invoke(init_node_placeholder, []) - - assert result.exit_code == 0, f'Output: {result.output}\nException: {result.exception}' - assert "Placeholder: Command 'mirage node init' is not yet implemented." 
in result.output - - @mock.patch('node_cli.cli.mirage_boot.register') def test_mirage_boot_register(mock_register_core): runner = CliRunner() diff --git a/tests/routes_test.py b/tests/routes_test.py index ce0adc4e..2bf8ffd2 100644 --- a/tests/routes_test.py +++ b/tests/routes_test.py @@ -1,12 +1,12 @@ import pytest + from node_cli.configs.routes import ( - route_exists, - get_route, - get_all_available_routes, RouteNotFoundException, + get_all_available_routes, + get_route, + route_exists, ) - ALL_V1_ROUTES = [ '/api/v1/node/info', '/api/v1/node/register', @@ -31,6 +31,8 @@ '/api/v1/ssl/upload', '/api/v1/wallet/info', '/api/v1/wallet/send-eth', + '/api/v1/mirage-node/info', + '/api/v1/mirage-node/register', ] From 45927da09b38e3ac8e6044edd4116adf0073571d Mon Sep 17 00:00:00 2001 From: badrogger Date: Mon, 7 Jul 2025 16:00:49 +0100 Subject: [PATCH 131/332] Remove mirage node wallet in favour of mirage wallet info --- node_cli/cli/mirage_node.py | 7 ------- node_cli/mirage/wallet.py | 37 ------------------------------------- 2 files changed, 44 deletions(-) delete mode 100644 node_cli/mirage/wallet.py diff --git a/node_cli/cli/mirage_node.py b/node_cli/cli/mirage_node.py index 656ec907..29d1d281 100644 --- a/node_cli/cli/mirage_node.py +++ b/node_cli/cli/mirage_node.py @@ -25,7 +25,6 @@ from node_cli.mirage.mirage_node import migrate_from_boot, request_repair, restore_mirage from node_cli.mirage.mirage_node import register as register_mirage from node_cli.mirage.mirage_node import update as update_mirage -from node_cli.mirage.wallet import get_wallet_info from node_cli.utils.helper import IP_TYPE, URL_TYPE, abort_if_false, error_exit, streamed_cmd from node_cli.utils.texts import safe_load_texts @@ -150,9 +149,3 @@ def repair(snapshot_from: str = '') -> None: @streamed_cmd def cleanup_node(): mirage_cleanup() - - -@node.command('info', help='Get info about MIRAGE node wallet') -@click.option('--format', '-f', type=click.Choice(['json', 'text'])) -def 
wallet_info(format): - get_wallet_info(format) diff --git a/node_cli/mirage/wallet.py b/node_cli/mirage/wallet.py deleted file mode 100644 index 62548ca6..00000000 --- a/node_cli/mirage/wallet.py +++ /dev/null @@ -1,37 +0,0 @@ -# -*- coding: utf-8 -*- -# -# This file is part of node-cli -# -# Copyright (C) 2025-Present SKALE Labs -# -# This program is free software: you can redistribute it and/or modify -# it under the terms of the GNU Affero General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU Affero General Public License for more details. -# -# You should have received a copy of the GNU Affero General Public License -# along with this program. If not, see . - -import json - -from node_cli.utils.exit_codes import CLIExitCodes -from node_cli.utils.helper import error_exit, get_request -from node_cli.utils.print_formatters import print_wallet_info - -BLUEPRINT_NAME = 'wallet' - - -def get_wallet_info(_format): - status, payload = get_request(BLUEPRINT_NAME, 'mirage-info') - if status == 'ok': - if _format == 'json': - print(json.dumps(payload)) - else: - print_wallet_info(payload) - else: - error_exit(payload, exit_code=CLIExitCodes.BAD_API_RESPONSE) From 965675bd319ae936b0bd28da0dbd0bb9d883db33 Mon Sep 17 00:00:00 2001 From: badrogger Date: Mon, 7 Jul 2025 16:34:49 +0100 Subject: [PATCH 132/332] Add wallet info support for mirage --- node_cli/core/wallet.py | 11 ++++++++--- node_cli/utils/print_formatters.py | 20 ++++++++++++++++---- 2 files changed, 24 insertions(+), 7 deletions(-) diff --git a/node_cli/core/wallet.py b/node_cli/core/wallet.py index f4a5db6d..dd41db5a 100644 --- a/node_cli/core/wallet.py +++ b/node_cli/core/wallet.py @@ -20,9 +20,11 @@ import json 
import logging -from node_cli.utils.print_formatters import print_wallet_info, TEXTS -from node_cli.utils.helper import error_exit, get_request, post_request +from node_cli.cli.info import TYPE +from node_cli.core.node import NodeType from node_cli.utils.exit_codes import CLIExitCodes +from node_cli.utils.helper import error_exit, get_request, post_request +from node_cli.utils.print_formatters import TEXTS, print_mirage_wallet_info, print_wallet_info logger = logging.getLogger(__name__) @@ -35,7 +37,10 @@ def get_wallet_info(_format): if _format == 'json': print(json.dumps(payload)) else: - print_wallet_info(payload) + if TYPE == NodeType.MIRAGE: + print_mirage_wallet_info(payload) + else: + print_wallet_info(payload) else: error_exit(payload, exit_code=CLIExitCodes.BAD_API_RESPONSE) diff --git a/node_cli/utils/print_formatters.py b/node_cli/utils/print_formatters.py index 3ba9171b..339c9c8f 100644 --- a/node_cli/utils/print_formatters.py +++ b/node_cli/utils/print_formatters.py @@ -17,15 +17,15 @@ # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see . 
-import os -import json import datetime +import inspect +import json +import os from typing import Any + import texttable from dateutil import parser -import inspect - from node_cli.configs import LONG_LINE from node_cli.configs.cli_logger import DEBUG_LOG_FILEPATH from node_cli.utils.meta import CliMeta @@ -46,6 +46,18 @@ def print_wallet_info(wallet): ) +def print_mirage_wallet_info(wallet): + print( + inspect.cleandoc(f""" + {LONG_LINE} + Address: {wallet['address'].lower()} + MIRAGE balance: {wallet['mirage_balance']} ETH + MIRAGE balance WEI: {wallet['mirage_balance_wei']} WEI + {LONG_LINE} + """) + ) + + def get_tty_width(): tty_size = os.popen('stty size 2> /dev/null', 'r').read().split() if len(tty_size) != 2: From 0a2aa222840733d00fb10986549b048d52bb0228 Mon Sep 17 00:00:00 2001 From: Dmytro Date: Sat, 12 Jul 2025 16:47:34 +0100 Subject: [PATCH 133/332] Fix fair node info and fair wallet info commands --- node_cli/cli/mirage_node.py | 20 ++++++++------------ node_cli/core/node.py | 2 +- node_cli/core/wallet.py | 13 +++++++------ node_cli/mirage/mirage_node.py | 26 ++++++++++++++++++++++---- node_cli/utils/helper.py | 6 ++++-- node_cli/utils/print_formatters.py | 18 ++++++++++++++++-- 6 files changed, 58 insertions(+), 27 deletions(-) diff --git a/node_cli/cli/mirage_node.py b/node_cli/cli/mirage_node.py index 29d1d281..e76e4f4b 100644 --- a/node_cli/cli/mirage_node.py +++ b/node_cli/cli/mirage_node.py @@ -19,13 +19,18 @@ import click -from node_cli.core.node import backup, get_node_info, get_node_signature +from node_cli.core.node import backup from node_cli.mirage.mirage_node import cleanup as mirage_cleanup from node_cli.mirage.mirage_node import init as init_mirage -from node_cli.mirage.mirage_node import migrate_from_boot, request_repair, restore_mirage +from node_cli.mirage.mirage_node import ( + migrate_from_boot, + request_repair, + restore_mirage, + get_node_info, +) from node_cli.mirage.mirage_node import register as register_mirage from 
node_cli.mirage.mirage_node import update as update_mirage -from node_cli.utils.helper import IP_TYPE, URL_TYPE, abort_if_false, error_exit, streamed_cmd +from node_cli.utils.helper import IP_TYPE, URL_TYPE, abort_if_false, streamed_cmd from node_cli.utils.texts import safe_load_texts TEXTS = safe_load_texts() @@ -75,15 +80,6 @@ def update_node(env_filepath: str, pull_config_for_schain): update_mirage(env_filepath=env_filepath, pull_config_for_schain=pull_config_for_schain) -@node.command('signature', help='Get mirage node signature for a validator ID.') -@click.argument('validator_id') -def signature_node(validator_id): - res = get_node_signature(validator_id) - if isinstance(res, dict) and 'error' in res: - error_exit(f'Error getting signature: {res.get("message", res)}') - print(f'Signature: {res}') - - @node.command('backup', help='Generate backup file for the Mirage node.') @click.argument('backup_folder_path') @streamed_cmd diff --git a/node_cli/core/node.py b/node_cli/core/node.py index ca60ba5e..4d41dcde 100644 --- a/node_cli/core/node.py +++ b/node_cli/core/node.py @@ -370,7 +370,7 @@ def create_backup_archive(backup_filepath): cli_log_path = CLI_LOG_DATA_PATH container_log_path = LOG_PATH pack_dir(SKALE_DIR, backup_filepath, exclude=(cli_log_path, container_log_path)) - print(f'Backup archive succesfully created {backup_filepath}') + print(f'Backup archive successfully created {backup_filepath}') def set_maintenance_mode_on(): diff --git a/node_cli/core/wallet.py b/node_cli/core/wallet.py index dd41db5a..15efe8d0 100644 --- a/node_cli/core/wallet.py +++ b/node_cli/core/wallet.py @@ -20,8 +20,6 @@ import json import logging -from node_cli.cli.info import TYPE -from node_cli.core.node import NodeType from node_cli.utils.exit_codes import CLIExitCodes from node_cli.utils.helper import error_exit, get_request, post_request from node_cli.utils.print_formatters import TEXTS, print_mirage_wallet_info, print_wallet_info @@ -37,10 +35,13 @@ def 
get_wallet_info(_format): if _format == 'json': print(json.dumps(payload)) else: - if TYPE == NodeType.MIRAGE: - print_mirage_wallet_info(payload) - else: - print_wallet_info(payload) + if type(payload) is str: + print(payload) + elif type(payload) is dict: + if payload.get('skale_balance'): + print_wallet_info(payload) + else: + print_mirage_wallet_info(payload) else: error_exit(payload, exit_code=CLIExitCodes.BAD_API_RESPONSE) diff --git a/node_cli/mirage/mirage_node.py b/node_cli/mirage/mirage_node.py index 2407a317..3a185138 100644 --- a/node_cli/mirage/mirage_node.py +++ b/node_cli/mirage/mirage_node.py @@ -20,6 +20,7 @@ import logging import time +from typing import cast from node_cli.configs import DEFAULT_SKALED_BASE_PORT, RESTORE_SLEEP_TIMEOUT, SKALE_DIR from node_cli.configs.user import SKALE_DIR_ENV_FILEPATH @@ -36,15 +37,32 @@ ) from node_cli.utils.decorators import check_inited, check_not_inited, check_user from node_cli.utils.exit_codes import CLIExitCodes -from node_cli.utils.helper import error_exit, post_request +from node_cli.utils.helper import error_exit, get_request, post_request from node_cli.utils.node_type import NodeType -from node_cli.utils.print_formatters import print_node_cmd_error +from node_cli.utils.print_formatters import print_node_cmd_error, print_node_info_mirage from node_cli.utils.texts import safe_load_texts logger = logging.getLogger(__name__) TEXTS = safe_load_texts() -NODE_BLUEPRINT_NAME = 'mirage-node' +BLUEPRINT_NAME = 'mirage-node' + + +def get_node_info_plain() -> dict: + status, payload = get_request(blueprint=BLUEPRINT_NAME, method='info') + node_payload: dict = cast(dict, payload) + if status == 'ok': + return node_payload['node'] + else: + error_exit(payload, exit_code=CLIExitCodes.BAD_API_RESPONSE) + + +def get_node_info(format): + node_info = get_node_info_plain() + if format == 'json': + print(node_info) + else: + print_node_info_mirage(node_info) @check_not_inited @@ -143,7 +161,7 @@ def register(ip: str) -> 
None: return json_data = {'ip': ip, 'port': DEFAULT_SKALED_BASE_PORT} - status, payload = post_request(blueprint=NODE_BLUEPRINT_NAME, method='register', json=json_data) + status, payload = post_request(blueprint=BLUEPRINT_NAME, method='register', json=json_data) if status == 'ok': msg = TEXTS['mirage']['node']['registered'] logger.info(msg) diff --git a/node_cli/utils/helper.py b/node_cli/utils/helper.py index 32269dfa..7adfba3b 100644 --- a/node_cli/utils/helper.py +++ b/node_cli/utils/helper.py @@ -210,14 +210,16 @@ def post_request(blueprint, method, json=None, files=None): return status, payload -def get_request(blueprint: str, method: str, params: Optional[dict] = None) -> tuple[str, str]: +def get_request( + blueprint: str, method: str, params: Optional[dict] = None +) -> tuple[str, str | dict]: route = get_route(blueprint, method) url = construct_url(route) try: response = requests.get(url, params=params) data = response.json() except Exception as err: - logger.error('Request failed', exc_info=err) + logger.exception('Request failed', exc_info=err) data = DEFAULT_ERROR_DATA status = data['status'] diff --git a/node_cli/utils/print_formatters.py b/node_cli/utils/print_formatters.py index 339c9c8f..e4c8aabf 100644 --- a/node_cli/utils/print_formatters.py +++ b/node_cli/utils/print_formatters.py @@ -51,8 +51,8 @@ def print_mirage_wallet_info(wallet): inspect.cleandoc(f""" {LONG_LINE} Address: {wallet['address'].lower()} - MIRAGE balance: {wallet['mirage_balance']} ETH - MIRAGE balance WEI: {wallet['mirage_balance_wei']} WEI + Node balance: {wallet['mirage_balance']} MIRAGE + Node balance WEI: {wallet['mirage_balance_wei']} MIRAGE WEI {LONG_LINE} """) ) @@ -258,6 +258,20 @@ def print_node_info(node, node_status): ) +def print_node_info_mirage(node): + print( + inspect.cleandoc(f""" + {LONG_LINE} + Node info + ID: {node['id']} + IP: {node['ip_str']} + Port: {node['port']} + Domain name: {node['domain_name']} + {LONG_LINE} + """) + ) + + def 
print_err_response(error_payload: Any) -> None: """Print formatted error message from API response payload. From b672888b5dcb7f65a98f487392c5aec8abcdf5df Mon Sep 17 00:00:00 2001 From: Dmytro Date: Sat, 12 Jul 2025 16:58:35 +0100 Subject: [PATCH 134/332] Remove fair node signature test --- tests/cli/mirage_cli_test.py | 29 ----------------------------- 1 file changed, 29 deletions(-) diff --git a/tests/cli/mirage_cli_test.py b/tests/cli/mirage_cli_test.py index d961aa56..fb7462cd 100644 --- a/tests/cli/mirage_cli_test.py +++ b/tests/cli/mirage_cli_test.py @@ -12,7 +12,6 @@ backup_node, migrate_node, restore_node, - signature_node, ) @@ -54,34 +53,6 @@ def test_mirage_node_backup(mock_backup_core, tmp_path): mock_backup_core.assert_called_once_with(backup_folder) -@mock.patch('node_cli.cli.mirage_node.get_node_signature') -def test_mirage_node_signature(mock_signature_core): - runner = CliRunner() - validator_id = '42' - signature_val = '0xabc123' - mock_signature_core.return_value = signature_val - - result = runner.invoke(signature_node, [validator_id]) - - assert result.exit_code == 0, f'Output: {result.output}\nException: {result.exception}' - mock_signature_core.assert_called_once_with(validator_id) - assert f'Signature: {signature_val}' in result.output - - -@mock.patch('node_cli.cli.mirage_node.get_node_signature') -def test_mirage_node_signature_error(mock_signature_core): - runner = CliRunner() - validator_id = '43' - error_msg = 'Core layer error' - mock_signature_core.return_value = {'error': True, 'message': error_msg} - - result = runner.invoke(signature_node, [validator_id]) - - assert result.exit_code != 0, f'Output: {result.output}\nException: {result.exception}' - mock_signature_core.assert_called_once_with(validator_id) - assert error_msg in result.output - - @mock.patch('node_cli.cli.mirage_boot.register') def test_mirage_boot_register(mock_register_core): runner = CliRunner() From de7ad288b9d6bfa8143668fecee3487e87d0069e Mon Sep 17 00:00:00 2001 
From: Dmytro Date: Tue, 15 Jul 2025 13:12:55 +0100 Subject: [PATCH 135/332] Rename mirage to fair --- .github/workflows/publish.yml | 6 +- .github/workflows/test.yml | 10 +- README.md | 124 +++++++++--------- helper-scripts | 2 +- node_cli/cli/{mirage_boot.py => fair_boot.py} | 22 ++-- node_cli/cli/{mirage_node.py => fair_node.py} | 58 ++++---- node_cli/configs/__init__.py | 4 +- node_cli/configs/routes.py | 2 +- node_cli/configs/user.py | 24 ++-- node_cli/core/node.py | 26 ++-- node_cli/core/static_config.py | 10 +- node_cli/core/wallet.py | 4 +- node_cli/{mirage => fair}/__init__.py | 0 .../mirage_boot.py => fair/fair_boot.py} | 26 ++-- .../mirage_node.py => fair/fair_node.py} | 68 +++++----- node_cli/{mirage => fair}/record/__init__.py | 0 .../{mirage => fair}/record/chain_record.py | 12 +- .../{mirage => fair}/record/redis_record.py | 0 node_cli/main.py | 10 +- .../migrations/{mirage => fair}/from_boot.py | 2 +- node_cli/operations/__init__.py | 16 +-- node_cli/operations/base.py | 20 +-- node_cli/operations/{mirage.py => fair.py} | 50 +++---- node_cli/utils/docker_utils.py | 46 +++---- node_cli/utils/meta.py | 16 +-- node_cli/utils/node_type.py | 2 +- node_cli/utils/print_formatters.py | 8 +- scripts/build.sh | 6 +- scripts/generate_info.sh | 8 +- .../{mirage_cli_test.py => fair_cli_test.py} | 32 ++--- tests/cli/node_test.py | 2 +- tests/configs/configs_env_validate_test.py | 16 +-- tests/conftest.py | 6 +- tests/core/core_checks_test.py | 20 +-- tests/core/core_node_test.py | 36 ++--- tests/core/nginx_test.py | 10 +- tests/{mirage => fair}/__init__.py | 0 .../fair_node_test.py} | 122 ++++++++--------- tests/routes_test.py | 4 +- tests/tools_meta_test.py | 118 ++++++++--------- text.yml | 14 +- 41 files changed, 481 insertions(+), 481 deletions(-) rename node_cli/cli/{mirage_boot.py => fair_boot.py} (77%) rename node_cli/cli/{mirage_node.py => fair_node.py} (60%) rename node_cli/{mirage => fair}/__init__.py (100%) rename node_cli/{mirage/mirage_boot.py => 
fair/fair_boot.py} (73%) rename node_cli/{mirage/mirage_node.py => fair/fair_node.py} (71%) rename node_cli/{mirage => fair}/record/__init__.py (100%) rename node_cli/{mirage => fair}/record/chain_record.py (86%) rename node_cli/{mirage => fair}/record/redis_record.py (100%) rename node_cli/migrations/{mirage => fair}/from_boot.py (97%) rename node_cli/operations/{mirage.py => fair.py} (81%) mode change 100755 => 100644 scripts/build.sh mode change 100755 => 100644 scripts/generate_info.sh rename tests/cli/{mirage_cli_test.py => fair_cli_test.py} (75%) rename tests/{mirage => fair}/__init__.py (100%) rename tests/{mirage/mirage_node_test.py => fair/fair_node_test.py} (61%) diff --git a/.github/workflows/publish.yml b/.github/workflows/publish.yml index d5d3bff4..3459f78f 100644 --- a/.github/workflows/publish.yml +++ b/.github/workflows/publish.yml @@ -8,8 +8,8 @@ on: - beta - stable - 'v*.*.*' - - 'mirage' - - 'mirage-*' + - 'fair' + - 'fair-*' jobs: create_release: @@ -72,7 +72,7 @@ jobs: strategy: matrix: os: [ubuntu-22.04] - build_type: [normal, sync, mirage] + build_type: [normal, sync, fair] steps: - name: Checkout code uses: actions/checkout@v4 diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 026ac8e8..9251c723 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -31,7 +31,7 @@ jobs: pip install -e ".[dev]" - name: Generate info - run: ./scripts/generate_info.sh 1.0.0 my-branch normal + run: bash ./scripts/generate_info.sh 1.0.0 my-branch normal - name: Check with ruff run: | @@ -57,15 +57,15 @@ jobs: - name: Check build - sync run: sudo /home/ubuntu/dist/skale-test-Linux-x86_64-sync - - name: Build binary - mirage + - name: Build binary - fair run: | mkdir -p ./dist docker build . 
-t node-cli-builder - docker run -v /home/ubuntu/dist:/app/dist node-cli-builder scripts/build.sh test test mirage + docker run -v /home/ubuntu/dist:/app/dist node-cli-builder scripts/build.sh test test fair docker rm -f $(docker ps -aq) - - name: Check build - mirage - run: sudo /home/ubuntu/dist/skale-test-Linux-x86_64-mirage + - name: Check build - fair + run: sudo /home/ubuntu/dist/skale-test-Linux-x86_64-fair - name: Run prepare test build run: | diff --git a/README.md b/README.md index 6353d4f1..5bf844f6 100644 --- a/README.md +++ b/README.md @@ -4,14 +4,14 @@ ![Test](https://github.com/skalenetwork/node-cli/workflows/Test/badge.svg) [![Discord](https://img.shields.io/discord/534485763354787851.svg)](https://discord.gg/vvUtWJB) -SKALE Node CLI, part of the SKALE suite of validator tools, is the command line interface to setup, register and maintain your SKALE node. It comes in three distinct build types: Standard (for validator nodes), Sync (for dedicated sChain synchronization), and Mirage (for the Mirage network). +SKALE Node CLI, part of the SKALE suite of validator tools, is the command line interface to setup, register and maintain your SKALE node. It comes in three distinct build types: Standard (for validator nodes), Sync (for dedicated sChain synchronization), and Fair (for the Fair network). ## Table of Contents 1. [Installation](#installation) 1. [Standard Node Binary](#standard-node-binary) 2. [Sync Node Binary](#sync-node-binary) - 3. [Mirage Node Binary](#mirage-node-binary) + 3. [Fair Node Binary](#fair-node-binary) 4. [Permissions and Testing](#permissions-and-testing) 2. [Standard Node Usage (`skale` - Normal Build)](#standard-node-usage-skale---normal-build) 1. [Top level commands (Standard)](#top-level-commands-standard) @@ -25,10 +25,10 @@ SKALE Node CLI, part of the SKALE suite of validator tools, is the command line 3. [Sync Node Usage (`skale` - Sync Build)](#sync-node-usage-skale---sync-build) 1. 
[Top level commands (Sync)](#top-level-commands-sync) 2. [Sync node commands](#sync-node-commands) -4. [Mirage Node Usage (`mirage`)](#mirage-node-usage-mirage) - 1. [Top level commands (Mirage)](#top-level-commands-mirage) - 2. [Mirage Boot commands](#mirage-boot-commands) - 3. [Mirage Node commands](#mirage-node-commands) +4. [Fair Node Usage (`fair`)](#fair-node-usage-fair) + 1. [Top level commands (Fair)](#top-level-commands-fair) + 2. [Fair Boot commands](#fair-boot-commands) + 3. [Fair Node commands](#fair-node-commands) 5. [Exit codes](#exit-codes) 6. [Development](#development) @@ -60,14 +60,14 @@ VERSION_NUM={version} && \ sudo -E bash -c "curl -L https://github.com/skalenetwork/node-cli/releases/download/$VERSION_NUM/skale-$VERSION_NUM-`uname -s`-`uname -m`-sync > /usr/local/bin/skale" ``` -### Mirage Node Binary +### Fair Node Binary -This binary (`skale-VERSION-OS-mirage`) is used specifically for managing nodes on the Mirage network. It is named `mirage`. +This binary (`skale-VERSION-OS-fair`) is used specifically for managing nodes on the Fair network. It is named `fair`. 
```shell # Replace {version} with the desired release version (e.g., 2.6.0) VERSION_NUM={version} && \ -sudo -E bash -c "curl -L https://github.com/skalenetwork/node-cli/releases/download/$VERSION_NUM/skale-$VERSION_NUM-`uname -s`-`uname -m`-mirage > /usr/local/bin/mirage" +sudo -E bash -c "curl -L https://github.com/skalenetwork/node-cli/releases/download/$VERSION_NUM/skale-$VERSION_NUM-`uname -s`-`uname -m`-fair > /usr/local/bin/fair" ``` ### Permissions and Testing @@ -78,8 +78,8 @@ Apply executable permissions to the downloaded binary (adjust name accordingly): # For Standard or Sync binary sudo chmod +x /usr/local/bin/skale -# For Mirage binary -sudo chmod +x /usr/local/bin/mirage +# For Fair binary +sudo chmod +x /usr/local/bin/fair ``` Test the installation: @@ -88,8 +88,8 @@ Test the installation: # Standard or Sync build skale --help -# Mirage build -mirage --help +# Fair build +fair --help ``` --- @@ -647,44 +647,44 @@ Options: --- -## Mirage Node Usage (`mirage`) +## Fair Node Usage (`fair`) -Commands available in the **`mirage` binary** for managing nodes on the Mirage network. +Commands available in the **`fair` binary** for managing nodes on the Fair network. -### Top level commands (Mirage) +### Top level commands (Fair) -#### Mirage Info +#### Fair Info -Print build info for the `mirage` binary. +Print build info for the `fair` binary. ```shell -mirage info +fair info ``` -#### Mirage Version +#### Fair Version -Print version number for the `mirage` binary. +Print version number for the `fair` binary. ```shell -mirage version [--short] +fair version [--short] ``` Options: - `--short` - prints version only, without additional text. -### Mirage Boot commands +### Fair Boot commands -> Prefix: `mirage boot` +> Prefix: `fair boot` -Commands for a Mirage node in the Boot phase. +Commands for a Fair node in the Boot phase. -#### Mirage Boot Initialization +#### Fair Boot Initialization -Initialize the Mirage node boot phase. 
+Initialize the Fair node boot phase. ```shell -mirage boot init [ENV_FILE] +fair boot init [ENV_FILE] ``` Arguments: @@ -696,9 +696,9 @@ Required environment variables in `ENV_FILE`: - `SGX_SERVER_URL` - SGX server URL. - `DISK_MOUNTPOINT` - Mount point for storing data (BTRFS recommended). - `CONTAINER_CONFIGS_STREAM` - Stream of `skale-node` configs. -- `ENDPOINT` - RPC endpoint of the network where Mirage Manager is deployed. +- `ENDPOINT` - RPC endpoint of the network where Fair Manager is deployed. - `MANAGER_CONTRACTS` - SKALE Manager alias or address. -- `IMA_CONTRACTS` - IMA alias or address (_Note: Required by boot service, may not be used by Mirage itself_). +- `IMA_CONTRACTS` - IMA alias or address (_Note: Required by boot service, may not be used by Fair itself_). - `FILEBEAT_HOST` - URL/IP:Port of the Filebeat log server. - `ENV_TYPE` - Environment type (e.g., 'mainnet', 'devnet'). @@ -706,42 +706,42 @@ Optional variables: - `MONITORING_CONTAINERS` - Enable monitoring containers (`cadvisor`, `node-exporter`). -#### Mirage Boot Registration +#### Fair Boot Registration -Register the Mirage node with Mirage Manager _during_ the boot phase. +Register the Fair node with Fair Manager _during_ the boot phase. ```shell -mirage boot register --name --ip --domain [--port ] +fair boot register --name --ip --domain [--port ] ``` Required arguments: -- `--name`/`-n` - Mirage node name. +- `--name`/`-n` - Fair node name. - `--ip` - Public IP for RPC connections and consensus. -- `--domain`/`-d` - Mirage node domain name (e.g., `mirage1.example.com`). +- `--domain`/`-d` - Fair node domain name (e.g., `fair1.example.com`). Optional arguments: - `--port`/`-p` - Base port for node sChains (default: `10000`). -#### Mirage Boot Signature +#### Fair Boot Signature Get the node signature for a validator ID _during_ the boot phase. ```shell -mirage boot signature +fair boot signature ``` Arguments: - `VALIDATOR_ID` - The ID of the validator requesting the signature. 
-#### Mirage Boot Migrate +#### Fair Boot Migrate -Migrate the Mirage node from the boot phase to the main phase (regular operation). +Migrate the Fair node from the boot phase to the main phase (regular operation). ```shell -mirage boot migrate [ENV_FILEPATH] [--yes] +fair boot migrate [ENV_FILEPATH] [--yes] ``` Arguments: @@ -752,72 +752,72 @@ Options: - `--yes` - Migrate without confirmation. -### Mirage Node commands +### Fair Node commands -> Prefix: `mirage node` +> Prefix: `fair node` -Commands for managing a Mirage node during its regular operation (main phase). +Commands for managing a Fair node during its regular operation (main phase). -#### Mirage Node Initialization (Placeholder) +#### Fair Node Initialization (Placeholder) -Initialize the regular operation phase of the Mirage node. +Initialize the regular operation phase of the Fair node. ```shell -mirage node init +fair node init ``` > **Note:** This command is currently a placeholder and not implemented. -#### Mirage Node Registration (Placeholder) +#### Fair Node Registration (Placeholder) Register the node during regular operation. ```shell -mirage node register +fair node register ``` > **Note:** This command is currently a placeholder and not implemented. -#### Mirage Node Update (Placeholder) +#### Fair Node Update (Placeholder) -Update the Mirage node during regular operation. +Update the Fair node during regular operation. ```shell -mirage node update [ENV_FILEPATH] [--yes] [--unsafe] +fair node update [ENV_FILEPATH] [--yes] [--unsafe] ``` > **Note:** This command is currently a placeholder and not implemented. -#### Mirage Node Signature +#### Fair Node Signature Get the node signature for a validator ID during regular operation. ```shell -mirage node signature +fair node signature ``` Arguments: - `VALIDATOR_ID` - The ID of the validator requesting the signature. -#### Mirage Node Backup +#### Fair Node Backup -Generate a backup archive of the Mirage node's state. 
+Generate a backup archive of the Fair node's state. ```shell -mirage node backup +fair node backup ``` Arguments: - `BACKUP_FOLDER_PATH` - Path to the folder where the backup file will be saved. -#### Mirage Node Restore +#### Fair Node Restore -Restore a Mirage node from a backup archive. +Restore a Fair node from a backup archive. ```shell -mirage node restore [--config-only] +fair node restore [--config-only] ``` Arguments: @@ -887,7 +887,7 @@ pip install -e ".[dev]" #### Generate info.py locally -Specify the build type (`normal`, `sync`, or `mirage`): +Specify the build type (`normal`, `sync`, or `fair`): ```shell # Example for Standard build @@ -896,8 +896,8 @@ Specify the build type (`normal`, `sync`, or `mirage`): # Example for Sync build ./scripts/generate_info.sh 1.0.0 my-branch sync -# Example for Mirage build -./scripts/generate_info.sh 1.0.0 my-branch mirage +# Example for Fair build +./scripts/generate_info.sh 1.0.0 my-branch fair ``` #### Add linting git hook diff --git a/helper-scripts b/helper-scripts index 5c5bf3a0..808c768f 160000 --- a/helper-scripts +++ b/helper-scripts @@ -1 +1 @@ -Subproject commit 5c5bf3a09500d605d72b53e2da8e871ffca1403a +Subproject commit 808c768feebfa99d9148e076b5b6b24b1b340734 diff --git a/node_cli/cli/mirage_boot.py b/node_cli/cli/fair_boot.py similarity index 77% rename from node_cli/cli/mirage_boot.py rename to node_cli/cli/fair_boot.py index 8f5bf44e..703bea88 100644 --- a/node_cli/cli/mirage_boot.py +++ b/node_cli/cli/fair_boot.py @@ -22,36 +22,36 @@ from node_cli.configs import DEFAULT_NODE_BASE_PORT from node_cli.core.node import get_node_info, get_node_signature from node_cli.core.node import register_node as register -from node_cli.mirage.mirage_boot import init, update +from node_cli.fair.fair_boot import init, update from node_cli.utils.helper import IP_TYPE, abort_if_false, error_exit, streamed_cmd @click.group() -def mirage_boot_cli(): +def fair_boot_cli(): pass -@mirage_boot_cli.group(help='Commands for the 
Mirage Boot phase.') +@fair_boot_cli.group(help='Commands for the Fair Boot phase.') def boot(): pass -@boot.command('info', help='Get info about Mirage node (Boot Phase).') +@boot.command('info', help='Get info about Fair node (Boot Phase).') @click.option('--format', '-f', type=click.Choice(['json', 'text'])) -def mirage_boot_info(format): +def fair_boot_info(format): get_node_info(format) -@boot.command('init', help='Initialize Mirage node (Boot Phase).') +@boot.command('init', help='Initialize Fair node (Boot Phase).') @click.argument('env_file') @streamed_cmd def init_boot(env_file): init(env_file) -@boot.command('register', help='Register Mirage node in SKALE Manager (during Boot Phase).') +@boot.command('register', help='Register Fair node in SKALE Manager (during Boot Phase).') @click.option( - '--name', '-n', required=True, prompt='Enter mirage node name', help='Mirage node name' + '--name', '-n', required=True, prompt='Enter fair node name', help='Fair node name' ) @click.option( '--ip', @@ -68,7 +68,7 @@ def register_boot(name, ip, port, domain): register(name=name, p2p_ip=ip, public_ip=ip, port=port, domain_name=domain) -@boot.command('signature', help='Get mirage node signature for a validator ID (during Boot Phase).') +@boot.command('signature', help='Get fair node signature for a validator ID (during Boot Phase).') @click.argument('validator_id') def signature_boot(validator_id): res = get_node_signature(validator_id) @@ -77,13 +77,13 @@ def signature_boot(validator_id): print(f'Signature: {res}') -@boot.command('update', help='Update Mirage node from .env file') +@boot.command('update', help='Update Fair node from .env file') @click.option( '--yes', is_flag=True, callback=abort_if_false, expose_value=False, - prompt='Are you sure you want to update Mirage node software?', + prompt='Are you sure you want to update Fair node software?', ) @click.option('--pull-config', 'pull_config_for_schain', hidden=True, type=str) @click.argument('env_file') diff 
--git a/node_cli/cli/mirage_node.py b/node_cli/cli/fair_node.py similarity index 60% rename from node_cli/cli/mirage_node.py rename to node_cli/cli/fair_node.py index e76e4f4b..b6461871 100644 --- a/node_cli/cli/mirage_node.py +++ b/node_cli/cli/fair_node.py @@ -20,16 +20,16 @@ import click from node_cli.core.node import backup -from node_cli.mirage.mirage_node import cleanup as mirage_cleanup -from node_cli.mirage.mirage_node import init as init_mirage -from node_cli.mirage.mirage_node import ( +from node_cli.fair.fair_node import cleanup as fair_cleanup +from node_cli.fair.fair_node import init as init_fair +from node_cli.fair.fair_node import ( migrate_from_boot, request_repair, - restore_mirage, + restore_fair, get_node_info, ) -from node_cli.mirage.mirage_node import register as register_mirage -from node_cli.mirage.mirage_node import update as update_mirage +from node_cli.fair.fair_node import register as register_fair +from node_cli.fair.fair_node import update as update_fair from node_cli.utils.helper import IP_TYPE, URL_TYPE, abort_if_false, streamed_cmd from node_cli.utils.texts import safe_load_texts @@ -37,57 +37,57 @@ @click.group() -def mirage_node_cli(): +def fair_node_cli(): pass -@mirage_node_cli.group(help='Commands for regular Mirage Node operations.') +@fair_node_cli.group(help='Commands for regular Fair Node operations.') def node(): pass -@node.command('info', help='Get info about Mirage node.') +@node.command('info', help='Get info about Fair node.') @click.option('--format', '-f', type=click.Choice(['json', 'text'])) -def mirage_node_info(format): +def fair_node_info(format): get_node_info(format) -@node.command('init', help='Initialize regular Mirage node') +@node.command('init', help='Initialize regular Fair node') @click.argument('env_filepath') @streamed_cmd def init_node(env_filepath: str): - init_mirage(env_filepath=env_filepath) + init_fair(env_filepath=env_filepath) -@node.command('register', 
help=TEXTS['mirage']['node']['register']['help']) -@click.option('--ip', required=True, type=IP_TYPE, help=TEXTS['mirage']['node']['register']['ip']) +@node.command('register', help=TEXTS['fair']['node']['register']['help']) +@click.option('--ip', required=True, type=IP_TYPE, help=TEXTS['fair']['node']['register']['ip']) def register(ip: str) -> None: - register_mirage(ip=ip) + register_fair(ip=ip) -@node.command('update', help='Update Mirage node') +@node.command('update', help='Update Fair node') @click.argument('env_filepath') @click.option( '--yes', is_flag=True, callback=abort_if_false, expose_value=False, - prompt='Are you sure you want to update Mirage node software?', + prompt='Are you sure you want to update Fair node software?', ) @click.option('--pull-config', 'pull_config_for_schain', hidden=True, type=str) @streamed_cmd def update_node(env_filepath: str, pull_config_for_schain): - update_mirage(env_filepath=env_filepath, pull_config_for_schain=pull_config_for_schain) + update_fair(env_filepath=env_filepath, pull_config_for_schain=pull_config_for_schain) -@node.command('backup', help='Generate backup file for the Mirage node.') +@node.command('backup', help='Generate backup file for the Fair node.') @click.argument('backup_folder_path') @streamed_cmd def backup_node(backup_folder_path): backup(backup_folder_path) -@node.command('restore', help='Restore Mirage node from a backup file.') +@node.command('restore', help='Restore Fair node from a backup file.') @click.argument('backup_path') @click.argument('env_file') @click.option( @@ -98,50 +98,50 @@ def backup_node(backup_folder_path): ) @streamed_cmd def restore_node(backup_path, env_file, config_only): - restore_mirage(backup_path, env_file, config_only) + restore_fair(backup_path, env_file, config_only) -@node.command('migrate', help='Switch from boot to regular Mirage node.') +@node.command('migrate', help='Switch from boot to regular Fair node.') @click.argument('env_filepath') @click.option( 
'--yes', is_flag=True, callback=abort_if_false, expose_value=False, - prompt='Are you sure you want to migrate to regular Mirage node? The action cannot be undone', + prompt='Are you sure you want to migrate to regular Fair node? The action cannot be undone', ) @streamed_cmd def migrate_node(env_filepath: str) -> None: migrate_from_boot(env_filepath=env_filepath) -@node.command('repair', help='Toggle mirage chain repair mode') +@node.command('repair', help='Toggle fair chain repair mode') @click.option( '--snapshot-from', type=URL_TYPE, default='', hidden=True, - help=TEXTS['mirage']['node']['repair']['snapshot_from'], + help=TEXTS['fair']['node']['repair']['snapshot_from'], ) @click.option( '--yes', is_flag=True, callback=abort_if_false, expose_value=False, - prompt=TEXTS['mirage']['node']['repair']['warning'], + prompt=TEXTS['fair']['node']['repair']['warning'], ) def repair(snapshot_from: str = '') -> None: request_repair(snapshot_from=snapshot_from) -@node.command('cleanup', help='Cleanup Mirage node.') +@node.command('cleanup', help='Cleanup Fair node.') @click.option( '--yes', is_flag=True, callback=abort_if_false, expose_value=False, - prompt='Are you sure you want to cleanup Mirage node?', + prompt='Are you sure you want to cleanup Fair node?', ) @streamed_cmd def cleanup_node(): - mirage_cleanup() + fair_cleanup() diff --git a/node_cli/configs/__init__.py b/node_cli/configs/__init__.py index a360ab9a..6709ee75 100644 --- a/node_cli/configs/__init__.py +++ b/node_cli/configs/__init__.py @@ -56,9 +56,9 @@ COMPOSE_PATH = os.path.join(CONTAINER_CONFIG_PATH, 'docker-compose.yml') SYNC_COMPOSE_PATH = os.path.join(CONTAINER_CONFIG_PATH, 'docker-compose-sync.yml') -MIRAGE_COMPOSE_PATH = os.path.join(CONTAINER_CONFIG_PATH, 'docker-compose-mirage.yml') +FAIR_COMPOSE_PATH = os.path.join(CONTAINER_CONFIG_PATH, 'docker-compose-fair.yml') STATIC_PARAMS_FILEPATH = os.path.join(CONTAINER_CONFIG_PATH, 'static_params.yaml') -MIRAGE_STATIC_PARAMS_FILEPATH = 
os.path.join(CONTAINER_CONFIG_PATH, 'mirage_static_params.yaml') +FAIR_STATIC_PARAMS_FILEPATH = os.path.join(CONTAINER_CONFIG_PATH, 'fair_static_params.yaml') NGINX_TEMPLATE_FILEPATH = os.path.join(CONTAINER_CONFIG_PATH, 'nginx.conf.j2') NGINX_CONFIG_FILEPATH = os.path.join(NODE_DATA_PATH, 'nginx.conf') diff --git a/node_cli/configs/routes.py b/node_cli/configs/routes.py index acdf0118..8afc10d9 100644 --- a/node_cli/configs/routes.py +++ b/node_cli/configs/routes.py @@ -40,7 +40,7 @@ 'schains': ['config', 'list', 'dkg-statuses', 'firewall-rules', 'repair', 'get'], 'ssl': ['status', 'upload'], 'wallet': ['info', 'send-eth'], - 'mirage-node': ['info', 'register'], + 'fair-node': ['info', 'register'], } } diff --git a/node_cli/configs/user.py b/node_cli/configs/user.py index f1d07cc6..49f44d58 100644 --- a/node_cli/configs/user.py +++ b/node_cli/configs/user.py @@ -81,15 +81,15 @@ def validate_params(cls, params: Dict) -> ValidationResult: @dataclass -class MirageUserConfig(BaseUserConfig): - mirage_contracts: str +class FairUserConfig(BaseUserConfig): + fair_contracts: str boot_endpoint: str sgx_server_url: str enforce_btrfs: str = '' @dataclass -class MirageBootUserConfig(BaseUserConfig): +class FairBootUserConfig(BaseUserConfig): endpoint: str manager_contracts: str ima_contracts: str @@ -127,10 +127,10 @@ class SyncUserConfig(BaseUserConfig): def get_validated_user_config( node_type: NodeType, env_filepath: str = SKALE_DIR_ENV_FILEPATH, - is_mirage_boot: bool = False, + is_fair_boot: bool = False, ) -> BaseUserConfig: params = parse_env_file(env_filepath) - user_config_class = get_user_config_class(node_type, is_mirage_boot) + user_config_class = get_user_config_class(node_type, is_fair_boot) _, missing_params, extra_params = user_config_class.validate_params(params) if len(missing_params) > 0: @@ -149,12 +149,12 @@ def get_validated_user_config( def validate_user_config(user_config: BaseUserConfig) -> None: validate_env_type(env_type=user_config.env_type) - if 
not isinstance(user_config, MirageUserConfig): + if not isinstance(user_config, FairUserConfig): validate_alias_or_address( user_config.manager_contracts, ContractType.MANAGER, user_config.endpoint ) - if isinstance(user_config, (SkaleUserConfig, MirageBootUserConfig)): + if isinstance(user_config, (SkaleUserConfig, FairBootUserConfig)): validate_alias_or_address(user_config.ima_contracts, ContractType.IMA, user_config.endpoint) @@ -170,12 +170,12 @@ def parse_env_file(env_filepath: str) -> Dict: def get_user_config_class( node_type: NodeType, - is_mirage_boot: bool = False, + is_fair_boot: bool = False, ) -> type[BaseUserConfig]: - if node_type == NodeType.MIRAGE and is_mirage_boot: - user_config_class = MirageBootUserConfig - elif node_type == NodeType.MIRAGE: - user_config_class = MirageUserConfig + if node_type == NodeType.FAIR and is_fair_boot: + user_config_class = FairBootUserConfig + elif node_type == NodeType.FAIR: + user_config_class = FairUserConfig elif node_type == NodeType.SYNC: user_config_class = SyncUserConfig else: diff --git a/node_cli/core/node.py b/node_cli/core/node.py index 4d41dcde..f5e0a16b 100644 --- a/node_cli/core/node.py +++ b/node_cli/core/node.py @@ -78,8 +78,8 @@ is_api_running, BASE_SKALE_COMPOSE_SERVICES, BASE_SYNC_COMPOSE_SERVICES, - BASE_MIRAGE_COMPOSE_SERVICES, - BASE_MIRAGE_BOOT_COMPOSE_SERVICES, + BASE_FAIR_COMPOSE_SERVICES, + BASE_FAIR_BOOT_COMPOSE_SERVICES, ) from node_cli.utils.node_type import NodeType from node_cli.migrations.focal_to_jammy import migrate as migrate_2_6 @@ -230,13 +230,13 @@ def compose_node_env( sync_schains: Optional[bool] = None, pull_config_for_schain: Optional[str] = None, save: bool = True, - is_mirage_boot: bool = False, + is_fair_boot: bool = False, ) -> dict[str, str]: if env_filepath is not None: user_config = get_validated_user_config( node_type=node_type, env_filepath=env_filepath, - is_mirage_boot=is_mirage_boot, + is_fair_boot=is_fair_boot, ) if save: save_env_params(env_filepath) @@ -244,10 
+244,10 @@ def compose_node_env( user_config = get_validated_user_config( node_type=node_type, env_filepath=INIT_ENV_FILEPATH, - is_mirage_boot=is_mirage_boot, + is_fair_boot=is_fair_boot, ) - if node_type == NodeType.SYNC or node_type == NodeType.MIRAGE: + if node_type == NodeType.SYNC or node_type == NodeType.FAIR: mnt_dir = SCHAINS_MNT_DIR_SINGLE_CHAIN else: mnt_dir = SCHAINS_MNT_DIR_REGULAR @@ -428,11 +428,11 @@ def turn_on(maintenance_off, sync_schains, env_file, node_type: NodeType) -> Non set_maintenance_mode_off() -def get_expected_container_names(node_type: NodeType, is_mirage_boot: bool) -> list[str]: - if node_type == NodeType.MIRAGE and is_mirage_boot: - services = BASE_MIRAGE_BOOT_COMPOSE_SERVICES - elif node_type == NodeType.MIRAGE and not is_mirage_boot: - services = BASE_MIRAGE_COMPOSE_SERVICES +def get_expected_container_names(node_type: NodeType, is_fair_boot: bool) -> list[str]: + if node_type == NodeType.FAIR and is_fair_boot: + services = BASE_FAIR_BOOT_COMPOSE_SERVICES + elif node_type == NodeType.FAIR and not is_fair_boot: + services = BASE_FAIR_COMPOSE_SERVICES elif node_type == NodeType.SYNC: services = BASE_SYNC_COMPOSE_SERVICES else: @@ -441,8 +441,8 @@ def get_expected_container_names(node_type: NodeType, is_mirage_boot: bool) -> l return list(services.values()) -def is_base_containers_alive(node_type: NodeType, is_mirage_boot: bool = False) -> bool: - base_container_names = get_expected_container_names(node_type, is_mirage_boot) +def is_base_containers_alive(node_type: NodeType, is_fair_boot: bool = False) -> bool: + base_container_names = get_expected_container_names(node_type, is_fair_boot) dclient = docker.from_env() running_container_names = set(container.name for container in dclient.containers.list()) diff --git a/node_cli/core/static_config.py b/node_cli/core/static_config.py index 4bc84f42..a93a7a60 100644 --- a/node_cli/core/static_config.py +++ b/node_cli/core/static_config.py @@ -23,7 +23,7 @@ from node_cli.configs import ( 
CONTAINER_CONFIG_PATH, - MIRAGE_STATIC_PARAMS_FILEPATH, + FAIR_STATIC_PARAMS_FILEPATH, STATIC_PARAMS_FILEPATH, ) from node_cli.utils.node_type import NodeType @@ -34,8 +34,8 @@ def get_static_params( env_type: str = 'mainnet', config_path: str = CONTAINER_CONFIG_PATH, ) -> dict: - if node_type == NodeType.MIRAGE: - static_params_base_filepath = MIRAGE_STATIC_PARAMS_FILEPATH + if node_type == NodeType.FAIR: + static_params_base_filepath = FAIR_STATIC_PARAMS_FILEPATH else: static_params_base_filepath = STATIC_PARAMS_FILEPATH @@ -46,7 +46,7 @@ def get_static_params( return ydata['envs'][env_type] -def get_mirage_chain_name(env: dict) -> str: - node_type = NodeType.MIRAGE +def get_fair_chain_name(env: dict) -> str: + node_type = NodeType.FAIR params = get_static_params(node_type, env['ENV_TYPE']) return params['info']['chain_name'] diff --git a/node_cli/core/wallet.py b/node_cli/core/wallet.py index 15efe8d0..467b675b 100644 --- a/node_cli/core/wallet.py +++ b/node_cli/core/wallet.py @@ -22,7 +22,7 @@ from node_cli.utils.exit_codes import CLIExitCodes from node_cli.utils.helper import error_exit, get_request, post_request -from node_cli.utils.print_formatters import TEXTS, print_mirage_wallet_info, print_wallet_info +from node_cli.utils.print_formatters import TEXTS, print_fair_wallet_info, print_wallet_info logger = logging.getLogger(__name__) @@ -41,7 +41,7 @@ def get_wallet_info(_format): if payload.get('skale_balance'): print_wallet_info(payload) else: - print_mirage_wallet_info(payload) + print_fair_wallet_info(payload) else: error_exit(payload, exit_code=CLIExitCodes.BAD_API_RESPONSE) diff --git a/node_cli/mirage/__init__.py b/node_cli/fair/__init__.py similarity index 100% rename from node_cli/mirage/__init__.py rename to node_cli/fair/__init__.py diff --git a/node_cli/mirage/mirage_boot.py b/node_cli/fair/fair_boot.py similarity index 73% rename from node_cli/mirage/mirage_boot.py rename to node_cli/fair/fair_boot.py index 31c95cb9..290bbb1e 100644 --- 
a/node_cli/mirage/mirage_boot.py +++ b/node_cli/fair/fair_boot.py @@ -23,7 +23,7 @@ from node_cli.configs import TM_INIT_TIMEOUT from node_cli.core.node import compose_node_env, is_base_containers_alive -from node_cli.operations import init_mirage_boot_op, update_mirage_boot_op +from node_cli.operations import init_fair_boot_op, update_fair_boot_op from node_cli.utils.decorators import check_inited, check_not_inited, check_user from node_cli.utils.exit_codes import CLIExitCodes from node_cli.utils.helper import error_exit @@ -37,37 +37,37 @@ def init(env_filepath: str) -> None: env = compose_node_env( env_filepath, - node_type=NodeType.MIRAGE, - is_mirage_boot=True, + node_type=NodeType.FAIR, + is_fair_boot=True, ) - init_mirage_boot_op(env_filepath, env) - logger.info('Waiting for mirage containers initialization') + init_fair_boot_op(env_filepath, env) + logger.info('Waiting for fair containers initialization') time.sleep(TM_INIT_TIMEOUT) - if not is_base_containers_alive(node_type=NodeType.MIRAGE, is_mirage_boot=True): + if not is_base_containers_alive(node_type=NodeType.FAIR, is_fair_boot=True): error_exit('Containers are not running', exit_code=CLIExitCodes.OPERATION_EXECUTION_ERROR) - logger.info('Init mirage procedure finished') + logger.info('Init fair procedure finished') @check_inited @check_user def update(env_filepath: str, pull_config_for_schain: str) -> None: - logger.info('Mirage boot node update started') + logger.info('Fair boot node update started') env = compose_node_env( env_filepath, inited_node=True, sync_schains=False, pull_config_for_schain=pull_config_for_schain, - node_type=NodeType.MIRAGE, - is_mirage_boot=True, + node_type=NodeType.FAIR, + is_fair_boot=True, ) - migrate_ok = update_mirage_boot_op(env_filepath, env) + migrate_ok = update_fair_boot_op(env_filepath, env) if migrate_ok: logger.info('Waiting for containers initialization') time.sleep(TM_INIT_TIMEOUT) - alive = is_base_containers_alive(node_type=NodeType.MIRAGE, 
is_mirage_boot=True) + alive = is_base_containers_alive(node_type=NodeType.FAIR, is_fair_boot=True) if not migrate_ok or not alive: print_node_cmd_error() return else: - logger.info('Mirage boot node update finished successfully!') + logger.info('Fair boot node update finished successfully!') diff --git a/node_cli/mirage/mirage_node.py b/node_cli/fair/fair_node.py similarity index 71% rename from node_cli/mirage/mirage_node.py rename to node_cli/fair/fair_node.py index 3a185138..e32149e9 100644 --- a/node_cli/mirage/mirage_node.py +++ b/node_cli/fair/fair_node.py @@ -27,25 +27,25 @@ from node_cli.core.docker_config import cleanup_docker_configuration from node_cli.core.host import is_node_inited, save_env_params from node_cli.core.node import compose_node_env, is_base_containers_alive -from node_cli.mirage.record.chain_record import get_mirage_chain_record +from node_cli.fair.record.chain_record import get_fair_chain_record from node_cli.operations import ( - MirageUpdateType, - cleanup_mirage_op, - init_mirage_op, - restore_mirage_op, - update_mirage_op, + FairUpdateType, + cleanup_fair_op, + init_fair_op, + restore_fair_op, + update_fair_op, ) from node_cli.utils.decorators import check_inited, check_not_inited, check_user from node_cli.utils.exit_codes import CLIExitCodes from node_cli.utils.helper import error_exit, get_request, post_request from node_cli.utils.node_type import NodeType -from node_cli.utils.print_formatters import print_node_cmd_error, print_node_info_mirage +from node_cli.utils.print_formatters import print_node_cmd_error, print_node_info_fair from node_cli.utils.texts import safe_load_texts logger = logging.getLogger(__name__) TEXTS = safe_load_texts() -BLUEPRINT_NAME = 'mirage-node' +BLUEPRINT_NAME = 'fair-node' def get_node_info_plain() -> dict: @@ -62,22 +62,22 @@ def get_node_info(format): if format == 'json': print(node_info) else: - print_node_info_mirage(node_info) + print_node_info_fair(node_info) @check_not_inited -def 
restore_mirage(backup_path, env_filepath, config_only=False): - env = compose_node_env(env_filepath, node_type=NodeType.MIRAGE) +def restore_fair(backup_path, env_filepath, config_only=False): + env = compose_node_env(env_filepath, node_type=NodeType.FAIR) if env is None: return save_env_params(env_filepath) env['SKALE_DIR'] = SKALE_DIR - restored_ok = restore_mirage_op(env, backup_path, config_only=config_only) + restored_ok = restore_fair_op(env, backup_path, config_only=config_only) if not restored_ok: error_exit('Restore operation failed', exit_code=CLIExitCodes.OPERATION_EXECUTION_ERROR) time.sleep(RESTORE_SLEEP_TIMEOUT) - print('Mirage node is restored from backup') + print('Fair node is restored from backup') @check_inited @@ -85,85 +85,85 @@ def restore_mirage(backup_path, env_filepath, config_only=False): def migrate_from_boot( env_filepath: str, ) -> None: - logger.info('Migrating from boot to mirage node...') + logger.info('Migrating from boot to fair node...') env = compose_node_env( env_filepath, inited_node=True, sync_schains=False, - node_type=NodeType.MIRAGE, + node_type=NodeType.FAIR, ) - migrate_ok = update_mirage_op(env_filepath, env, update_type=MirageUpdateType.FROM_BOOT) - alive = is_base_containers_alive(node_type=NodeType.MIRAGE) + migrate_ok = update_fair_op(env_filepath, env, update_type=FairUpdateType.FROM_BOOT) + alive = is_base_containers_alive(node_type=NodeType.FAIR) if not migrate_ok or not alive: print_node_cmd_error() return else: - logger.info('Migration from boot to mirage completed successfully') + logger.info('Migration from boot to fair completed successfully') @check_inited @check_user def update(env_filepath: str, pull_config_for_schain: str | None = None) -> None: - logger.info('Updating mirage node...') + logger.info('Updating fair node...') env = compose_node_env( env_filepath, inited_node=True, sync_schains=False, - node_type=NodeType.MIRAGE, + node_type=NodeType.FAIR, pull_config_for_schain=pull_config_for_schain, ) - 
update_ok = update_mirage_op(env_filepath, env, update_type=MirageUpdateType.REGULAR) - alive = is_base_containers_alive(node_type=NodeType.MIRAGE) + update_ok = update_fair_op(env_filepath, env, update_type=FairUpdateType.REGULAR) + alive = is_base_containers_alive(node_type=NodeType.FAIR) if not update_ok or not alive: print_node_cmd_error() return else: - logger.info('Mirage update completed successfully') + logger.info('Fair update completed successfully') def request_repair(snapshot_from: str = '') -> None: - env = compose_node_env(SKALE_DIR_ENV_FILEPATH, save=False, node_type=NodeType.MIRAGE) - record = get_mirage_chain_record(env) + env = compose_node_env(SKALE_DIR_ENV_FILEPATH, save=False, node_type=NodeType.FAIR) + record = get_fair_chain_record(env) record.set_repair_ts(int(time.time())) record.set_snapshot_from(snapshot_from) - print(TEXTS['mirage']['node']['repair']['repair_requested']) + print(TEXTS['fair']['node']['repair']['repair_requested']) @check_inited @check_user def cleanup() -> None: - env = compose_node_env(SKALE_DIR_ENV_FILEPATH, save=False, node_type=NodeType.MIRAGE) - cleanup_mirage_op(env) - logger.info('Mirage node was cleaned up, all containers and data removed') + env = compose_node_env(SKALE_DIR_ENV_FILEPATH, save=False, node_type=NodeType.FAIR) + cleanup_fair_op(env) + logger.info('Fair node was cleaned up, all containers and data removed') cleanup_docker_configuration() @check_not_inited def init(env_filepath: str) -> None: - env = compose_node_env(env_filepath, node_type=NodeType.MIRAGE) + env = compose_node_env(env_filepath, node_type=NodeType.FAIR) if env is None: return save_env_params(env_filepath) env['SKALE_DIR'] = SKALE_DIR - init_ok = init_mirage_op(env_filepath, env) + init_ok = init_fair_op(env_filepath, env) if not init_ok: error_exit('Init operation failed', exit_code=CLIExitCodes.OPERATION_EXECUTION_ERROR) time.sleep(RESTORE_SLEEP_TIMEOUT) - print('Mirage node is initialized') + print('Fair node is initialized') 
@check_inited @check_user def register(ip: str) -> None: if not is_node_inited(): - print(TEXTS['mirage']['node']['not_inited']) + print(TEXTS['fair']['node']['not_inited']) return json_data = {'ip': ip, 'port': DEFAULT_SKALED_BASE_PORT} status, payload = post_request(blueprint=BLUEPRINT_NAME, method='register', json=json_data) if status == 'ok': - msg = TEXTS['mirage']['node']['registered'] + msg = TEXTS['fair']['node']['registered'] logger.info(msg) print(msg) else: diff --git a/node_cli/mirage/record/__init__.py b/node_cli/fair/record/__init__.py similarity index 100% rename from node_cli/mirage/record/__init__.py rename to node_cli/fair/record/__init__.py diff --git a/node_cli/mirage/record/chain_record.py b/node_cli/fair/record/chain_record.py similarity index 86% rename from node_cli/mirage/record/chain_record.py rename to node_cli/fair/record/chain_record.py index 36e0dbeb..a97ca9fe 100644 --- a/node_cli/mirage/record/chain_record.py +++ b/node_cli/fair/record/chain_record.py @@ -22,8 +22,8 @@ from typing import cast from datetime import datetime -from node_cli.core.static_config import get_mirage_chain_name -from node_cli.mirage.record.redis_record import FlatRedisRecord, FieldInfo +from node_cli.core.static_config import get_fair_chain_name +from node_cli.fair.record.redis_record import FlatRedisRecord, FieldInfo logger = logging.getLogger(__name__) @@ -70,12 +70,12 @@ def set_repair_ts(self, value: int | None) -> None: self._set_field('repair_ts', value) -def get_mirage_chain_record(env: dict) -> ChainRecord: - return ChainRecord(get_mirage_chain_name(env)) +def get_fair_chain_record(env: dict) -> ChainRecord: + return ChainRecord(get_fair_chain_name(env)) def migrate_chain_record(env: dict) -> None: version = env['CONTAINER_CONFIGS_STREAM'] - logger.info('Migrating mirage chain record, setting config version to %s', version) - record = get_mirage_chain_record(env) + logger.info('Migrating fair chain record, setting config version to %s', version) + 
record = get_fair_chain_record(env) record.set_config_version(version) diff --git a/node_cli/mirage/record/redis_record.py b/node_cli/fair/record/redis_record.py similarity index 100% rename from node_cli/mirage/record/redis_record.py rename to node_cli/fair/record/redis_record.py diff --git a/node_cli/main.py b/node_cli/main.py index 8ec7420a..28d87fe0 100644 --- a/node_cli/main.py +++ b/node_cli/main.py @@ -38,8 +38,8 @@ from node_cli.cli.ssl import ssl_cli from node_cli.cli.resources_allocation import resources_allocation_cli from node_cli.cli.sync_node import sync_node_cli -from node_cli.cli.mirage_boot import mirage_boot_cli -from node_cli.cli.mirage_node import mirage_node_cli +from node_cli.cli.fair_boot import fair_boot_cli +from node_cli.cli.fair_node import fair_node_cli from node_cli.core.host import init_logs_dir from node_cli.utils.node_type import NodeType from node_cli.configs import LONG_LINE @@ -85,12 +85,12 @@ def info(): def get_sources_list() -> List[click.MultiCommand]: if TYPE == NodeType.SYNC: return [cli, sync_node_cli, ssl_cli] - elif TYPE == NodeType.MIRAGE: + elif TYPE == NodeType.FAIR: return [ cli, logs_cli, - mirage_boot_cli, - mirage_node_cli, + fair_boot_cli, + fair_node_cli, wallet_cli, ssl_cli, ] diff --git a/node_cli/migrations/mirage/from_boot.py b/node_cli/migrations/fair/from_boot.py similarity index 97% rename from node_cli/migrations/mirage/from_boot.py rename to node_cli/migrations/fair/from_boot.py index 04578e68..6affdff3 100644 --- a/node_cli/migrations/mirage/from_boot.py +++ b/node_cli/migrations/fair/from_boot.py @@ -9,7 +9,7 @@ logger = logging.getLogger(__name__) NFT_CHAIN_BASE_PATH = '/etc/nft.conf.d/skale/chains' -NFT_COMMITTEE_SCOPE_CHAIN_NAME = 'mirage-committee' +NFT_COMMITTEE_SCOPE_CHAIN_NAME = 'fair-committee' class NoLegacyNFTChainConfigError(Exception): diff --git a/node_cli/operations/__init__.py b/node_cli/operations/__init__.py index 12c9ef63..4f3aa410 100644 --- a/node_cli/operations/__init__.py +++ 
b/node_cli/operations/__init__.py @@ -21,8 +21,8 @@ update as update_op, init as init_op, init_sync as init_sync_op, - init_mirage_boot as init_mirage_boot_op, - update_mirage_boot as update_mirage_boot_op, + init_fair_boot as init_fair_boot_op, + update_fair_boot as update_fair_boot_op, update_sync as update_sync_op, turn_off as turn_off_op, turn_on as turn_on_op, @@ -30,10 +30,10 @@ cleanup_sync as cleanup_sync_op, configure_nftables, ) -from node_cli.operations.mirage import ( # noqa - init as init_mirage_op, - update_mirage as update_mirage_op, - MirageUpdateType, - restore_mirage as restore_mirage_op, - cleanup as cleanup_mirage_op, +from node_cli.operations.fair import ( # noqa + init as init_fair_op, + update_fair as update_fair_op, + FairUpdateType, + restore_fair as restore_fair_op, + cleanup as cleanup_fair_op, ) diff --git a/node_cli/operations/base.py b/node_cli/operations/base.py index 7cf51910..a5e29a4c 100644 --- a/node_cli/operations/base.py +++ b/node_cli/operations/base.py @@ -66,7 +66,7 @@ remove_dynamic_containers, ) from node_cli.utils.helper import rm_dir, str_to_bool -from node_cli.utils.meta import CliMetaManager, MirageCliMetaManager +from node_cli.utils.meta import CliMetaManager, FairCliMetaManager from node_cli.utils.node_type import NodeType from node_cli.utils.print_formatters import print_failed_requirements_checks @@ -151,8 +151,8 @@ def update(env_filepath: str, env: Dict, node_type: NodeType) -> bool: @checked_host -def update_mirage_boot(env_filepath: str, env: Dict) -> bool: - compose_rm(node_type=NodeType.MIRAGE, env=env) +def update_fair_boot(env_filepath: str, env: Dict) -> bool: + compose_rm(node_type=NodeType.FAIR, env=env) remove_dynamic_containers() cleanup_volume_artifacts(env['DISK_MOUNTPOINT']) @@ -170,7 +170,7 @@ def update_mirage_boot(env_filepath: str, env: Dict) -> bool: prepare_host(env_filepath, env['ENV_TYPE']) - meta_manager = MirageCliMetaManager() + meta_manager = FairCliMetaManager() current_stream = 
meta_manager.get_meta_info().config_stream skip_cleanup = env.get('SKIP_DOCKER_CLEANUP') == 'True' if not skip_cleanup and current_stream != env['CONTAINER_CONFIGS_STREAM']: @@ -187,8 +187,8 @@ def update_mirage_boot(env_filepath: str, env: Dict) -> bool: distro.id(), distro.version(), ) - update_images(env=env, node_type=NodeType.MIRAGE) - compose_up(env=env, node_type=NodeType.MIRAGE, is_mirage_boot=True) + update_images(env=env, node_type=NodeType.FAIR) + compose_up(env=env, node_type=NodeType.FAIR, is_fair_boot=True) return True @@ -228,7 +228,7 @@ def init(env_filepath: str, env: dict, node_type: NodeType) -> None: @checked_host -def init_mirage_boot(env_filepath: str, env: dict) -> None: +def init_fair_boot(env_filepath: str, env: dict) -> None: sync_skale_node() cleanup_volume_artifacts(env['DISK_MOUNTPOINT']) @@ -247,16 +247,16 @@ def init_mirage_boot(env_filepath: str, env: dict) -> None: generate_nginx_config() prepare_block_device(env['DISK_MOUNTPOINT'], force=env['ENFORCE_BTRFS'] == 'True') - meta_manager = MirageCliMetaManager() + meta_manager = FairCliMetaManager() meta_manager.update_meta( VERSION, env['CONTAINER_CONFIGS_STREAM'], distro.id(), distro.version(), ) - update_images(env=env, node_type=NodeType.MIRAGE) + update_images(env=env, node_type=NodeType.FAIR) - compose_up(env=env, node_type=NodeType.MIRAGE, is_mirage_boot=True) + compose_up(env=env, node_type=NodeType.FAIR, is_fair_boot=True) def init_sync( diff --git a/node_cli/operations/mirage.py b/node_cli/operations/fair.py similarity index 81% rename from node_cli/operations/mirage.py rename to node_cli/operations/fair.py index b30c5038..0471c399 100644 --- a/node_cli/operations/mirage.py +++ b/node_cli/operations/fair.py @@ -36,8 +36,8 @@ from node_cli.core.nftables import configure_nftables from node_cli.core.nginx import generate_nginx_config from node_cli.core.schains import cleanup_datadir_for_single_chain_node -from node_cli.migrations.mirage.from_boot import 
migrate_nftables_from_boot -from node_cli.mirage.record.chain_record import migrate_chain_record +from node_cli.migrations.fair.from_boot import migrate_nftables_from_boot +from node_cli.fair.record.chain_record import migrate_chain_record from node_cli.operations.base import checked_host, turn_off from node_cli.operations.common import configure_filebeat, configure_flask, unpack_backup_archive from node_cli.operations.config_repo import ( @@ -56,13 +56,13 @@ wait_for_container, ) from node_cli.utils.helper import rm_dir, str_to_bool -from node_cli.utils.meta import MirageCliMetaManager +from node_cli.utils.meta import FairCliMetaManager from node_cli.utils.print_formatters import print_failed_requirements_checks logger = logging.getLogger(__name__) -class MirageUpdateType(Enum): +class FairUpdateType(Enum): REGULAR = 'regular' INFRA_ONLY = 'infra_only' FROM_BOOT = 'from_boot' @@ -87,23 +87,23 @@ def init(env_filepath: str, env: dict) -> bool: prepare_block_device(env['DISK_MOUNTPOINT'], force=env['ENFORCE_BTRFS'] == 'True') - meta_manager = MirageCliMetaManager() + meta_manager = FairCliMetaManager() meta_manager.update_meta( VERSION, env['CONTAINER_CONFIGS_STREAM'], distro.id(), distro.version(), ) - update_images(env=env, node_type=NodeType.MIRAGE) - compose_up(env=env, node_type=NodeType.MIRAGE) + update_images(env=env, node_type=NodeType.FAIR) + compose_up(env=env, node_type=NodeType.FAIR) wait_for_container(REDIS_SERVICE_DICT['redis']) time.sleep(REDIS_START_TIMEOUT) return True @checked_host -def update_mirage_boot(env_filepath: str, env: dict) -> bool: - compose_rm(node_type=NodeType.MIRAGE, env=env) +def update_fair_boot(env_filepath: str, env: dict) -> bool: + compose_rm(node_type=NodeType.FAIR, env=env) remove_dynamic_containers() cleanup_volume_artifacts(env['DISK_MOUNTPOINT']) @@ -121,7 +121,7 @@ def update_mirage_boot(env_filepath: str, env: dict) -> bool: prepare_host(env_filepath, env['ENV_TYPE']) - meta_manager = MirageCliMetaManager() + 
meta_manager = FairCliMetaManager() current_stream = meta_manager.get_meta_info().config_stream skip_cleanup = env.get('SKIP_DOCKER_CLEANUP') == 'True' if not skip_cleanup and current_stream != env['CONTAINER_CONFIGS_STREAM']: @@ -138,15 +138,15 @@ def update_mirage_boot(env_filepath: str, env: dict) -> bool: distro.id(), distro.version(), ) - update_images(env=env, node_type=NodeType.MIRAGE) - compose_up(env=env, node_type=NodeType.MIRAGE, is_mirage_boot=True) + update_images(env=env, node_type=NodeType.FAIR) + compose_up(env=env, node_type=NodeType.FAIR, is_fair_boot=True) return True @checked_host -def update_mirage(env_filepath: str, env: dict, update_type: MirageUpdateType) -> bool: - compose_rm(node_type=NodeType.MIRAGE, env=env) - if update_type not in (MirageUpdateType.INFRA_ONLY, MirageUpdateType.FROM_BOOT): +def update_fair(env_filepath: str, env: dict, update_type: FairUpdateType) -> bool: + compose_rm(node_type=NodeType.FAIR, env=env) + if update_type not in (FairUpdateType.INFRA_ONLY, FairUpdateType.FROM_BOOT): remove_dynamic_containers() sync_skale_node() @@ -159,7 +159,7 @@ def update_mirage(env_filepath: str, env: dict, update_type: MirageUpdateType) - generate_nginx_config() prepare_host(env_filepath, env['ENV_TYPE'], allocation=True) - meta_manager = MirageCliMetaManager() + meta_manager = FairCliMetaManager() current_stream = meta_manager.get_meta_info().config_stream skip_cleanup = env.get('SKIP_DOCKER_CLEANUP') == 'True' if not skip_cleanup and current_stream != env['CONTAINER_CONFIGS_STREAM']: @@ -177,22 +177,22 @@ def update_mirage(env_filepath: str, env: dict, update_type: MirageUpdateType) - distro.version(), ) - if update_type == MirageUpdateType.FROM_BOOT: + if update_type == FairUpdateType.FROM_BOOT: migrate_nftables_from_boot() - update_images(env=env, node_type=NodeType.MIRAGE) + update_images(env=env, node_type=NodeType.FAIR) - compose_up(env=env, node_type=NodeType.MIRAGE, services=list(REDIS_SERVICE_DICT)) + compose_up(env=env, 
node_type=NodeType.FAIR, services=list(REDIS_SERVICE_DICT)) wait_for_container(REDIS_SERVICE_DICT['redis']) time.sleep(REDIS_START_TIMEOUT) - if update_type == MirageUpdateType.FROM_BOOT: + if update_type == FairUpdateType.FROM_BOOT: migrate_chain_record(env) - compose_up(env=env, node_type=NodeType.MIRAGE) + compose_up(env=env, node_type=NodeType.FAIR) return True -def restore_mirage(env, backup_path, config_only=False): +def restore_fair(env, backup_path, config_only=False): unpack_backup_archive(backup_path) failed_checks = run_host_checks( env['DISK_MOUNTPOINT'], @@ -215,7 +215,7 @@ def restore_mirage(env, backup_path, config_only=False): link_env_file() - meta_manager = MirageCliMetaManager() + meta_manager = FairCliMetaManager() meta_manager.update_meta( VERSION, env['CONTAINER_CONFIGS_STREAM'], @@ -224,7 +224,7 @@ def restore_mirage(env, backup_path, config_only=False): ) if not config_only: - compose_up(env=env, node_type=NodeType.MIRAGE) + compose_up(env=env, node_type=NodeType.FAIR) failed_checks = run_host_checks( env['DISK_MOUNTPOINT'], @@ -240,7 +240,7 @@ def restore_mirage(env, backup_path, config_only=False): def cleanup(env) -> None: - turn_off(env, node_type=NodeType.MIRAGE) + turn_off(env, node_type=NodeType.FAIR) cleanup_datadir_for_single_chain_node() rm_dir(GLOBAL_SKALE_DIR) rm_dir(SKALE_DIR) diff --git a/node_cli/utils/docker_utils.py b/node_cli/utils/docker_utils.py index 0361eecd..6986d44e 100644 --- a/node_cli/utils/docker_utils.py +++ b/node_cli/utils/docker_utils.py @@ -31,7 +31,7 @@ from node_cli.configs import ( COMPOSE_PATH, - MIRAGE_COMPOSE_PATH, + FAIR_COMPOSE_PATH, NGINX_CONTAINER_NAME, REMOVED_CONTAINERS_FOLDER_PATH, SGX_CERTIFICATES_DIR_NAME, @@ -64,16 +64,16 @@ 'bounty': 'skale_bounty', } -BASE_MIRAGE_COMPOSE_SERVICES = { +BASE_FAIR_COMPOSE_SERVICES = { **CORE_COMMON_COMPOSE_SERVICES, - 'mirage-admin': 'mirage_admin', - 'mirage-api': 'mirage_api', + 'fair-admin': 'fair_admin', + 'fair-api': 'fair_api', } 
-BASE_MIRAGE_BOOT_COMPOSE_SERVICES = { +BASE_FAIR_BOOT_COMPOSE_SERVICES = { **CORE_COMMON_COMPOSE_SERVICES, - 'mirage-boot': 'mirage_boot_admin', - 'mirage-boot-api': 'mirage_boot_api', + 'fair-boot': 'fair_boot_admin', + 'fair-boot-api': 'fair_boot_api', } BASE_SYNC_COMPOSE_SERVICES = { @@ -285,8 +285,8 @@ def compose_build(env: dict, node_type: NodeType): def get_compose_path(node_type: NodeType) -> str: if node_type == NodeType.SYNC: return SYNC_COMPOSE_PATH - elif node_type == NodeType.MIRAGE: - return MIRAGE_COMPOSE_PATH + elif node_type == NodeType.FAIR: + return FAIR_COMPOSE_PATH else: return COMPOSE_PATH @@ -294,8 +294,8 @@ def get_compose_path(node_type: NodeType) -> str: def get_compose_services(node_type: NodeType) -> list[str]: if node_type == NodeType.SYNC: result = list(BASE_SYNC_COMPOSE_SERVICES) - elif node_type == NodeType.MIRAGE: - result = list(BASE_MIRAGE_COMPOSE_SERVICES) + elif node_type == NodeType.FAIR: + result = list(BASE_FAIR_COMPOSE_SERVICES) else: result = list(BASE_SKALE_COMPOSE_SERVICES) @@ -312,7 +312,7 @@ def get_up_compose_cmd(node_type: NodeType, services: list[str] | None = None) - def compose_up( - env, node_type: NodeType, is_mirage_boot: bool = False, services: list[str] | None = None + env, node_type: NodeType, is_fair_boot: bool = False, services: list[str] | None = None ): if node_type == NodeType.SYNC: logger.info('Running containers for sync node') @@ -322,19 +322,19 @@ def compose_up( if 'SGX_CERTIFICATES_DIR_NAME' not in env: env['SGX_CERTIFICATES_DIR_NAME'] = SGX_CERTIFICATES_DIR_NAME - if node_type == NodeType.MIRAGE: - logger.info('Running mirage base set of containers') - if is_mirage_boot: - logger.debug('Launching mirage boot containers with env %s', env) + if node_type == NodeType.FAIR: + logger.info('Running fair base set of containers') + if is_fair_boot: + logger.debug('Launching fair boot containers with env %s', env) run_cmd( cmd=get_up_compose_cmd( - node_type=NodeType.MIRAGE, 
services=list(BASE_MIRAGE_BOOT_COMPOSE_SERVICES) + node_type=NodeType.FAIR, services=list(BASE_FAIR_BOOT_COMPOSE_SERVICES) ), env=env, ) else: - logger.debug('Launching mirage containers with env %s', env) - run_cmd(cmd=get_up_compose_cmd(node_type=NodeType.MIRAGE, services=services), env=env) + logger.debug('Launching fair containers with env %s', env) + run_cmd(cmd=get_up_compose_cmd(node_type=NodeType.FAIR, services=services), env=env) else: logger.info('Running skale node base set of containers') logger.debug('Launching skale node containers with env %s', env) @@ -394,15 +394,15 @@ def is_container_running(name: str, dclient: Optional[DockerClient] = None) -> b def is_api_running(node_type: NodeType, dclient: Optional[DockerClient] = None) -> bool: - if node_type == NodeType.MIRAGE: - return is_container_running(name='mirage_api', dclient=dclient) + if node_type == NodeType.FAIR: + return is_container_running(name='fair_api', dclient=dclient) else: return is_container_running(name='skale_api', dclient=dclient) def is_admin_running(node_type: NodeType, client: Optional[DockerClient] = None) -> bool: - if node_type == NodeType.MIRAGE: - result = is_container_running(name='mirage_admin', dclient=client) + if node_type == NodeType.FAIR: + result = is_container_running(name='fair_admin', dclient=client) elif node_type == NodeType.SYNC: result = is_container_running(name='skale_sync_admin', dclient=client) else: diff --git a/node_cli/utils/meta.py b/node_cli/utils/meta.py index 556f5002..0dafd54d 100644 --- a/node_cli/utils/meta.py +++ b/node_cli/utils/meta.py @@ -39,7 +39,7 @@ def asdict(self) -> dict: @dataclass -class MirageCliMeta(CliMetaBase): +class FairCliMeta(CliMetaBase): def asdict(self) -> dict: return { 'version': self.version, @@ -121,19 +121,19 @@ def update_meta( self.save_meta(meta) -class MirageCliMetaManager(BaseCliMetaManager): - def get_meta_info(self, raw: bool = False) -> MirageCliMeta | dict | None: +class 
FairCliMetaManager(BaseCliMetaManager): + def get_meta_info(self, raw: bool = False) -> FairCliMeta | dict | None: plain_meta = self._get_plain_meta() if not raw and not plain_meta: return None - allowed_fields = set(MirageCliMeta.__dataclass_fields__.keys()) + allowed_fields = set(FairCliMeta.__dataclass_fields__.keys()) clean_plain_meta = {k: v for k, v in plain_meta.items() if k in allowed_fields} if raw: return clean_plain_meta - return MirageCliMeta(**clean_plain_meta) + return FairCliMeta(**clean_plain_meta) - def compose_default_meta(self) -> MirageCliMeta: - return MirageCliMeta( + def compose_default_meta(self) -> FairCliMeta: + return FairCliMeta( version=DEFAULT_VERSION, config_stream=DEFAULT_CONFIG_STREAM, os_id=DEFAULT_OS_ID, @@ -148,5 +148,5 @@ def update_meta( os_version: str, ) -> None: self.ensure_meta() - meta = MirageCliMeta(version, config_stream, os_id, os_version) + meta = FairCliMeta(version, config_stream, os_id, os_version) self.save_meta(meta) diff --git a/node_cli/utils/node_type.py b/node_cli/utils/node_type.py index 341479f4..f35d4640 100644 --- a/node_cli/utils/node_type.py +++ b/node_cli/utils/node_type.py @@ -23,4 +23,4 @@ class NodeType(Enum): REGULAR = 0 SYNC = 1 - MIRAGE = 2 + FAIR = 2 diff --git a/node_cli/utils/print_formatters.py b/node_cli/utils/print_formatters.py index e4c8aabf..1fd1ca33 100644 --- a/node_cli/utils/print_formatters.py +++ b/node_cli/utils/print_formatters.py @@ -46,13 +46,13 @@ def print_wallet_info(wallet): ) -def print_mirage_wallet_info(wallet): +def print_fair_wallet_info(wallet): print( inspect.cleandoc(f""" {LONG_LINE} Address: {wallet['address'].lower()} - Node balance: {wallet['mirage_balance']} MIRAGE - Node balance WEI: {wallet['mirage_balance_wei']} MIRAGE WEI + Node balance: {wallet['fair_balance']} FAIR + Node balance WEI: {wallet['fair_balance_wei']} FAIR WEI {LONG_LINE} """) ) @@ -258,7 +258,7 @@ def print_node_info(node, node_status): ) -def print_node_info_mirage(node): +def 
print_node_info_fair(node): print( inspect.cleandoc(f""" {LONG_LINE} diff --git a/scripts/build.sh b/scripts/build.sh old mode 100755 new mode 100644 index d99bb45a..810e85b9 --- a/scripts/build.sh +++ b/scripts/build.sh @@ -24,7 +24,7 @@ fi if [ -z "$3" ] then - (>&2 echo 'You should provide type: normal, sync or mirage') + (>&2 echo 'You should provide type: normal, sync or fair') echo $USAGE_MSG exit 1 fi @@ -39,8 +39,8 @@ OS=`uname -s`-`uname -m` if [ "$TYPE" = "sync" ]; then EXECUTABLE_NAME=skale-$VERSION-$OS-sync -elif [ "$TYPE" = "mirage" ]; then - EXECUTABLE_NAME=skale-$VERSION-$OS-mirage +elif [ "$TYPE" = "fair" ]; then + EXECUTABLE_NAME=skale-$VERSION-$OS-fair else EXECUTABLE_NAME=skale-$VERSION-$OS fi diff --git a/scripts/generate_info.sh b/scripts/generate_info.sh old mode 100755 new mode 100644 index f5933f91..f4993b7e --- a/scripts/generate_info.sh +++ b/scripts/generate_info.sh @@ -18,7 +18,7 @@ if [ -z "$BRANCH" ]; then exit 1 fi if [ -z "$TYPE_STR" ]; then - (>&2 echo 'You should provide type: normal, sync or mirage') + (>&2 echo 'You should provide type: normal, sync or fair') echo $USAGE_MSG exit 1 fi @@ -38,11 +38,11 @@ case "$TYPE_STR" in sync) TYPE_ENUM="NodeType.SYNC" ;; - mirage) - TYPE_ENUM="NodeType.MIRAGE" + fair) + TYPE_ENUM="NodeType.FAIR" ;; *) - (>&2 echo "Error: Invalid type '$TYPE_STR'. Must be 'normal', 'sync', or 'mirage'") + (>&2 echo "Error: Invalid type '$TYPE_STR'. 
Must be 'normal', 'sync', or 'fair'") exit 1 ;; esac diff --git a/tests/cli/mirage_cli_test.py b/tests/cli/fair_cli_test.py similarity index 75% rename from tests/cli/mirage_cli_test.py rename to tests/cli/fair_cli_test.py index fb7462cd..d29ae357 100644 --- a/tests/cli/mirage_cli_test.py +++ b/tests/cli/fair_cli_test.py @@ -3,20 +3,20 @@ from click.testing import CliRunner -from node_cli.cli.mirage_boot import ( +from node_cli.cli.fair_boot import ( init_boot, register_boot, signature_boot, ) -from node_cli.cli.mirage_node import ( +from node_cli.cli.fair_node import ( backup_node, migrate_node, restore_node, ) -@mock.patch('node_cli.cli.mirage_node.restore_mirage') -def test_mirage_node_restore(mock_restore_core, valid_env_file, tmp_path): +@mock.patch('node_cli.cli.fair_node.restore_fair') +def test_fair_node_restore(mock_restore_core, valid_env_file, tmp_path): runner = CliRunner() backup_file = tmp_path / 'backup.tar.gz' backup_file.touch() @@ -28,8 +28,8 @@ def test_mirage_node_restore(mock_restore_core, valid_env_file, tmp_path): mock_restore_core.assert_called_once_with(backup_path, valid_env_file, False) -@mock.patch('node_cli.cli.mirage_node.restore_mirage') -def test_mirage_node_restore_config_only(mock_restore_core, valid_env_file, tmp_path): +@mock.patch('node_cli.cli.fair_node.restore_fair') +def test_fair_node_restore_config_only(mock_restore_core, valid_env_file, tmp_path): runner = CliRunner() backup_file = tmp_path / 'backup_config.tar.gz' backup_file.touch() @@ -41,8 +41,8 @@ def test_mirage_node_restore_config_only(mock_restore_core, valid_env_file, tmp_ mock_restore_core.assert_called_once_with(backup_path, valid_env_file, True) -@mock.patch('node_cli.cli.mirage_node.backup') -def test_mirage_node_backup(mock_backup_core, tmp_path): +@mock.patch('node_cli.cli.fair_node.backup') +def test_fair_node_backup(mock_backup_core, tmp_path): runner = CliRunner() backup_folder = str(tmp_path / 'backups') pathlib.Path(backup_folder).mkdir(exist_ok=True) 
@@ -53,8 +53,8 @@ def test_mirage_node_backup(mock_backup_core, tmp_path): mock_backup_core.assert_called_once_with(backup_folder) -@mock.patch('node_cli.cli.mirage_boot.register') -def test_mirage_boot_register(mock_register_core): +@mock.patch('node_cli.cli.fair_boot.register') +def test_fair_boot_register(mock_register_core): runner = CliRunner() name = 'test-boot-node' ip = '1.2.3.4' @@ -71,8 +71,8 @@ def test_mirage_boot_register(mock_register_core): ) -@mock.patch('node_cli.cli.mirage_boot.get_node_signature') -def test_mirage_boot_signature(mock_signature_core): +@mock.patch('node_cli.cli.fair_boot.get_node_signature') +def test_fair_boot_signature(mock_signature_core): runner = CliRunner() validator_id = '101' signature_val = '0xdef456' @@ -85,8 +85,8 @@ def test_mirage_boot_signature(mock_signature_core): assert f'Signature: {signature_val}' in result.output -@mock.patch('node_cli.cli.mirage_boot.init') -def test_mirage_boot_init(mock_init_core, valid_env_file): +@mock.patch('node_cli.cli.fair_boot.init') +def test_fair_boot_init(mock_init_core, valid_env_file): runner = CliRunner() result = runner.invoke(init_boot, [valid_env_file]) @@ -94,8 +94,8 @@ def test_mirage_boot_init(mock_init_core, valid_env_file): mock_init_core.assert_called_once_with(valid_env_file) -@mock.patch('node_cli.cli.mirage_node.migrate_from_boot') -def test_mirage_node_migrate(mock_migrate_core, valid_env_file): +@mock.patch('node_cli.cli.fair_node.migrate_from_boot') +def test_fair_node_migrate(mock_migrate_core, valid_env_file): runner = CliRunner() result = runner.invoke(migrate_node, ['--yes', valid_env_file]) diff --git a/tests/cli/node_test.py b/tests/cli/node_test.py index 60344b71..834d85f9 100644 --- a/tests/cli/node_test.py +++ b/tests/cli/node_test.py @@ -326,7 +326,7 @@ def test_backup(): 'node_type,test_user_conf', [ (NodeType.REGULAR, 'regular_user_conf'), - (NodeType.MIRAGE, 'mirage_user_conf'), + (NodeType.FAIR, 'fair_user_conf'), (NodeType.SYNC, 'sync_user_conf'), 
], ) diff --git a/tests/configs/configs_env_validate_test.py b/tests/configs/configs_env_validate_test.py index a9703b38..be7d1eb8 100644 --- a/tests/configs/configs_env_validate_test.py +++ b/tests/configs/configs_env_validate_test.py @@ -14,8 +14,8 @@ ) from node_cli.configs.user import ( ALLOWED_ENV_TYPES, - MirageBootUserConfig, - MirageUserConfig, + FairBootUserConfig, + FairUserConfig, SkaleUserConfig, SyncUserConfig, get_user_config_class, @@ -37,17 +37,17 @@ def json(self): @pytest.mark.parametrize( - 'node_type, is_mirage_boot, expected_type', + 'node_type, is_fair_boot, expected_type', [ (NodeType.REGULAR, False, SkaleUserConfig), (NodeType.SYNC, False, SyncUserConfig), - (NodeType.MIRAGE, True, MirageBootUserConfig), - (NodeType.MIRAGE, False, MirageUserConfig), + (NodeType.FAIR, True, FairBootUserConfig), + (NodeType.FAIR, False, FairUserConfig), ], - ids=['regular', 'sync', 'mirage_boot', 'mirage_regular'], + ids=['regular', 'sync', 'fair_boot', 'fair_regular'], ) -def test_build_env_params_keys(node_type, is_mirage_boot, expected_type): - env_type = get_user_config_class(node_type=node_type, is_mirage_boot=is_mirage_boot) +def test_build_env_params_keys(node_type, is_fair_boot, expected_type): + env_type = get_user_config_class(node_type=node_type, is_fair_boot=is_fair_boot) assert env_type == expected_type diff --git a/tests/conftest.py b/tests/conftest.py index c6f71fac..05da0fae 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -324,7 +324,7 @@ def regular_user_conf(tmp_path): @pytest.fixture -def mirage_user_conf(tmp_path): +def fair_user_conf(tmp_path): test_env_path = pathlib.Path(tmp_path / 'test-env') try: test_env = """ @@ -335,7 +335,7 @@ def mirage_user_conf(tmp_path): DISK_MOUNTPOINT=/dev/sss ENV_TYPE='devnet' ENFORCE_BTRFS=False - MIRAGE_CONTRACTS='test-mirage' + FAIR_CONTRACTS='test-fair' """ with open(test_env_path, 'w') as env_file: env_file.write(test_env) @@ -345,7 +345,7 @@ def mirage_user_conf(tmp_path): @pytest.fixture 
-def mirage_boot_user_conf(tmp_path): +def fair_boot_user_conf(tmp_path): test_env_path = pathlib.Path(tmp_path / 'test-env') try: test_env = """ diff --git a/tests/core/core_checks_test.py b/tests/core/core_checks_test.py index cd3a6acb..a4f7d437 100644 --- a/tests/core/core_checks_test.py +++ b/tests/core/core_checks_test.py @@ -40,7 +40,7 @@ def requirements_data(): @pytest.fixture -def mirage_requirements_data(requirements_data): +def fair_requirements_data(requirements_data): reqs = {k: v.copy() for k, v in requirements_data.items()} reqs['package']['lvm2'] = 'disabled' return reqs @@ -338,16 +338,16 @@ def test_get_checks(requirements_data): assert len(checks) == 2 -def test_get_checks_mirage(mirage_requirements_data): +def test_get_checks_fair(fair_requirements_data): disk = 'test-disk' - mirage_checkers = get_all_checkers(disk, mirage_requirements_data) - - mirage_all_checks = get_checks(mirage_checkers, CheckType.ALL) - mirage_all_names = {f.func.__name__ for f in mirage_all_checks} - assert 'network' in mirage_all_names - assert 'lvm2' not in mirage_all_names - assert 'cpu_total' in mirage_all_names - assert 'btrfs_progs' in mirage_all_names + fair_checkers = get_all_checkers(disk, fair_requirements_data) + + fair_all_checks = get_checks(fair_checkers, CheckType.ALL) + fair_all_names = {f.func.__name__ for f in fair_all_checks} + assert 'network' in fair_all_names + assert 'lvm2' not in fair_all_names + assert 'cpu_total' in fair_all_names + assert 'btrfs_progs' in fair_all_names def test_get_save_report(tmp_dir_path): diff --git a/tests/core/core_node_test.py b/tests/core/core_node_test.py index fb1484d6..894e07b6 100644 --- a/tests/core/core_node_test.py +++ b/tests/core/core_node_test.py @@ -33,15 +33,15 @@ WRONG_CONTAINERS = [ 'WRONG_CONTAINER_1', 'skale_WRONG_CONTAINER_4', - 'mirage_WRONG_CONTAINER_6', + 'fair_WRONG_CONTAINER_6', 'sync_WRONG_CONTAINER_8', ] NODE_TYPE_BOOT_COMBINATIONS: list[tuple[NodeType, bool]] = [ (NodeType.REGULAR, False), 
(NodeType.SYNC, False), - (NodeType.MIRAGE, True), - (NodeType.MIRAGE, False), + (NodeType.FAIR, True), + (NodeType.FAIR, False), ] alive_test_params = [ @@ -120,7 +120,7 @@ def manage_node_containers(request): indirect=['manage_node_containers'], ) def test_is_base_containers_alive(manage_node_containers, node_type, is_boot): - assert is_base_containers_alive(node_type=node_type, is_mirage_boot=is_boot) is True + assert is_base_containers_alive(node_type=node_type, is_fair_boot=is_boot) is True @pytest.mark.parametrize( @@ -129,7 +129,7 @@ def test_is_base_containers_alive(manage_node_containers, node_type, is_boot): indirect=['manage_node_containers'], ) def test_is_base_containers_alive_wrong(manage_node_containers, node_type, is_boot): - assert is_base_containers_alive(node_type=node_type, is_mirage_boot=is_boot) is False + assert is_base_containers_alive(node_type=node_type, is_fair_boot=is_boot) is False @pytest.mark.parametrize( @@ -138,12 +138,12 @@ def test_is_base_containers_alive_wrong(manage_node_containers, node_type, is_bo indirect=['manage_node_containers'], ) def test_is_base_containers_alive_missing(manage_node_containers, node_type, is_boot): - assert is_base_containers_alive(node_type=node_type, is_mirage_boot=is_boot) is False + assert is_base_containers_alive(node_type=node_type, is_fair_boot=is_boot) is False @pytest.mark.parametrize('node_type, is_boot', NODE_TYPE_BOOT_COMBINATIONS) def test_is_base_containers_alive_empty(node_type, is_boot): - assert is_base_containers_alive(node_type=node_type, is_mirage_boot=is_boot) is False + assert is_base_containers_alive(node_type=node_type, is_fair_boot=is_boot) is False @pytest.mark.parametrize( @@ -183,8 +183,8 @@ def test_is_base_containers_alive_empty(node_type, is_boot): False, ), ( - NodeType.MIRAGE, - 'mirage_boot_user_conf', + NodeType.FAIR, + 'fair_boot_user_conf', True, True, False, @@ -193,8 +193,8 @@ def test_is_base_containers_alive_empty(node_type, is_boot): False, ), ( - 
NodeType.MIRAGE, - 'mirage_user_conf', + NodeType.FAIR, + 'fair_user_conf', False, True, False, @@ -207,8 +207,8 @@ def test_is_base_containers_alive_empty(node_type, is_boot): 'regular', 'regular_sync_flag', 'sync', - 'mirage_boot', - 'mirage_regular', + 'fair_boot', + 'fair_regular', ], ) def test_compose_node_env( @@ -234,7 +234,7 @@ def test_compose_node_env( inited_node=inited_node, sync_schains=sync_schains, node_type=node_type, - is_mirage_boot=is_boot, + is_fair_boot=is_boot, save=True, ) @@ -358,7 +358,7 @@ def test_update_node(regular_user_conf, mocked_g_config, resource_file, inited_n assert result is None -@pytest.mark.parametrize('node_type', [NodeType.REGULAR, NodeType.SYNC, NodeType.MIRAGE]) +@pytest.mark.parametrize('node_type', [NodeType.REGULAR, NodeType.SYNC, NodeType.FAIR]) @mock.patch('node_cli.core.node.is_admin_running', return_value=False) @mock.patch('node_cli.core.node.is_api_running', return_value=False) @mock.patch('node_cli.utils.helper.requests.get') @@ -379,7 +379,7 @@ def test_is_update_safe_when_admin_not_running_for_sync( mock_requests_get.assert_not_called() -@pytest.mark.parametrize('node_type', [NodeType.REGULAR, NodeType.SYNC, NodeType.MIRAGE]) +@pytest.mark.parametrize('node_type', [NodeType.REGULAR, NodeType.SYNC, NodeType.FAIR]) @pytest.mark.parametrize( 'api_is_safe, expected_result', [(True, True), (False, False)], @@ -395,7 +395,7 @@ def test_is_update_safe_when_admin_running( mock_requests_get.assert_called_once() -@pytest.mark.parametrize('node_type', [NodeType.REGULAR, NodeType.MIRAGE]) +@pytest.mark.parametrize('node_type', [NodeType.REGULAR, NodeType.FAIR]) @pytest.mark.parametrize( 'api_is_safe, expected_result', [(True, True), (False, False)], @@ -417,7 +417,7 @@ def test_is_update_safe_when_only_api_running_for_regular( mock_requests_get.assert_called_once() -@pytest.mark.parametrize('node_type', [NodeType.REGULAR, NodeType.SYNC, NodeType.MIRAGE]) +@pytest.mark.parametrize('node_type', [NodeType.REGULAR, 
NodeType.SYNC, NodeType.FAIR]) @mock.patch('node_cli.core.node.is_admin_running', return_value=True) @mock.patch('node_cli.utils.helper.requests.get') def test_is_update_safe_when_api_call_fails(mock_requests_get, mock_is_admin_running, node_type): diff --git a/tests/core/nginx_test.py b/tests/core/nginx_test.py index b19a21ca..56b6eb49 100644 --- a/tests/core/nginx_test.py +++ b/tests/core/nginx_test.py @@ -63,16 +63,16 @@ def nginx_template(): (NodeType.REGULAR, False, True, False), (NodeType.SYNC, True, True, True), (NodeType.SYNC, False, True, False), - (NodeType.MIRAGE, True, False, True), - (NodeType.MIRAGE, False, False, False), + (NodeType.FAIR, True, False, True), + (NodeType.FAIR, False, False, False), ], ids=[ 'regular_ssl_on', 'regular_ssl_off', 'regular_ssl_on', 'regular_ssl_off', - 'mirage_ssl_on', - 'mirage_ssl_off', + 'fair_ssl_on', + 'fair_ssl_off', ], ) @mock.patch('node_cli.core.nginx.check_ssl_certs') @@ -134,7 +134,7 @@ def test_check_ssl_certs_missing_both(ssl_folder): [ (NodeType.REGULAR, True), (NodeType.SYNC, True), - (NodeType.MIRAGE, False), + (NodeType.FAIR, False), ], ) @mock.patch('node_cli.core.nginx.TYPE') diff --git a/tests/mirage/__init__.py b/tests/fair/__init__.py similarity index 100% rename from tests/mirage/__init__.py rename to tests/fair/__init__.py diff --git a/tests/mirage/mirage_node_test.py b/tests/fair/fair_node_test.py similarity index 61% rename from tests/mirage/mirage_node_test.py rename to tests/fair/fair_node_test.py index f4f9d340..fab00451 100644 --- a/tests/mirage/mirage_node_test.py +++ b/tests/fair/fair_node_test.py @@ -5,19 +5,19 @@ from node_cli.configs import SKALE_DIR from node_cli.configs.user import SKALE_DIR_ENV_FILEPATH -from node_cli.mirage.mirage_boot import init as init_boot -from node_cli.mirage.mirage_boot import update -from node_cli.mirage.mirage_node import cleanup, migrate_from_boot, request_repair, restore_mirage -from node_cli.operations.mirage import MirageUpdateType +from 
node_cli.fair.fair_boot import init as init_boot +from node_cli.fair.fair_boot import update +from node_cli.fair.fair_node import cleanup, migrate_from_boot, request_repair, restore_fair +from node_cli.operations.fair import FairUpdateType from node_cli.utils.node_type import NodeType from tests.helper import CURRENT_DATETIME, CURRENT_TIMESTAMP -@mock.patch('node_cli.mirage.mirage_node.time.sleep') -@mock.patch('node_cli.mirage.mirage_node.restore_mirage_op') -@mock.patch('node_cli.mirage.mirage_node.save_env_params') -@mock.patch('node_cli.mirage.mirage_node.compose_node_env') -def test_restore_mirage( +@mock.patch('node_cli.fair.fair_node.time.sleep') +@mock.patch('node_cli.fair.fair_node.restore_fair_op') +@mock.patch('node_cli.fair.fair_node.save_env_params') +@mock.patch('node_cli.fair.fair_node.compose_node_env') +def test_restore_fair( mock_compose_env, mock_save_env, mock_restore_op, @@ -30,20 +30,20 @@ def test_restore_mirage( mock_restore_op.return_value = True backup_path = '/fake/backup' - restore_mirage(backup_path, valid_env_file) + restore_fair(backup_path, valid_env_file) - mock_compose_env.assert_called_once_with(valid_env_file, node_type=NodeType.MIRAGE) + mock_compose_env.assert_called_once_with(valid_env_file, node_type=NodeType.FAIR) mock_save_env.assert_called_once_with(valid_env_file) expected_env = {**mock_env, 'SKALE_DIR': SKALE_DIR} mock_restore_op.assert_called_once_with(expected_env, backup_path, config_only=False) mock_sleep.assert_called_once() -@mock.patch('node_cli.mirage.mirage_boot.is_base_containers_alive', return_value=True) -@mock.patch('node_cli.mirage.mirage_boot.time.sleep') -@mock.patch('node_cli.mirage.mirage_boot.init_mirage_boot_op') -@mock.patch('node_cli.mirage.mirage_boot.compose_node_env') -def test_init_mirage_boot( +@mock.patch('node_cli.fair.fair_boot.is_base_containers_alive', return_value=True) +@mock.patch('node_cli.fair.fair_boot.time.sleep') +@mock.patch('node_cli.fair.fair_boot.init_fair_boot_op') 
+@mock.patch('node_cli.fair.fair_boot.compose_node_env') +def test_init_fair_boot( mock_compose_env, mock_init_op, mock_sleep, @@ -58,20 +58,20 @@ def test_init_mirage_boot( mock_compose_env.assert_called_once_with( valid_env_file, - node_type=NodeType.MIRAGE, - is_mirage_boot=True, + node_type=NodeType.FAIR, + is_fair_boot=True, ) mock_init_op.assert_called_once_with(valid_env_file, mock_env) mock_sleep.assert_called_once() - mock_is_alive.assert_called_once_with(node_type=NodeType.MIRAGE, is_mirage_boot=True) + mock_is_alive.assert_called_once_with(node_type=NodeType.FAIR, is_fair_boot=True) @mock.patch('node_cli.utils.decorators.is_user_valid', return_value=True) -@mock.patch('node_cli.mirage.mirage_boot.is_base_containers_alive', return_value=True) -@mock.patch('node_cli.mirage.mirage_boot.time.sleep') -@mock.patch('node_cli.mirage.mirage_boot.update_mirage_boot_op') -@mock.patch('node_cli.mirage.mirage_boot.compose_node_env') -def test_update_mirage_boot( +@mock.patch('node_cli.fair.fair_boot.is_base_containers_alive', return_value=True) +@mock.patch('node_cli.fair.fair_boot.time.sleep') +@mock.patch('node_cli.fair.fair_boot.update_fair_boot_op') +@mock.patch('node_cli.fair.fair_boot.compose_node_env') +def test_update_fair_boot( mock_compose_env, mock_update_op, mock_sleep, @@ -85,7 +85,7 @@ def test_update_mirage_boot( mock_env = {'ENV_TYPE': 'devnet'} mock_compose_env.return_value = mock_env mock_update_op.return_value = True - pull_config_for_schain = 'mirage' + pull_config_for_schain = 'fair' update(valid_env_file, pull_config_for_schain) @@ -94,16 +94,16 @@ def test_update_mirage_boot( inited_node=True, sync_schains=False, pull_config_for_schain=pull_config_for_schain, - node_type=NodeType.MIRAGE, - is_mirage_boot=True, + node_type=NodeType.FAIR, + is_fair_boot=True, ) mock_update_op.assert_called_once_with(valid_env_file, mock_env) mock_sleep.assert_called_once() - mock_is_alive.assert_called_once_with(node_type=NodeType.MIRAGE, is_mirage_boot=True) + 
mock_is_alive.assert_called_once_with(node_type=NodeType.FAIR, is_fair_boot=True) -@mock.patch('node_cli.mirage.mirage_node.update_mirage_op') -@mock.patch('node_cli.mirage.mirage_node.compose_node_env') +@mock.patch('node_cli.fair.fair_node.update_fair_op') +@mock.patch('node_cli.fair.fair_node.compose_node_env') @mock.patch('node_cli.utils.decorators.is_user_valid', return_value=True) def test_migrate_from_boot( mock_is_user_valid, @@ -124,17 +124,17 @@ def test_migrate_from_boot( valid_env_file, inited_node=True, sync_schains=False, - node_type=NodeType.MIRAGE, + node_type=NodeType.FAIR, ) mock_migrate_op.assert_called_once_with( - valid_env_file, mock_env, update_type=MirageUpdateType.FROM_BOOT + valid_env_file, mock_env, update_type=FairUpdateType.FROM_BOOT ) @freezegun.freeze_time(CURRENT_DATETIME) -@mock.patch('node_cli.mirage.mirage_node.compose_node_env', return_value={'ENV_TYPE': 'devnet'}) -@mock.patch('node_cli.mirage.record.chain_record.get_mirage_chain_name', return_value='test') -def test_mirage_repair(compose_node_env_mock, get_static_params_mock, redis_client, inited_node): +@mock.patch('node_cli.fair.fair_node.compose_node_env', return_value={'ENV_TYPE': 'devnet'}) +@mock.patch('node_cli.fair.record.chain_record.get_fair_chain_name', return_value='test') +def test_fair_repair(compose_node_env_mock, get_static_params_mock, redis_client, inited_node): request_repair() assert redis_client.get('test_repair_ts') == f'{CURRENT_TIMESTAMP}'.encode('utf-8') assert redis_client.get('test_snapshot_from') == b'' @@ -144,12 +144,12 @@ def test_mirage_repair(compose_node_env_mock, get_static_params_mock, redis_clie @mock.patch('node_cli.utils.decorators.is_user_valid', return_value=True) -@mock.patch('node_cli.mirage.mirage_node.cleanup_docker_configuration') -@mock.patch('node_cli.mirage.mirage_node.cleanup_mirage_op') -@mock.patch('node_cli.mirage.mirage_node.compose_node_env') +@mock.patch('node_cli.fair.fair_node.cleanup_docker_configuration') 
+@mock.patch('node_cli.fair.fair_node.cleanup_fair_op') +@mock.patch('node_cli.fair.fair_node.compose_node_env') def test_cleanup_success( mock_compose_env, - mock_cleanup_mirage_op, + mock_cleanup_fair_op, mock_cleanup_docker_config, mock_is_user_valid, inited_node, @@ -162,54 +162,54 @@ def test_cleanup_success( cleanup() mock_compose_env.assert_called_once_with( - SKALE_DIR_ENV_FILEPATH, save=False, node_type=NodeType.MIRAGE + SKALE_DIR_ENV_FILEPATH, save=False, node_type=NodeType.FAIR ) - mock_cleanup_mirage_op.assert_called_once_with(mock_env) + mock_cleanup_fair_op.assert_called_once_with(mock_env) mock_cleanup_docker_config.assert_called_once() @mock.patch('node_cli.utils.decorators.is_user_valid', return_value=True) -@mock.patch('node_cli.mirage.mirage_node.cleanup_docker_configuration') -@mock.patch('node_cli.mirage.mirage_node.cleanup_mirage_op') -@mock.patch('node_cli.mirage.mirage_node.compose_node_env') +@mock.patch('node_cli.fair.fair_node.cleanup_docker_configuration') +@mock.patch('node_cli.fair.fair_node.cleanup_fair_op') +@mock.patch('node_cli.fair.fair_node.compose_node_env') def test_cleanup_calls_operations_in_correct_order( mock_compose_env, - mock_cleanup_mirage_op, + mock_cleanup_fair_op, mock_cleanup_docker_config, mock_is_user_valid, inited_node, resource_alloc, meta_file_v3, ): - from node_cli.mirage.mirage_node import cleanup + from node_cli.fair.fair_node import cleanup mock_env = {'ENV_TYPE': 'devnet'} mock_compose_env.return_value = mock_env manager = mock.Mock() manager.attach_mock(mock_compose_env, 'compose_env') - manager.attach_mock(mock_cleanup_mirage_op, 'cleanup_mirage_op') + manager.attach_mock(mock_cleanup_fair_op, 'cleanup_fair_op') manager.attach_mock(mock_cleanup_docker_config, 'cleanup_docker_config') cleanup() expected_calls = [ mock.call.compose_env(mock.ANY, save=False, node_type=mock.ANY), - mock.call.cleanup_mirage_op(mock_env), + mock.call.cleanup_fair_op(mock_env), mock.call.cleanup_docker_config(), ] 
manager.assert_has_calls(expected_calls, any_order=False) @mock.patch('node_cli.utils.decorators.is_user_valid', return_value=True) -@mock.patch('node_cli.mirage.mirage_node.cleanup_docker_configuration') +@mock.patch('node_cli.fair.fair_node.cleanup_docker_configuration') @mock.patch( - 'node_cli.mirage.mirage_node.cleanup_mirage_op', side_effect=Exception('Cleanup failed') + 'node_cli.fair.fair_node.cleanup_fair_op', side_effect=Exception('Cleanup failed') ) -@mock.patch('node_cli.mirage.mirage_node.compose_node_env') -def test_cleanup_continues_after_mirage_op_error( +@mock.patch('node_cli.fair.fair_node.compose_node_env') +def test_cleanup_continues_after_fair_op_error( mock_compose_env, - mock_cleanup_mirage_op, + mock_cleanup_fair_op, mock_cleanup_docker_config, mock_is_user_valid, inited_node, @@ -223,7 +223,7 @@ def test_cleanup_continues_after_mirage_op_error( cleanup() mock_compose_env.assert_called_once() - mock_cleanup_mirage_op.assert_called_once_with(mock_env) + mock_cleanup_fair_op.assert_called_once_with(mock_env) mock_cleanup_docker_config.assert_not_called() @@ -237,7 +237,7 @@ def test_cleanup_fails_when_user_invalid( """Test that cleanup fails when user validation fails""" import pytest - from node_cli.mirage.mirage_node import cleanup + from node_cli.fair.fair_node import cleanup with pytest.raises(SystemExit): cleanup() @@ -251,14 +251,14 @@ def test_cleanup_fails_when_not_inited(ensure_meta_removed): @mock.patch('node_cli.utils.decorators.is_user_valid', return_value=True) -@mock.patch('node_cli.mirage.mirage_node.cleanup_docker_configuration') -@mock.patch('node_cli.mirage.mirage_node.cleanup_mirage_op') -@mock.patch('node_cli.mirage.mirage_node.compose_node_env') -@mock.patch('node_cli.mirage.mirage_node.logger') +@mock.patch('node_cli.fair.fair_node.cleanup_docker_configuration') +@mock.patch('node_cli.fair.fair_node.cleanup_fair_op') +@mock.patch('node_cli.fair.fair_node.compose_node_env') +@mock.patch('node_cli.fair.fair_node.logger') 
def test_cleanup_logs_success_message( mock_logger, mock_compose_env, - mock_cleanup_mirage_op, + mock_cleanup_fair_op, mock_cleanup_docker_config, mock_is_user_valid, inited_node, @@ -271,5 +271,5 @@ def test_cleanup_logs_success_message( cleanup() mock_logger.info.assert_called_once_with( - 'Mirage node was cleaned up, all containers and data removed' + 'Fair node was cleaned up, all containers and data removed' ) diff --git a/tests/routes_test.py b/tests/routes_test.py index 2bf8ffd2..872c53e1 100644 --- a/tests/routes_test.py +++ b/tests/routes_test.py @@ -31,8 +31,8 @@ '/api/v1/ssl/upload', '/api/v1/wallet/info', '/api/v1/wallet/send-eth', - '/api/v1/mirage-node/info', - '/api/v1/mirage-node/register', + '/api/v1/fair-node/info', + '/api/v1/fair-node/register', ] diff --git a/tests/tools_meta_test.py b/tests/tools_meta_test.py index 3c252b9c..a2a6a2fc 100644 --- a/tests/tools_meta_test.py +++ b/tests/tools_meta_test.py @@ -7,8 +7,8 @@ DEFAULT_VERSION, CliMeta, CliMetaManager, - MirageCliMeta, - MirageCliMetaManager, + FairCliMeta, + FairCliMetaManager, ) from tests.helper import TEST_META_V1, TEST_META_V2, TEST_META_V3 @@ -121,37 +121,37 @@ def test_ensure_meta(ensure_meta_removed): assert CliMetaManager().get_meta_info() == CliMeta(DEFAULT_VERSION, DEFAULT_CONFIG_STREAM) -def test_mirage_get_meta_info_v1(meta_file_v1): - meta = MirageCliMetaManager().get_meta_info() +def test_fair_get_meta_info_v1(meta_file_v1): + meta = FairCliMetaManager().get_meta_info() assert meta.version == TEST_META_V1['version'] assert meta.config_stream == TEST_META_V1['config_stream'] assert meta.os_id == 'ubuntu' assert meta.os_version == '18.04' -def test_mirage_get_meta_info_v2(meta_file_v2): - meta = MirageCliMetaManager().get_meta_info() +def test_fair_get_meta_info_v2(meta_file_v2): + meta = FairCliMetaManager().get_meta_info() assert meta.version == TEST_META_V2['version'] assert meta.config_stream == TEST_META_V2['config_stream'] assert meta.os_id == 'ubuntu' # default value 
assert meta.os_version == '18.04' # default value -def test_mirage_get_meta_info_v3(meta_file_v3): - meta = MirageCliMetaManager().get_meta_info() +def test_fair_get_meta_info_v3(meta_file_v3): + meta = FairCliMetaManager().get_meta_info() assert meta.version == TEST_META_V3['version'] assert meta.config_stream == TEST_META_V3['config_stream'] assert meta.os_id == TEST_META_V3['os_id'] assert meta.os_version == TEST_META_V3['os_version'] -def test_mirage_get_meta_info_empty(): - meta = MirageCliMetaManager().get_meta_info() +def test_fair_get_meta_info_empty(): + meta = FairCliMetaManager().get_meta_info() assert meta is None -def test_mirage_compose_default_meta(): - meta = MirageCliMetaManager().compose_default_meta() +def test_fair_compose_default_meta(): + meta = FairCliMetaManager().compose_default_meta() assert meta.version == '1.0.0' assert meta.config_stream == '1.1.0' assert meta.os_id == 'ubuntu' @@ -159,79 +159,79 @@ def test_mirage_compose_default_meta(): assert not hasattr(meta, 'docker_lvmpy_stream') -def test_mirage_save_meta(meta_file_v2): - meta = MirageCliMeta( - version='2.2.2', config_stream='mirage-stable', os_id='debian', os_version='11' +def test_fair_save_meta(meta_file_v2): + meta = FairCliMeta( + version='2.2.2', config_stream='fair-stable', os_id='debian', os_version='11' ) - MirageCliMetaManager().save_meta(meta) + FairCliMetaManager().save_meta(meta) with open(META_FILEPATH) as meta_f: saved_json = json.load(meta_f) assert saved_json == { 'version': '2.2.2', - 'config_stream': 'mirage-stable', + 'config_stream': 'fair-stable', 'os_id': 'debian', 'os_version': '11', } assert 'docker_lvmpy_stream' not in saved_json -def test_mirage_update_meta_from_v2_to_v3(meta_file_v2): - old_meta = MirageCliMetaManager().get_meta_info() - MirageCliMetaManager().update_meta( +def test_fair_update_meta_from_v2_to_v3(meta_file_v2): + old_meta = FairCliMetaManager().get_meta_info() + FairCliMetaManager().update_meta( version='3.3.3', - 
config_stream='mirage-beta', + config_stream='fair-beta', os_id='debian', os_version='11', ) - meta = MirageCliMetaManager().get_meta_info() + meta = FairCliMetaManager().get_meta_info() assert meta.version == '3.3.3' - assert meta.config_stream == 'mirage-beta' + assert meta.config_stream == 'fair-beta' assert meta.os_id == 'debian' assert meta.os_version == '11' assert meta != old_meta -def test_mirage_update_meta_from_v1(meta_file_v1): - MirageCliMetaManager().update_meta( +def test_fair_update_meta_from_v1(meta_file_v1): + FairCliMetaManager().update_meta( version='4.4.4', - config_stream='mirage-develop', + config_stream='fair-develop', os_id='centos', os_version='8', ) - meta = MirageCliMetaManager().get_meta_info() + meta = FairCliMetaManager().get_meta_info() assert meta.version == '4.4.4' - assert meta.config_stream == 'mirage-develop' + assert meta.config_stream == 'fair-develop' assert meta.os_id == 'centos' assert meta.os_version == '8' -def test_mirage_update_meta_from_v3(meta_file_v3): - MirageCliMetaManager().update_meta( +def test_fair_update_meta_from_v3(meta_file_v3): + FairCliMetaManager().update_meta( version='5.5.5', - config_stream='mirage-stable', + config_stream='fair-stable', os_id='ubuntu', os_version='22.04', ) - meta = MirageCliMetaManager().get_meta_info() + meta = FairCliMetaManager().get_meta_info() assert meta.version == '5.5.5' - assert meta.config_stream == 'mirage-stable' + assert meta.config_stream == 'fair-stable' assert meta.os_id == 'ubuntu' assert meta.os_version == '22.04' -def test_mirage_ensure_meta(ensure_meta_removed): - MirageCliMetaManager().ensure_meta() - assert MirageCliMetaManager().get_meta_info() == MirageCliMeta( +def test_fair_ensure_meta(ensure_meta_removed): + FairCliMetaManager().ensure_meta() + assert FairCliMetaManager().get_meta_info() == FairCliMeta( DEFAULT_VERSION, DEFAULT_CONFIG_STREAM ) - MirageCliMetaManager().ensure_meta(MirageCliMeta(version='1.1.1', config_stream='1.1.1')) - assert 
MirageCliMetaManager().get_meta_info() == MirageCliMeta( + FairCliMetaManager().ensure_meta(FairCliMeta(version='1.1.1', config_stream='1.1.1')) + assert FairCliMetaManager().get_meta_info() == FairCliMeta( DEFAULT_VERSION, DEFAULT_CONFIG_STREAM ) -def test_mirage_get_meta_info_raw(meta_file_v3): - raw_meta = MirageCliMetaManager().get_meta_info(raw=True) +def test_fair_get_meta_info_raw(meta_file_v3): + raw_meta = FairCliMetaManager().get_meta_info(raw=True) assert isinstance(raw_meta, dict) assert raw_meta['version'] == TEST_META_V3['version'] assert raw_meta['config_stream'] == TEST_META_V3['config_stream'] @@ -240,13 +240,13 @@ def test_mirage_get_meta_info_raw(meta_file_v3): assert 'docker_lvmpy_stream' not in raw_meta -def test_mirage_get_meta_info_raw_empty(): - raw_meta = MirageCliMetaManager().get_meta_info(raw=True) +def test_fair_get_meta_info_raw_empty(): + raw_meta = FairCliMetaManager().get_meta_info(raw=True) assert raw_meta == {} -def test_mirage_asdict(): - meta = MirageCliMeta( +def test_fair_asdict(): + meta = FairCliMeta( version='1.2.3', config_stream='test-stream', os_id='fedora', os_version='35' ) meta_dict = meta.asdict() @@ -260,8 +260,8 @@ def test_mirage_asdict(): assert 'docker_lvmpy_stream' not in meta_dict -def test_mirage_meta_compatibility_with_cli_meta_file(meta_file_v3): - meta = MirageCliMetaManager().get_meta_info() +def test_fair_meta_compatibility_with_cli_meta_file(meta_file_v3): + meta = FairCliMetaManager().get_meta_info() assert meta.version == TEST_META_V3['version'] assert meta.config_stream == TEST_META_V3['config_stream'] assert meta.os_id == TEST_META_V3['os_id'] @@ -270,46 +270,46 @@ def test_mirage_meta_compatibility_with_cli_meta_file(meta_file_v3): assert not hasattr(meta, 'docker_lvmpy_stream') -def test_mirage_save_meta_overwrites_cli_meta(meta_file_v3): +def test_fair_save_meta_overwrites_cli_meta(meta_file_v3): with open(META_FILEPATH) as f: original_data = json.load(f) assert 'docker_lvmpy_stream' in 
original_data - mirage_meta = MirageCliMeta(version='2.0.0', config_stream='mirage-new') - MirageCliMetaManager().save_meta(mirage_meta) + fair_meta = FairCliMeta(version='2.0.0', config_stream='fair-new') + FairCliMetaManager().save_meta(fair_meta) with open(META_FILEPATH) as f: saved_data = json.load(f) assert 'docker_lvmpy_stream' not in saved_data assert saved_data['version'] == '2.0.0' - assert saved_data['config_stream'] == 'mirage-new' + assert saved_data['config_stream'] == 'fair-new' -def test_mirage_ensure_meta_with_existing_cli_meta(meta_file_v3): - MirageCliMetaManager().ensure_meta() - meta = MirageCliMetaManager().get_meta_info() +def test_fair_ensure_meta_with_existing_cli_meta(meta_file_v3): + FairCliMetaManager().ensure_meta() + meta = FairCliMetaManager().get_meta_info() assert meta.version == TEST_META_V3['version'] assert meta.config_stream == TEST_META_V3['config_stream'] -def test_mirage_meta_defaults(): - meta = MirageCliMeta() +def test_fair_meta_defaults(): + meta = FairCliMeta() assert meta.version == DEFAULT_VERSION assert meta.config_stream == DEFAULT_CONFIG_STREAM assert meta.os_id == 'ubuntu' assert meta.os_version == '18.04' -def test_mirage_meta_partial_initialization(): - meta = MirageCliMeta(version='1.5.0', os_id='alpine') +def test_fair_meta_partial_initialization(): + meta = FairCliMeta(version='1.5.0', os_id='alpine') assert meta.version == '1.5.0' assert meta.config_stream == DEFAULT_CONFIG_STREAM assert meta.os_id == 'alpine' assert meta.os_version == '18.04' -def test_mirage_update_meta_ensure_called(): - manager = MirageCliMetaManager() +def test_fair_update_meta_ensure_called(): + manager = FairCliMetaManager() manager.update_meta(version='1.0.0', config_stream='test', os_id='ubuntu', os_version='20.04') diff --git a/text.yml b/text.yml index 2879b312..c93a743e 100644 --- a/text.yml +++ b/text.yml @@ -77,17 +77,17 @@ lvmpy: help: Run healing procedure for lvmpy server prompt: Are you sure you want run healing procedure? 
-mirage: +fair: node: repair: - help: Repair mirage chain node - warning: Are you sure you want to repair mirage chain node? In rare cases may cause data loss and require additional maintenance + help: Repair fair chain node + warning: Are you sure you want to repair fair chain node? In rare cases may cause data loss and require additional maintenance snapshot_from: IP of the node to take snapshot from repair_requested: Repair mode is requested not_inited: Node should be initialized to proceed with operation - registered: Node is registered in mirage manager. + registered: Node is registered in fair manager. register: - help: Register node in mirage manager - name: Name of the node in mirage manager - ip: IP address of the node in mirage manager + help: Register node in fair manager + name: Name of the node in fair manager + ip: IP address of the node in fair manager From 5b95fd875253d345884f89c22a3747c05834dd49 Mon Sep 17 00:00:00 2001 From: Dmytro Date: Tue, 15 Jul 2025 13:43:31 +0100 Subject: [PATCH 136/332] Update scripts in test pipeline --- .github/workflows/test.yml | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 9251c723..1a230126 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -41,7 +41,7 @@ jobs: run: | mkdir -p ./dist docker build . -t node-cli-builder - docker run -v /home/ubuntu/dist:/app/dist node-cli-builder scripts/build.sh test test normal + docker run -v /home/ubuntu/dist:/app/dist node-cli-builder bash scripts/build.sh test test normal docker rm -f $(docker ps -aq) - name: Check build - normal @@ -51,7 +51,7 @@ jobs: run: | mkdir -p ./dist docker build . 
-t node-cli-builder - docker run -v /home/ubuntu/dist:/app/dist node-cli-builder scripts/build.sh test test sync + docker run -v /home/ubuntu/dist:/app/dist node-cli-builder bash scripts/build.sh test test sync docker rm -f $(docker ps -aq) - name: Check build - sync @@ -61,7 +61,7 @@ jobs: run: | mkdir -p ./dist docker build . -t node-cli-builder - docker run -v /home/ubuntu/dist:/app/dist node-cli-builder scripts/build.sh test test fair + docker run -v /home/ubuntu/dist:/app/dist node-cli-builder bash scripts/build.sh test test fair docker rm -f $(docker ps -aq) - name: Check build - fair @@ -69,11 +69,11 @@ jobs: - name: Run prepare test build run: | - scripts/build.sh test test normal + bash scripts/build.sh test test normal - name: Run redis run: | - ./helper-scripts/redis/run.sh + bash ./helper-scripts/redis/run.sh - name: Run tests run: | From d2d6a939e76220b55495fd677e9f0137e48f2e20 Mon Sep 17 00:00:00 2001 From: Dmytro Date: Tue, 15 Jul 2025 14:46:28 +0100 Subject: [PATCH 137/332] Fix generate info execution --- scripts/build.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/build.sh b/scripts/build.sh index 810e85b9..a1adc516 100644 --- a/scripts/build.sh +++ b/scripts/build.sh @@ -35,7 +35,7 @@ PARENT_DIR="$(dirname "$DIR")" OS=`uname -s`-`uname -m` # Use the new generate_info.sh script -"${DIR}/generate_info.sh" "$VERSION" "$BRANCH" "$TYPE" +bash "${DIR}/generate_info.sh" "$VERSION" "$BRANCH" "$TYPE" if [ "$TYPE" = "sync" ]; then EXECUTABLE_NAME=skale-$VERSION-$OS-sync From 95f7d6525c076f6ffc5011db9e1f420dd7c7c358 Mon Sep 17 00:00:00 2001 From: Dmytro Date: Tue, 15 Jul 2025 15:01:11 +0100 Subject: [PATCH 138/332] Fix backup test --- tests/cli/node_test.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/tests/cli/node_test.py b/tests/cli/node_test.py index 834d85f9..0db1d97a 100644 --- a/tests/cli/node_test.py +++ b/tests/cli/node_test.py @@ -318,8 +318,7 @@ def test_backup(): 
pathlib.Path(SKALE_DIR).mkdir(parents=True, exist_ok=True) result = run_command(backup_node, ['/tmp']) assert result.exit_code == 0 - print(result.output) - assert 'Backup archive succesfully created ' in result.output + assert 'Backup archive successfully created ' in result.output @pytest.mark.parametrize( From ce843715886c995d49e04d1873d2d5626f2e8d51 Mon Sep 17 00:00:00 2001 From: Dmytro Date: Tue, 15 Jul 2025 15:17:27 +0100 Subject: [PATCH 139/332] Trigger node-cli build --- README.md | 228 +++++++++++++++++++++++++++--------------------------- 1 file changed, 114 insertions(+), 114 deletions(-) diff --git a/README.md b/README.md index 5bf844f6..9336f431 100644 --- a/README.md +++ b/README.md @@ -4,7 +4,7 @@ ![Test](https://github.com/skalenetwork/node-cli/workflows/Test/badge.svg) [![Discord](https://img.shields.io/discord/534485763354787851.svg)](https://discord.gg/vvUtWJB) -SKALE Node CLI, part of the SKALE suite of validator tools, is the command line interface to setup, register and maintain your SKALE node. It comes in three distinct build types: Standard (for validator nodes), Sync (for dedicated sChain synchronization), and Fair (for the Fair network). +SKALE Node CLI, part of the SKALE suite of validator tools, is the command line interface to setup, register and maintain your SKALE node. It comes in three distinct build types: Standard (for validator nodes), Sync (for dedicated sChain synchronization), and Fair. ## Table of Contents @@ -32,7 +32,7 @@ SKALE Node CLI, part of the SKALE suite of validator tools, is the command line 5. [Exit codes](#exit-codes) 6. [Development](#development) ---- +*** ## Installation @@ -92,7 +92,7 @@ skale --help fair --help ``` ---- +*** ## Standard Node Usage (`skale` - Normal Build) @@ -118,7 +118,7 @@ skale version Options: -- `--short` - prints version only, without additional text. +* `--short` - prints version only, without additional text. 
### Node commands (Standard) @@ -134,7 +134,7 @@ skale node info Options: -- `-f/--format json/text` - optional. +* `-f/--format json/text` - optional. #### Node initialization @@ -148,28 +148,28 @@ skale node init [ENV_FILE] Arguments: -- `ENV_FILE` - path to .env file (required). +* `ENV_FILE` - path to .env file (required). Required environment variables in `ENV_FILE`: -- `SGX_SERVER_URL` - SGX server URL. -- `DISK_MOUNTPOINT` - Mount point for storing sChains data. -- `DOCKER_LVMPY_STREAM` - Stream of `docker-lvmpy` to use. -- `CONTAINER_CONFIGS_STREAM` - Stream of `skale-node` to use. -- `ENDPOINT` - RPC endpoint of the network where SKALE Manager is deployed. -- `MANAGER_CONTRACTS` - SKALE Manager `message_proxy_mainnet` contract alias or address. -- `IMA_CONTRACTS` - IMA `skale_manager` contract alias or address. -- `FILEBEAT_HOST` - URL of the Filebeat log server. -- `ENV_TYPE` - Environment type (e.g., 'mainnet', 'testnet', 'qanet', 'devnet'). +* `SGX_SERVER_URL` - SGX server URL. +* `DISK_MOUNTPOINT` - Mount point for storing sChains data. +* `DOCKER_LVMPY_STREAM` - Stream of `docker-lvmpy` to use. +* `CONTAINER_CONFIGS_STREAM` - Stream of `skale-node` to use. +* `ENDPOINT` - RPC endpoint of the network where SKALE Manager is deployed. +* `MANAGER_CONTRACTS` - SKALE Manager `message_proxy_mainnet` contract alias or address. +* `IMA_CONTRACTS` - IMA `skale_manager` contract alias or address. +* `FILEBEAT_HOST` - URL of the Filebeat log server. +* `ENV_TYPE` - Environment type (e.g., 'mainnet', 'testnet', 'qanet', 'devnet'). > In `MANAGER_CONTRACTS` and `IMA_CONTRACTS` fields, if you are using a recognized network (e.g., 'Mainnet', 'Holesky', 'local'), you can use a recognized alias (e.g., 'production', 'grants'). You can check the list of recognized networks and aliases in [contract deployments](https://github.com/skalenetwork/skale-contracts/tree/deployments). 
> :warning: If you are using a custom network or a contract which isn't recognized by the underlying `skale-contracts` library, you **MUST** provide a direct contract address. Optional variables: -- `TG_API_KEY` - Telegram API key -- `TG_CHAT_ID` - Telegram chat ID -- `MONITORING_CONTAINERS` - Enable monitoring containers (`cadvisor`, `node-exporter`). +* `TG_API_KEY` - Telegram API key +* `TG_CHAT_ID` - Telegram chat ID +* `MONITORING_CONTAINERS` - Enable monitoring containers (`cadvisor`, `node-exporter`). #### Node initialization from backup @@ -181,8 +181,8 @@ skale node restore [BACKUP_PATH] [ENV_FILE] Arguments: -- `BACKUP_PATH` - Path to the archive created by `skale node backup`. -- `ENV_FILE` - Path to .env file with configuration for the restored node. +* `BACKUP_PATH` - Path to the archive created by `skale node backup`. +* `ENV_FILE` - Path to .env file with configuration for the restored node. #### Node backup @@ -194,7 +194,7 @@ skale node backup [BACKUP_FOLDER_PATH] Arguments: -- `BACKUP_FOLDER_PATH` - Path to the folder where the backup file will be saved. +* `BACKUP_FOLDER_PATH` - Path to the folder where the backup file will be saved. #### Node Registration @@ -206,13 +206,13 @@ skale node register --name --ip --domain [ Required arguments: -- `--ip` - Public IP for RPC connections and consensus. -- `--domain`/`-d` - SKALE node domain name. -- `--name` - SKALE node name. +* `--ip` - Public IP for RPC connections and consensus. +* `--domain`/`-d` - SKALE node domain name. +* `--name` - SKALE node name. Optional arguments: -- `--port` - Base port for node sChains (default: `10000`). +* `--port` - Base port for node sChains (default: `10000`). #### Node update @@ -224,11 +224,11 @@ skale node update [ENV_FILEPATH] [--yes] Arguments: -- `ENV_FILEPATH` - Path to the .env file containing potentially updated parameters. +* `ENV_FILEPATH` - Path to the .env file containing potentially updated parameters. 
Options: -- `--yes` - Update without confirmation prompt. +* `--yes` - Update without confirmation prompt. #### Node turn-off @@ -240,8 +240,8 @@ skale node turn-off [--maintenance-on] [--yes] Options: -- `--maintenance-on` - Set node to maintenance mode before turning off. -- `--yes` - Turn off without confirmation. +* `--maintenance-on` - Set node to maintenance mode before turning off. +* `--yes` - Turn off without confirmation. #### Node turn-on @@ -253,12 +253,12 @@ skale node turn-on [ENV_FILEPATH] [--maintenance-off] [--yes] Arguments: -- `ENV_FILEPATH` - Path to the .env file. +* `ENV_FILEPATH` - Path to the .env file. Options: -- `--maintenance-off` - Turn off maintenance mode after turning on. -- `--yes` - Turn on without additional confirmation. +* `--maintenance-off` - Turn off maintenance mode after turning on. +* `--yes` - Turn on without additional confirmation. #### Node maintenance @@ -274,7 +274,7 @@ skale node maintenance-off Options: -- `--yes` - Perform action without additional confirmation. +* `--yes` - Perform action without additional confirmation. #### Domain name @@ -286,11 +286,11 @@ skale node set-domain --domain [--yes] Required Options: -- `--domain`/`-d` - The new SKALE node domain name. +* `--domain`/`-d` - The new SKALE node domain name. Options: -- `--yes` - Set without additional confirmation. +* `--yes` - Set without additional confirmation. #### Skale Node Signature @@ -302,7 +302,7 @@ skale node signature Arguments: -- `VALIDATOR_ID` - The ID of the validator requesting the signature. +* `VALIDATOR_ID` - The ID of the validator requesting the signature. ### Wallet commands (Standard) @@ -318,7 +318,7 @@ skale wallet info [-f json/text] Options: -- `-f/--format json/text` - optional. +* `-f/--format json/text` - optional. #### Wallet setting @@ -338,12 +338,12 @@ skale wallet send [--yes] Arguments: -- `RECEIVER_ADDRESS` - Ethereum receiver address. -- `AMOUNT_ETH` - Amount of ETH tokens to send. 
+* `RECEIVER_ADDRESS` - Ethereum receiver address. +* `AMOUNT_ETH` - Amount of ETH tokens to send. Optional arguments: -- `--yes` - Send without additional confirmation. +* `--yes` - Send without additional confirmation. ### sChain commands (Standard) @@ -385,7 +385,7 @@ skale schains info [--json] Options: -- `--json` - Show info in JSON format. +* `--json` - Show info in JSON format. #### Repair sChain @@ -411,7 +411,7 @@ skale health containers [-a/--all] Options: -- `-a/--all` - list all containers (by default - only running). +* `-a/--all` - list all containers (by default - only running). #### Get sChains healthchecks @@ -423,7 +423,7 @@ skale health schains [--json] Options: -- `--json` - Show data in JSON format. +* `--json` - Show data in JSON format. #### Check SGX server status @@ -459,9 +459,9 @@ skale ssl upload -c -k [-f/--force] Options: -- `-c/--cert-path` - Path to the certificate file. -- `-k/--key-path` - Path to the key file. -- `-f/--force` - Overwrite existing certificates. +* `-c/--cert-path` - Path to the certificate file. +* `-k/--key-path` - Path to the key file. +* `-f/--force` - Overwrite existing certificates. Admin API URL: `[POST] /api/ssl/upload` @@ -475,11 +475,11 @@ skale ssl check [-c ] [-k ] [--type ] [--port ] Options: -- `-c/--cert-path` - Path to the certificate file (default: uploaded using `skale ssl upload` certificate). -- `-k/--key-path` - Path to the key file (default: uploaded using `skale ssl upload` key). -- `--type/-t` - Check type (`openssl` - openssl cli check, `skaled` - skaled-based check, `all` - both). -- `--port/-p` - Port to start healthcheck server (default: `4536`). -- `--no-client` - Skip client connection (only make sure server started without errors). +* `-c/--cert-path` - Path to the certificate file (default: uploaded using `skale ssl upload` certificate). +* `-k/--key-path` - Path to the key file (default: uploaded using `skale ssl upload` key). 
+* `--type/-t` - Check type (`openssl` - openssl cli check, `skaled` - skaled-based check, `all` - both). +* `--port/-p` - Port to start healthcheck server (default: `4536`). +* `--no-client` - Skip client connection (only make sure server started without errors). ### Logs commands (Standard) @@ -497,7 +497,7 @@ skale logs cli [--debug] Options: -- `--debug` - show debug logs; more detailed output. +* `--debug` - show debug logs; more detailed output. #### Dump All Node Logs @@ -509,11 +509,11 @@ skale logs dump [TARGET_PATH] [-c/--container ] Arguments: -- `TARGET_PATH` - Optional path to save the log dump archive. +* `TARGET_PATH` - Optional path to save the log dump archive. Options: -- `--container`, `-c` - Dump logs only from specified container. +* `--container`, `-c` - Dump logs only from specified container. ### Resources allocation commands (Standard) @@ -539,14 +539,14 @@ skale resources-allocation generate [ENV_FILE] [--yes] [-f/--force] Arguments: -- `ENV_FILE` - path to .env file (required parameters are listed in the `skale node init` command). +* `ENV_FILE` - path to .env file (required parameters are listed in the `skale node init` command). Options: -- `--yes` - generate without additional confirmation. -- `-f/--force` - rewrite allocation file if it exists. +* `--yes` - generate without additional confirmation. +* `-f/--force` - rewrite allocation file if it exists. ---- +*** ## Sync Node Usage (`skale` - Sync Build) @@ -573,7 +573,7 @@ skale version Options: -- `--short` - prints version only, without additional text. +* `--short` - prints version only, without additional text. ### Sync node commands @@ -589,29 +589,29 @@ skale sync-node init [ENV_FILE] [--indexer | --archive] [--snapshot] [--snapshot Arguments: -- `ENV_FILE` - path to .env file (required). +* `ENV_FILE` - path to .env file (required). Required environment variables in `ENV_FILE`: -- `DISK_MOUNTPOINT` - Mount point for storing sChain data. 
-- `DOCKER_LVMPY_STREAM` - Stream of `docker-lvmpy`. -- `CONTAINER_CONFIGS_STREAM` - Stream of `skale-node`. -- `ENDPOINT` - RPC endpoint of the network where SKALE Manager is deployed. -- `MANAGER_CONTRACTS` - SKALE Manager alias or address. -- `IMA_CONTRACTS` - IMA alias or address. -- `SCHAIN_NAME` - Name of the specific SKALE chain to sync. -- `ENV_TYPE` - Environment type (e.g., 'mainnet', 'testnet'). +* `DISK_MOUNTPOINT` - Mount point for storing sChain data. +* `DOCKER_LVMPY_STREAM` - Stream of `docker-lvmpy`. +* `CONTAINER_CONFIGS_STREAM` - Stream of `skale-node`. +* `ENDPOINT` - RPC endpoint of the network where SKALE Manager is deployed. +* `MANAGER_CONTRACTS` - SKALE Manager alias or address. +* `IMA_CONTRACTS` - IMA alias or address. +* `SCHAIN_NAME` - Name of the specific SKALE chain to sync. +* `ENV_TYPE` - Environment type (e.g., 'mainnet', 'testnet'). > In `MANAGER_CONTRACTS` and `IMA_CONTRACTS` fields, if you are using a recognized network (e.g., 'Mainnet', 'Holesky', 'local'), you can use a recognized alias (e.g., 'production', 'grants'). You can check the list of recognized networks and aliases in [contract deployments](https://github.com/skalenetwork/skale-contracts/tree/deployments). > :warning: If you are using a custom network or a contract which isn't recognized by the underlying `skale-contracts` library, you **MUST** provide a direct contract address. Options: -- `--indexer` - Run in indexer mode (disables block rotation). -- `--archive` - Run in archive mode (enable historic state and disable block rotation). -- `--snapshot` - Start sync node from snapshot. -- `--snapshot-from ` - Specify the IP of another node to download a snapshot from. -- `--yes` - Initialize without additional confirmation. +* `--indexer` - Run in indexer mode (disables block rotation). +* `--archive` - Run in archive mode (enable historic state and disable block rotation). +* `--snapshot` - Start sync node from snapshot. 
+* `--snapshot-from ` - Specify the IP of another node to download a snapshot from. +* `--yes` - Initialize without additional confirmation. #### Sync node update @@ -623,11 +623,11 @@ skale sync-node update [ENV_FILEPATH] [--yes] Arguments: -- `ENV_FILEPATH` - Path to the .env file. +* `ENV_FILEPATH` - Path to the .env file. Options: -- `--yes` - Update without additionalconfirmation. +* `--yes` - Update without additionalconfirmation. > NOTE: You can just update a file with environment variables used during `skale sync-node init`. @@ -641,11 +641,11 @@ skale sync-node cleanup [--yes] Options: -- `--yes` - Cleanup without confirmation. +* `--yes` - Cleanup without confirmation. > WARNING: This command removes all Sync node data. ---- +*** ## Fair Node Usage (`fair`) @@ -671,7 +671,7 @@ fair version [--short] Options: -- `--short` - prints version only, without additional text. +* `--short` - prints version only, without additional text. ### Fair Boot commands @@ -689,26 +689,26 @@ fair boot init [ENV_FILE] Arguments: -- `ENV_FILE` - path to .env file (required). +* `ENV_FILE` - path to .env file (required). Required environment variables in `ENV_FILE`: -- `SGX_SERVER_URL` - SGX server URL. -- `DISK_MOUNTPOINT` - Mount point for storing data (BTRFS recommended). -- `CONTAINER_CONFIGS_STREAM` - Stream of `skale-node` configs. -- `ENDPOINT` - RPC endpoint of the network where Fair Manager is deployed. -- `MANAGER_CONTRACTS` - SKALE Manager alias or address. -- `IMA_CONTRACTS` - IMA alias or address (_Note: Required by boot service, may not be used by Fair itself_). -- `FILEBEAT_HOST` - URL/IP:Port of the Filebeat log server. -- `ENV_TYPE` - Environment type (e.g., 'mainnet', 'devnet'). +* `SGX_SERVER_URL` - SGX server URL. +* `DISK_MOUNTPOINT` - Mount point for storing data (BTRFS recommended). +* `CONTAINER_CONFIGS_STREAM` - Stream of `skale-node` configs. +* `ENDPOINT` - RPC endpoint of the network where Fair Manager is deployed. 
+* `MANAGER_CONTRACTS` - SKALE Manager alias or address. +* `IMA_CONTRACTS` - IMA alias or address (*Note: Required by boot service, may not be used by Fair itself*). +* `FILEBEAT_HOST` - URL/IP:Port of the Filebeat log server. +* `ENV_TYPE` - Environment type (e.g., 'mainnet', 'devnet'). Optional variables: -- `MONITORING_CONTAINERS` - Enable monitoring containers (`cadvisor`, `node-exporter`). +* `MONITORING_CONTAINERS` - Enable monitoring containers (`cadvisor`, `node-exporter`). #### Fair Boot Registration -Register the Fair node with Fair Manager _during_ the boot phase. +Register the Fair node with Fair Manager *during* the boot phase. ```shell fair boot register --name --ip --domain [--port ] @@ -716,17 +716,17 @@ fair boot register --name --ip --domain [- Required arguments: -- `--name`/`-n` - Fair node name. -- `--ip` - Public IP for RPC connections and consensus. -- `--domain`/`-d` - Fair node domain name (e.g., `fair1.example.com`). +* `--name`/`-n` - Fair node name. +* `--ip` - Public IP for RPC connections and consensus. +* `--domain`/`-d` - Fair node domain name (e.g., `fair1.example.com`). Optional arguments: -- `--port`/`-p` - Base port for node sChains (default: `10000`). +* `--port`/`-p` - Base port for node sChains (default: `10000`). #### Fair Boot Signature -Get the node signature for a validator ID _during_ the boot phase. +Get the node signature for a validator ID *during* the boot phase. ```shell fair boot signature @@ -734,7 +734,7 @@ fair boot signature Arguments: -- `VALIDATOR_ID` - The ID of the validator requesting the signature. +* `VALIDATOR_ID` - The ID of the validator requesting the signature. #### Fair Boot Migrate @@ -746,11 +746,11 @@ fair boot migrate [ENV_FILEPATH] [--yes] Arguments: -- `ENV_FILEPATH` - Path to the .env file. +* `ENV_FILEPATH` - Path to the .env file. Options: -- `--yes` - Migrate without confirmation. +* `--yes` - Migrate without confirmation. 
### Fair Node commands @@ -798,7 +798,7 @@ fair node signature Arguments: -- `VALIDATOR_ID` - The ID of the validator requesting the signature. +* `VALIDATOR_ID` - The ID of the validator requesting the signature. #### Fair Node Backup @@ -810,7 +810,7 @@ fair node backup Arguments: -- `BACKUP_FOLDER_PATH` - Path to the folder where the backup file will be saved. +* `BACKUP_FOLDER_PATH` - Path to the folder where the backup file will be saved. #### Fair Node Restore @@ -822,32 +822,32 @@ fair node restore [--config-only] Arguments: -- `BACKUP_PATH` - Path to the archive. -- `ENV_FILE` - Path to the .env file for the restored node configuration. +* `BACKUP_PATH` - Path to the archive. +* `ENV_FILE` - Path to the .env file for the restored node configuration. Options: -- `--config-only` - Only restore configuration files. +* `--config-only` - Only restore configuration files. ---- +*** ## Exit codes Exit codes conventions for SKALE CLI tools -- `0` - Everything is OK -- `1` - General error exit code -- `3` - Bad API response\*\* -- `4` - Script execution error\*\* -- `5` - Transaction error\* -- `6` - Revert error\* -- `7` - Bad user error\*\* -- `8` - Node state error\*\* +* `0` - Everything is OK +* `1` - General error exit code +* `3` - Bad API response\*\* +* `4` - Script execution error\*\* +* `5` - Transaction error\* +* `6` - Revert error\* +* `7` - Bad user error\*\* +* `8` - Node state error\*\* `*` - `validator-cli` only\ `**` - `node-cli` only ---- +*** ## Development @@ -855,8 +855,8 @@ Exit codes conventions for SKALE CLI tools #### Dependencies -- Python 3.11 -- Git +* Python 3.11 +* Git #### Clone the repository From c03df761dca59305e859e74ba6c0ffb126621d9b Mon Sep 17 00:00:00 2001 From: badrogger Date: Tue, 15 Jul 2025 17:09:54 +0100 Subject: [PATCH 140/332] Improve cleanup --- node_cli/core/schains.py | 45 +++++++++++++++++++------------------ node_cli/operations/fair.py | 6 +++-- node_cli/utils/helper.py | 11 +++++++++ 3 files changed, 38 
insertions(+), 24 deletions(-) diff --git a/node_cli/core/schains.py b/node_cli/core/schains.py index eb420fdb..c103d5bb 100644 --- a/node_cli/core/schains.py +++ b/node_cli/core/schains.py @@ -263,25 +263,26 @@ def cleanup_datadir_for_single_chain_node( f'No data directory found in {base_path}. ' 'Please check the path or specify a chain name.' ) - chain_name = folders[0] - base_path = os.path.join(base_path, chain_name) - regular_folders_pattern = f'{base_path}/[!snapshots]*' - logger.info('Removing regular folders') - for filepath in glob.glob(regular_folders_pattern): - if os.path.isdir(filepath): - logger.debug('Removing recursively %s', filepath) - shutil.rmtree(filepath) - if os.path.isfile(filepath): - os.remove(filepath) - - logger.info('Removing subvolumes') - subvolumes_pattern = f'{base_path}/snapshots/*/*' - for filepath in glob.glob(subvolumes_pattern): - logger.debug('Deleting subvolume %s', filepath) - if os.path.isdir(filepath): - rm_btrfs_subvolume(filepath) - else: - os.remove(filepath) - logger.info('Cleaning up snapshots folder') - if os.path.isdir(base_path): - shutil.rmtree(base_path) + for folder_name in folders[0]: + base_path = os.path.join(base_path, folder_name) + if folder_name != 'shared-space': + regular_folders_pattern = f'{base_path}/[!snapshots]*' + logger.info('Removing regular folders') + for filepath in glob.glob(regular_folders_pattern): + if os.path.isdir(filepath): + logger.debug('Removing recursively %s', filepath) + shutil.rmtree(filepath) + if os.path.isfile(filepath): + os.remove(filepath) + + logger.info('Removing subvolumes') + subvolumes_pattern = f'{base_path}/snapshots/*/*' + for filepath in glob.glob(subvolumes_pattern): + logger.debug('Deleting subvolume %s', filepath) + if os.path.isdir(filepath): + rm_btrfs_subvolume(filepath) + else: + os.remove(filepath) + logger.info('Cleaning up snapshots folder') + if os.path.isdir(base_path): + shutil.rmtree(base_path) diff --git a/node_cli/operations/fair.py 
b/node_cli/operations/fair.py index 0471c399..795ee741 100644 --- a/node_cli/operations/fair.py +++ b/node_cli/operations/fair.py @@ -27,6 +27,7 @@ from node_cli.configs import ( CONTAINER_CONFIG_PATH, GLOBAL_SKALE_DIR, + NFTABLES_CHAIN_FOLDER_PATH, SKALE_DIR, ) from node_cli.core.checks import CheckType @@ -36,8 +37,8 @@ from node_cli.core.nftables import configure_nftables from node_cli.core.nginx import generate_nginx_config from node_cli.core.schains import cleanup_datadir_for_single_chain_node -from node_cli.migrations.fair.from_boot import migrate_nftables_from_boot from node_cli.fair.record.chain_record import migrate_chain_record +from node_cli.migrations.fair.from_boot import migrate_nftables_from_boot from node_cli.operations.base import checked_host, turn_off from node_cli.operations.common import configure_filebeat, configure_flask, unpack_backup_archive from node_cli.operations.config_repo import ( @@ -55,7 +56,7 @@ remove_dynamic_containers, wait_for_container, ) -from node_cli.utils.helper import rm_dir, str_to_bool +from node_cli.utils.helper import cleanup_dir_content, rm_dir, str_to_bool from node_cli.utils.meta import FairCliMetaManager from node_cli.utils.print_formatters import print_failed_requirements_checks @@ -244,4 +245,5 @@ def cleanup(env) -> None: cleanup_datadir_for_single_chain_node() rm_dir(GLOBAL_SKALE_DIR) rm_dir(SKALE_DIR) + cleanup_dir_content(NFTABLES_CHAIN_FOLDER_PATH) cleanup_docker_configuration() diff --git a/node_cli/utils/helper.py b/node_cli/utils/helper.py index 7adfba3b..bff97fa6 100644 --- a/node_cli/utils/helper.py +++ b/node_cli/utils/helper.py @@ -324,6 +324,17 @@ def rm_dir(folder: str) -> None: logger.info(f"{folder} doesn't exist, skipping...") +def cleanup_dir_content(folder: str) -> None: + if os.path.exists(folder): + logger.info('Removing contents of %s') + for filename in os.listdir(folder): + file_path = os.path.join(folder, filename) + if os.path.isfile(file_path) or os.path.islink(file_path): + 
os.unlink(file_path) + elif os.path.isdir(file_path): + shutil.rmtree(file_path) + + def safe_mkdir(path: str, print_res: bool = False) -> None: if os.path.exists(path): logger.debug(f'Directory {path} already exists') From 66e36ef1b3ae09c76dd03d6ba61920ce31644fae Mon Sep 17 00:00:00 2001 From: Dmytro Date: Wed, 16 Jul 2025 11:54:15 +0100 Subject: [PATCH 141/332] Fix node-cli build in publish pipeline --- .github/workflows/publish.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/publish.yml b/.github/workflows/publish.yml index 3459f78f..57852a13 100644 --- a/.github/workflows/publish.yml +++ b/.github/workflows/publish.yml @@ -107,7 +107,7 @@ jobs: mkdir -p ${{ github.workspace }}/dist docker build . -t node-cli-builder docker run --rm -v ${{ github.workspace }}/dist:/app/dist node-cli-builder \ - scripts/build.sh ${{ needs.create_release.outputs.version }} ${{ needs.create_release.outputs.branch }} ${{ matrix.build_type }} + bash scripts/build.sh ${{ needs.create_release.outputs.version }} ${{ needs.create_release.outputs.branch }} ${{ matrix.build_type }} echo "Contents of dist directory:" ls -altr ${{ github.workspace }}/dist/ docker rm -f $(docker ps -aq) || true From 761edf844370657ba099381bd12a1e6e41dec641 Mon Sep 17 00:00:00 2001 From: badrogger Date: Thu, 17 Jul 2025 13:13:15 +0100 Subject: [PATCH 142/332] Fix and improve datadir cleanup code --- node_cli/core/schains.py | 56 +++++++++++++++++++-------------- node_cli/operations/base.py | 4 +-- node_cli/operations/fair.py | 4 +-- tests/core/core_schains_test.py | 4 +-- 4 files changed, 38 insertions(+), 30 deletions(-) diff --git a/node_cli/core/schains.py b/node_cli/core/schains.py index c103d5bb..17bc0c42 100644 --- a/node_cli/core/schains.py +++ b/node_cli/core/schains.py @@ -253,36 +253,44 @@ def ensure_schain_volume(schain: str, schain_type: str, env_type: str) -> None: logger.warning('Volume %s already exists', schain) -def cleanup_datadir_for_single_chain_node( 
+def cleanup_datadir_content(datadir_path: str) -> None: + regular_folders_pattern = f'{datadir_path}/[!snapshots]*' + logger.info('Removing regular folders') + for path in glob.glob(regular_folders_pattern): + if os.path.isdir(path): + logger.debug('Removing recursively %s', path) + shutil.rmtree(path) + if os.path.isfile(path): + os.remove(path) + + logger.info('Removing subvolumes') + subvolumes_pattern = f'{datadir_path}/snapshots/*/*' + for path in glob.glob(subvolumes_pattern): + logger.debug('Deleting subvolume %s', path) + if os.path.isdir(path): + rm_btrfs_subvolume(path) + else: + os.remove(path) + + logger.info('Removing snapshots folder') + shutil.rmtree(os.path.join(datadir_path, 'snapshots'), ignore_errors=True) + + +def cleanup_no_lvm_datadir( chain_name: str = '', base_path: str = SCHAINS_MNT_DIR_SINGLE_CHAIN ) -> None: - if not chain_name: + if chain_name: + folders = [chain_name] + else: folders = [f for f in os.listdir(base_path) if os.path.isdir(os.path.join(base_path, f))] if not folders: raise NoDataDirForChainError( f'No data directory found in {base_path}. ' 'Please check the path or specify a chain name.' 
) - for folder_name in folders[0]: - base_path = os.path.join(base_path, folder_name) + for folder_name in folders: + folder_path = os.path.join(base_path, folder_name) if folder_name != 'shared-space': - regular_folders_pattern = f'{base_path}/[!snapshots]*' - logger.info('Removing regular folders') - for filepath in glob.glob(regular_folders_pattern): - if os.path.isdir(filepath): - logger.debug('Removing recursively %s', filepath) - shutil.rmtree(filepath) - if os.path.isfile(filepath): - os.remove(filepath) - - logger.info('Removing subvolumes') - subvolumes_pattern = f'{base_path}/snapshots/*/*' - for filepath in glob.glob(subvolumes_pattern): - logger.debug('Deleting subvolume %s', filepath) - if os.path.isdir(filepath): - rm_btrfs_subvolume(filepath) - else: - os.remove(filepath) - logger.info('Cleaning up snapshots folder') - if os.path.isdir(base_path): - shutil.rmtree(base_path) + cleanup_datadir_content(folder_path) + if os.path.isdir(folder_path): + shutil.rmtree(folder_path) diff --git a/node_cli/operations/base.py b/node_cli/operations/base.py index a5e29a4c..cd9a51b2 100644 --- a/node_cli/operations/base.py +++ b/node_cli/operations/base.py @@ -44,7 +44,7 @@ from node_cli.core.node_options import NodeOptions from node_cli.core.resources import init_shared_space_volume, update_resource_allocation from node_cli.core.schains import ( - cleanup_datadir_for_single_chain_node, + cleanup_no_lvm_datadir, update_node_cli_schain_status, ) from node_cli.operations.common import configure_filebeat, configure_flask, unpack_backup_archive @@ -425,6 +425,6 @@ def restore(env, backup_path, node_type: NodeType, config_only=False): def cleanup_sync(env, schain_name: str) -> None: turn_off(env, node_type=NodeType.SYNC) - cleanup_datadir_for_single_chain_node(schain_name=schain_name) + cleanup_no_lvm_datadir(schain_name=schain_name) rm_dir(GLOBAL_SKALE_DIR) rm_dir(SKALE_DIR) diff --git a/node_cli/operations/fair.py b/node_cli/operations/fair.py index 795ee741..4bc841d9 
100644 --- a/node_cli/operations/fair.py +++ b/node_cli/operations/fair.py @@ -36,7 +36,7 @@ from node_cli.core.host import ensure_btrfs_kernel_module_autoloaded, link_env_file, prepare_host from node_cli.core.nftables import configure_nftables from node_cli.core.nginx import generate_nginx_config -from node_cli.core.schains import cleanup_datadir_for_single_chain_node +from node_cli.core.schains import cleanup_no_lvm_datadir from node_cli.fair.record.chain_record import migrate_chain_record from node_cli.migrations.fair.from_boot import migrate_nftables_from_boot from node_cli.operations.base import checked_host, turn_off @@ -242,7 +242,7 @@ def restore_fair(env, backup_path, config_only=False): def cleanup(env) -> None: turn_off(env, node_type=NodeType.FAIR) - cleanup_datadir_for_single_chain_node() + cleanup_no_lvm_datadir() rm_dir(GLOBAL_SKALE_DIR) rm_dir(SKALE_DIR) cleanup_dir_content(NFTABLES_CHAIN_FOLDER_PATH) diff --git a/tests/core/core_schains_test.py b/tests/core/core_schains_test.py index c6ce426b..c9281adb 100644 --- a/tests/core/core_schains_test.py +++ b/tests/core/core_schains_test.py @@ -4,7 +4,7 @@ import freezegun -from node_cli.core.schains import cleanup_datadir_for_single_chain_node, toggle_schain_repair_mode +from node_cli.core.schains import cleanup_no_lvm_datadir, toggle_schain_repair_mode from node_cli.utils.helper import read_json from tests.helper import CURRENT_DATETIME, CURRENT_TIMESTAMP @@ -81,5 +81,5 @@ def test_cleanup_sync_datadir(tmp_sync_datadir): hash_path.touch() with mock.patch('node_cli.core.schains.rm_btrfs_subvolume'): - cleanup_datadir_for_single_chain_node(schain_name, base_path=tmp_sync_datadir) + cleanup_no_lvm_datadir(schain_name, base_path=tmp_sync_datadir) assert not os.path.isdir(base_folder) From 0cfefbe6836774bea1917ccbce08eee8e137f4d0 Mon Sep 17 00:00:00 2001 From: badrogger Date: Thu, 17 Jul 2025 19:09:52 +0100 Subject: [PATCH 143/332] Fix files cleanup --- node_cli/core/schains.py | 26 
+++++++++++++++++--------- node_cli/fair/fair_node.py | 1 - node_cli/utils/helper.py | 11 ++++++++++- scripts/build.sh | 0 4 files changed, 27 insertions(+), 11 deletions(-) mode change 100644 => 100755 scripts/build.sh diff --git a/node_cli/core/schains.py b/node_cli/core/schains.py index 17bc0c42..fe3aeeb4 100644 --- a/node_cli/core/schains.py +++ b/node_cli/core/schains.py @@ -40,6 +40,7 @@ from node_cli.utils.helper import ( error_exit, get_request, + is_btrfs_subvolume, read_json, run_cmd, safe_load_yml, @@ -255,24 +256,29 @@ def ensure_schain_volume(schain: str, schain_type: str, env_type: str) -> None: def cleanup_datadir_content(datadir_path: str) -> None: regular_folders_pattern = f'{datadir_path}/[!snapshots]*' - logger.info('Removing regular folders') + logger.info('Removing regular folders of %s', datadir_path) for path in glob.glob(regular_folders_pattern): + logger.debug('Removing recursively %s', path) + if os.path.isfile(path): + logger.debug('Deleting file in datadir: %s', path) + os.remove(path) if os.path.isdir(path): - logger.debug('Removing recursively %s', path) + logger.debug('Deleting folder in datadir: %s', path) shutil.rmtree(path) - if os.path.isfile(path): - os.remove(path) - logger.info('Removing subvolumes') + logger.info('Removing subvolumes of %s', datadir_path) subvolumes_pattern = f'{datadir_path}/snapshots/*/*' for path in glob.glob(subvolumes_pattern): - logger.debug('Deleting subvolume %s', path) - if os.path.isdir(path): + if is_btrfs_subvolume(path): + logger.debug('Deleting subvolume %s', path) rm_btrfs_subvolume(path) - else: + if os.path.isfile(path): + logger.debug('Deleting file in snapshots directory: %s', path) os.remove(path) + if os.path.isdir(path): + logger.debug('Deleting folder in snapshots directory %s', path) + shutil.rmtree(path) - logger.info('Removing snapshots folder') shutil.rmtree(os.path.join(datadir_path, 'snapshots'), ignore_errors=True) @@ -291,6 +297,8 @@ def cleanup_no_lvm_datadir( for folder_name in 
folders: folder_path = os.path.join(base_path, folder_name) if folder_name != 'shared-space': + logger.info('Removing datadir content for %s', folder_path) cleanup_datadir_content(folder_path) + logger.info('Removing datadir content for %s', folder_path) if os.path.isdir(folder_path): shutil.rmtree(folder_path) diff --git a/node_cli/fair/fair_node.py b/node_cli/fair/fair_node.py index e32149e9..0d08fd4c 100644 --- a/node_cli/fair/fair_node.py +++ b/node_cli/fair/fair_node.py @@ -129,7 +129,6 @@ def request_repair(snapshot_from: str = '') -> None: print(TEXTS['fair']['node']['repair']['repair_requested']) -@check_inited @check_user def cleanup() -> None: env = compose_node_env(SKALE_DIR_ENV_FILEPATH, save=False, node_type=NodeType.FAIR) diff --git a/node_cli/utils/helper.py b/node_cli/utils/helper.py index bff97fa6..638b63a3 100644 --- a/node_cli/utils/helper.py +++ b/node_cli/utils/helper.py @@ -326,7 +326,7 @@ def rm_dir(folder: str) -> None: def cleanup_dir_content(folder: str) -> None: if os.path.exists(folder): - logger.info('Removing contents of %s') + logger.info('Removing contents of %s', folder) for filename in os.listdir(folder): file_path = os.path.join(folder, filename) if os.path.isfile(file_path) or os.path.islink(file_path): @@ -413,3 +413,12 @@ def get_ssh_port(ssh_service_name='ssh'): def is_contract_address(value: str) -> bool: return bool(re.fullmatch(r'0x[a-fA-F0-9]{40}', value)) + + +def is_btrfs_subvolume(path: str) -> bool: + """Check if the given path is a Btrfs subvolume.""" + try: + output = run_cmd(['btrfs', 'subvolume', 'show', path], check_code=False) + return output.returncode == 0 + except subprocess.CalledProcessError: + return False diff --git a/scripts/build.sh b/scripts/build.sh old mode 100644 new mode 100755 From 8a2f315363d734b715953931af9f378e5fd72339 Mon Sep 17 00:00:00 2001 From: badrogger Date: Fri, 18 Jul 2025 18:31:12 +0100 Subject: [PATCH 144/332] Add fair repair command --- node_cli/cli/fair_node.py | 13 ++++----- 
node_cli/core/node.py | 48 ++++++++++++++++----------------- node_cli/fair/fair_node.py | 15 +++++------ node_cli/operations/__init__.py | 5 ++-- node_cli/operations/fair.py | 39 +++++++++++++++++++++++---- node_cli/utils/docker_utils.py | 24 ++++++++--------- 6 files changed, 85 insertions(+), 59 deletions(-) diff --git a/node_cli/cli/fair_node.py b/node_cli/cli/fair_node.py index b6461871..a4e2cbf8 100644 --- a/node_cli/cli/fair_node.py +++ b/node_cli/cli/fair_node.py @@ -21,13 +21,13 @@ from node_cli.core.node import backup from node_cli.fair.fair_node import cleanup as fair_cleanup -from node_cli.fair.fair_node import init as init_fair from node_cli.fair.fair_node import ( + get_node_info, migrate_from_boot, - request_repair, + repair_chain, restore_fair, - get_node_info, ) +from node_cli.fair.fair_node import init as init_fair from node_cli.fair.fair_node import register as register_fair from node_cli.fair.fair_node import update as update_fair from node_cli.utils.helper import IP_TYPE, URL_TYPE, abort_if_false, streamed_cmd @@ -119,7 +119,7 @@ def migrate_node(env_filepath: str) -> None: @click.option( '--snapshot-from', type=URL_TYPE, - default='', + default=None, hidden=True, help=TEXTS['fair']['node']['repair']['snapshot_from'], ) @@ -130,8 +130,9 @@ def migrate_node(env_filepath: str) -> None: expose_value=False, prompt=TEXTS['fair']['node']['repair']['warning'], ) -def repair(snapshot_from: str = '') -> None: - request_repair(snapshot_from=snapshot_from) +@streamed_cmd +def repair(snapshot_from: str | None = None) -> None: + repair_chain(snapshot_from=snapshot_from) @node.command('cleanup', help='Cleanup Fair node.') diff --git a/node_cli/core/node.py b/node_cli/core/node.py index f5e0a16b..d803ef58 100644 --- a/node_cli/core/node.py +++ b/node_cli/core/node.py @@ -28,6 +28,7 @@ import docker +from node_cli.cli import __version__ from node_cli.configs import ( BACKUP_ARCHIVE_NAME, CONTAINER_CONFIG_PATH, @@ -41,49 +42,46 @@ SKALE_STATE_DIR, 
TM_INIT_TIMEOUT, ) -from node_cli.cli import __version__ -from node_cli.configs.user import get_validated_user_config, SKALE_DIR_ENV_FILEPATH from node_cli.configs.cli_logger import LOG_DATA_PATH as CLI_LOG_DATA_PATH - -from node_cli.core.host import is_node_inited, save_env_params, get_flask_secret_key +from node_cli.configs.user import SKALE_DIR_ENV_FILEPATH, get_validated_user_config from node_cli.core.checks import run_checks as run_host_checks +from node_cli.core.host import get_flask_secret_key, is_node_inited, save_env_params from node_cli.core.resources import update_resource_allocation +from node_cli.migrations.focal_to_jammy import migrate as migrate_2_6 from node_cli.operations import ( + cleanup_sync_op, configure_nftables, - update_op, init_op, + init_sync_op, + restore_op, turn_off_op, turn_on_op, - restore_op, - init_sync_op, + update_op, update_sync_op, - cleanup_sync_op, ) -from node_cli.utils.print_formatters import ( - print_failed_requirements_checks, - print_node_cmd_error, - print_node_info, +from node_cli.utils.decorators import check_inited, check_not_inited, check_user +from node_cli.utils.docker_utils import ( + BASE_FAIR_BOOT_COMPOSE_SERVICES, + BASE_FAIR_COMPOSE_SERVICES, + BASE_SKALE_COMPOSE_SERVICES, + BASE_SYNC_COMPOSE_SERVICES, + is_admin_running, + is_api_running, ) +from node_cli.utils.exit_codes import CLIExitCodes from node_cli.utils.helper import ( error_exit, get_request, post_request, ) from node_cli.utils.meta import CliMetaManager -from node_cli.utils.texts import safe_load_texts -from node_cli.utils.exit_codes import CLIExitCodes -from node_cli.utils.decorators import check_not_inited, check_inited, check_user -from node_cli.utils.docker_utils import ( - is_admin_running, - is_api_running, - BASE_SKALE_COMPOSE_SERVICES, - BASE_SYNC_COMPOSE_SERVICES, - BASE_FAIR_COMPOSE_SERVICES, - BASE_FAIR_BOOT_COMPOSE_SERVICES, -) from node_cli.utils.node_type import NodeType -from node_cli.migrations.focal_to_jammy import migrate as 
migrate_2_6 - +from node_cli.utils.print_formatters import ( + print_failed_requirements_checks, + print_node_cmd_error, + print_node_info, +) +from node_cli.utils.texts import safe_load_texts logger = logging.getLogger(__name__) TEXTS = safe_load_texts() diff --git a/node_cli/fair/fair_node.py b/node_cli/fair/fair_node.py index 0d08fd4c..d2513bf1 100644 --- a/node_cli/fair/fair_node.py +++ b/node_cli/fair/fair_node.py @@ -27,11 +27,11 @@ from node_cli.core.docker_config import cleanup_docker_configuration from node_cli.core.host import is_node_inited, save_env_params from node_cli.core.node import compose_node_env, is_base_containers_alive -from node_cli.fair.record.chain_record import get_fair_chain_record from node_cli.operations import ( FairUpdateType, cleanup_fair_op, init_fair_op, + repair_fair_op, restore_fair_op, update_fair_op, ) @@ -121,14 +121,6 @@ def update(env_filepath: str, pull_config_for_schain: str | None = None) -> None logger.info('Fair update completed successfully') -def request_repair(snapshot_from: str = '') -> None: - env = compose_node_env(SKALE_DIR_ENV_FILEPATH, save=False, node_type=NodeType.FAIR) - record = get_fair_chain_record(env) - record.set_repair_ts(int(time.time())) - record.set_snapshot_from(snapshot_from) - print(TEXTS['fair']['node']['repair']['repair_requested']) - - @check_user def cleanup() -> None: env = compose_node_env(SKALE_DIR_ENV_FILEPATH, save=False, node_type=NodeType.FAIR) @@ -169,3 +161,8 @@ def register(ip: str) -> None: error_msg = payload logger.error(f'Registration error {error_msg}') error_exit(error_msg, exit_code=CLIExitCodes.BAD_API_RESPONSE) + + +def repair_chain(snapshot_from: str | None = None) -> None: + env = compose_node_env(SKALE_DIR_ENV_FILEPATH, save=False, node_type=NodeType.FAIR) + repair_fair_op(env=env, snapshot_from=snapshot_from) diff --git a/node_cli/operations/__init__.py b/node_cli/operations/__init__.py index 4f3aa410..d0f60eed 100644 --- a/node_cli/operations/__init__.py +++ 
b/node_cli/operations/__init__.py @@ -32,8 +32,9 @@ ) from node_cli.operations.fair import ( # noqa init as init_fair_op, - update_fair as update_fair_op, + update as update_fair_op, FairUpdateType, - restore_fair as restore_fair_op, + restore as restore_fair_op, + repair as repair_fair_op, cleanup as cleanup_fair_op, ) diff --git a/node_cli/operations/fair.py b/node_cli/operations/fair.py index 4bc841d9..1f2df69d 100644 --- a/node_cli/operations/fair.py +++ b/node_cli/operations/fair.py @@ -37,7 +37,7 @@ from node_cli.core.nftables import configure_nftables from node_cli.core.nginx import generate_nginx_config from node_cli.core.schains import cleanup_no_lvm_datadir -from node_cli.fair.record.chain_record import migrate_chain_record +from node_cli.fair.record.chain_record import get_fair_chain_record, migrate_chain_record from node_cli.migrations.fair.from_boot import migrate_nftables_from_boot from node_cli.operations.base import checked_host, turn_off from node_cli.operations.common import configure_filebeat, configure_flask, unpack_backup_archive @@ -53,12 +53,15 @@ compose_rm, compose_up, docker_cleanup, + is_admin_running, remove_dynamic_containers, + start_container_by_name, + stop_container_by_name, wait_for_container, ) from node_cli.utils.helper import cleanup_dir_content, rm_dir, str_to_bool from node_cli.utils.meta import FairCliMetaManager -from node_cli.utils.print_formatters import print_failed_requirements_checks +from node_cli.utils.print_formatters import TEXTS, print_failed_requirements_checks logger = logging.getLogger(__name__) @@ -145,7 +148,7 @@ def update_fair_boot(env_filepath: str, env: dict) -> bool: @checked_host -def update_fair(env_filepath: str, env: dict, update_type: FairUpdateType) -> bool: +def update(env_filepath: str, env: dict, update_type: FairUpdateType) -> bool: compose_rm(node_type=NodeType.FAIR, env=env) if update_type not in (FairUpdateType.INFRA_ONLY, FairUpdateType.FROM_BOOT): remove_dynamic_containers() @@ -193,7 
+196,7 @@ def update_fair(env_filepath: str, env: dict, update_type: FairUpdateType) -> bo return True -def restore_fair(env, backup_path, config_only=False): +def restore(env, backup_path, config_only=False): unpack_backup_archive(backup_path) failed_checks = run_host_checks( env['DISK_MOUNTPOINT'], @@ -240,10 +243,36 @@ def restore_fair(env, backup_path, config_only=False): return True -def cleanup(env) -> None: +def cleanup(env: dict) -> None: turn_off(env, node_type=NodeType.FAIR) cleanup_no_lvm_datadir() rm_dir(GLOBAL_SKALE_DIR) rm_dir(SKALE_DIR) cleanup_dir_content(NFTABLES_CHAIN_FOLDER_PATH) cleanup_docker_configuration() + + +def request_repair(env: dict, snapshot_from: str | None = None) -> None: + record = get_fair_chain_record(env) + record.set_repair_ts(int(time.time())) + if not snapshot_from: + snapshot_from = '' + record.set_snapshot_from(snapshot_from) + print(TEXTS['fair']['node']['repair']['repair_requested']) + + +def repair(env: dict, snapshot_from: str | None = None) -> None: + logger.info('Starting fair node repair') + container_name = 'fair_admin' + if is_admin_running(node_type=NodeType.FAIR): + logger.info('Stopping admin container') + stop_container_by_name(container_name=container_name) + logger.info('Removing chain container') + remove_dynamic_containers() + logger.info('Cleaning up datadir') + cleanup_no_lvm_datadir() + logger.info('Requesting fair node repair') + request_repair(env=env, snapshot_from=snapshot_from) + logger.info('Starting admin') + start_container_by_name(container_name=container_name) + logger.info('Fair node repair completed successfully') diff --git a/node_cli/utils/docker_utils.py b/node_cli/utils/docker_utils.py index 6986d44e..6ded9e34 100644 --- a/node_cli/utils/docker_utils.py +++ b/node_cli/utils/docker_utils.py @@ -160,7 +160,7 @@ def safe_rm(container: Container, timeout=DOCKER_DEFAULT_STOP_TIMEOUT, **kwargs) logger.info(f'Container removed: {container_name}') -def stop_container( +def 
stop_container_by_name( container_name: str, timeout: int = DOCKER_DEFAULT_STOP_TIMEOUT, dclient: Optional[DockerClient] = None, @@ -171,7 +171,7 @@ def stop_container( container.stop(timeout=timeout) -def rm_container( +def remove_container_by_name( container_name: str, timeout: int = DOCKER_DEFAULT_STOP_TIMEOUT, dclient: Optional[DockerClient] = None, @@ -180,19 +180,21 @@ def rm_container( container_names = [container.name for container in get_containers()] if container_name in container_names: container = dc.containers.get(container_name) - safe_rm(container) + safe_rm(container, timeout=timeout) -def start_container(container_name: str, dclient: Optional[DockerClient] = None) -> None: +def start_container_by_name(container_name: str, dclient: Optional[DockerClient] = None) -> None: dc = dclient or docker_client() container = dc.containers.get(container_name) logger.info('Starting container %s', container_name) container.start() -def remove_schain_container(schain_name: str, dclient: Optional[DockerClient] = None) -> None: +def remove_schain_container_by_name( + schain_name: str, dclient: Optional[DockerClient] = None +) -> None: container_name = f'skale_schain_{schain_name}' - rm_container(container_name, timeout=SCHAIN_REMOVE_TIMEOUT, dclient=dclient) + remove_container_by_name(container_name, timeout=SCHAIN_REMOVE_TIMEOUT, dclient=dclient) def backup_container_logs( @@ -401,14 +403,12 @@ def is_api_running(node_type: NodeType, dclient: Optional[DockerClient] = None) def is_admin_running(node_type: NodeType, client: Optional[DockerClient] = None) -> bool: + container_name = 'skale_admin' if node_type == NodeType.FAIR: - result = is_container_running(name='fair_admin', dclient=client) + container_name = 'fair_admin' elif node_type == NodeType.SYNC: - result = is_container_running(name='skale_sync_admin', dclient=client) - else: - result = is_container_running(name='skale_admin', dclient=client) - - return result + container_name = 'skale_sync_admin' + return 
is_container_running(name=container_name, dclient=client) def system_prune(): From a5523455f09f4e2c7aa6587be714399861c313aa Mon Sep 17 00:00:00 2001 From: badrogger Date: Fri, 18 Jul 2025 19:01:37 +0100 Subject: [PATCH 145/332] Disable test that test nothing --- tests/fair/fair_node_test.py | 20 ++------------------ 1 file changed, 2 insertions(+), 18 deletions(-) diff --git a/tests/fair/fair_node_test.py b/tests/fair/fair_node_test.py index fab00451..5d0c13bb 100644 --- a/tests/fair/fair_node_test.py +++ b/tests/fair/fair_node_test.py @@ -1,16 +1,14 @@ from unittest import mock -import freezegun import pytest from node_cli.configs import SKALE_DIR from node_cli.configs.user import SKALE_DIR_ENV_FILEPATH from node_cli.fair.fair_boot import init as init_boot from node_cli.fair.fair_boot import update -from node_cli.fair.fair_node import cleanup, migrate_from_boot, request_repair, restore_fair +from node_cli.fair.fair_node import cleanup, migrate_from_boot, restore_fair from node_cli.operations.fair import FairUpdateType from node_cli.utils.node_type import NodeType -from tests.helper import CURRENT_DATETIME, CURRENT_TIMESTAMP @mock.patch('node_cli.fair.fair_node.time.sleep') @@ -131,18 +129,6 @@ def test_migrate_from_boot( ) -@freezegun.freeze_time(CURRENT_DATETIME) -@mock.patch('node_cli.fair.fair_node.compose_node_env', return_value={'ENV_TYPE': 'devnet'}) -@mock.patch('node_cli.fair.record.chain_record.get_fair_chain_name', return_value='test') -def test_fair_repair(compose_node_env_mock, get_static_params_mock, redis_client, inited_node): - request_repair() - assert redis_client.get('test_repair_ts') == f'{CURRENT_TIMESTAMP}'.encode('utf-8') - assert redis_client.get('test_snapshot_from') == b'' - request_repair(snapshot_from='127.0.0.1') - assert redis_client.get('test_repair_ts') == f'{CURRENT_TIMESTAMP}'.encode('utf-8') - assert redis_client.get('test_snapshot_from') == b'127.0.0.1' - - @mock.patch('node_cli.utils.decorators.is_user_valid', return_value=True) 
@mock.patch('node_cli.fair.fair_node.cleanup_docker_configuration') @mock.patch('node_cli.fair.fair_node.cleanup_fair_op') @@ -203,9 +189,7 @@ def test_cleanup_calls_operations_in_correct_order( @mock.patch('node_cli.utils.decorators.is_user_valid', return_value=True) @mock.patch('node_cli.fair.fair_node.cleanup_docker_configuration') -@mock.patch( - 'node_cli.fair.fair_node.cleanup_fair_op', side_effect=Exception('Cleanup failed') -) +@mock.patch('node_cli.fair.fair_node.cleanup_fair_op', side_effect=Exception('Cleanup failed')) @mock.patch('node_cli.fair.fair_node.compose_node_env') def test_cleanup_continues_after_fair_op_error( mock_compose_env, From 06de31d46cfb29f304a545d5299cf0d3a965823c Mon Sep 17 00:00:00 2001 From: badrogger Date: Mon, 21 Jul 2025 17:53:07 +0100 Subject: [PATCH 146/332] Use only snapshot_from field --- node_cli/cli/fair_node.py | 4 ++-- node_cli/fair/fair_node.py | 2 +- node_cli/operations/fair.py | 8 ++++---- 3 files changed, 7 insertions(+), 7 deletions(-) diff --git a/node_cli/cli/fair_node.py b/node_cli/cli/fair_node.py index a4e2cbf8..aa510023 100644 --- a/node_cli/cli/fair_node.py +++ b/node_cli/cli/fair_node.py @@ -119,7 +119,7 @@ def migrate_node(env_filepath: str) -> None: @click.option( '--snapshot-from', type=URL_TYPE, - default=None, + default='any', hidden=True, help=TEXTS['fair']['node']['repair']['snapshot_from'], ) @@ -131,7 +131,7 @@ def migrate_node(env_filepath: str) -> None: prompt=TEXTS['fair']['node']['repair']['warning'], ) @streamed_cmd -def repair(snapshot_from: str | None = None) -> None: +def repair(snapshot_from: str = 'any') -> None: repair_chain(snapshot_from=snapshot_from) diff --git a/node_cli/fair/fair_node.py b/node_cli/fair/fair_node.py index d2513bf1..f16dbdbe 100644 --- a/node_cli/fair/fair_node.py +++ b/node_cli/fair/fair_node.py @@ -163,6 +163,6 @@ def register(ip: str) -> None: error_exit(error_msg, exit_code=CLIExitCodes.BAD_API_RESPONSE) -def repair_chain(snapshot_from: str | None = None) -> None: 
+def repair_chain(snapshot_from: str = 'any') -> None: env = compose_node_env(SKALE_DIR_ENV_FILEPATH, save=False, node_type=NodeType.FAIR) repair_fair_op(env=env, snapshot_from=snapshot_from) diff --git a/node_cli/operations/fair.py b/node_cli/operations/fair.py index 1f2df69d..9c92667f 100644 --- a/node_cli/operations/fair.py +++ b/node_cli/operations/fair.py @@ -252,16 +252,16 @@ def cleanup(env: dict) -> None: cleanup_docker_configuration() -def request_repair(env: dict, snapshot_from: str | None = None) -> None: +def request_repair(env: dict, snapshot_from: str = 'any') -> None: record = get_fair_chain_record(env) - record.set_repair_ts(int(time.time())) + # record.set_repair_ts(int(time.time())) if not snapshot_from: - snapshot_from = '' + snapshot_from = 'any' record.set_snapshot_from(snapshot_from) print(TEXTS['fair']['node']['repair']['repair_requested']) -def repair(env: dict, snapshot_from: str | None = None) -> None: +def repair(env: dict, snapshot_from: str = 'any') -> None: logger.info('Starting fair node repair') container_name = 'fair_admin' if is_admin_running(node_type=NodeType.FAIR): From aef6cf8623cf89dc8fc8c5b3efdd1abf5ec5196a Mon Sep 17 00:00:00 2001 From: badrogger Date: Mon, 21 Jul 2025 19:57:50 +0100 Subject: [PATCH 147/332] Rename request_repair to trigger_skaled_snapshot_mode --- node_cli/operations/fair.py | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/node_cli/operations/fair.py b/node_cli/operations/fair.py index 9c92667f..ab1fb3f9 100644 --- a/node_cli/operations/fair.py +++ b/node_cli/operations/fair.py @@ -252,9 +252,8 @@ def cleanup(env: dict) -> None: cleanup_docker_configuration() -def request_repair(env: dict, snapshot_from: str = 'any') -> None: +def trigger_skaled_snapshot_mode(env: dict, snapshot_from: str = 'any') -> None: record = get_fair_chain_record(env) - # record.set_repair_ts(int(time.time())) if not snapshot_from: snapshot_from = 'any' record.set_snapshot_from(snapshot_from) @@ -272,7 +271,7 @@ 
def repair(env: dict, snapshot_from: str = 'any') -> None: logger.info('Cleaning up datadir') cleanup_no_lvm_datadir() logger.info('Requesting fair node repair') - request_repair(env=env, snapshot_from=snapshot_from) + trigger_skaled_snapshot_mode(env=env, snapshot_from=snapshot_from) logger.info('Starting admin') start_container_by_name(container_name=container_name) logger.info('Fair node repair completed successfully') From 0183dea8814b67c4faa6739d32f9da8b24ca6ae9 Mon Sep 17 00:00:00 2001 From: Alex Sheverdin Date: Thu, 24 Jul 2025 20:07:24 +0100 Subject: [PATCH 148/332] Add `change-ip` command --- node_cli/cli/fair_node.py | 11 +++++++++++ node_cli/fair/fair_node.py | 19 +++++++++++++++++++ 2 files changed, 30 insertions(+) diff --git a/node_cli/cli/fair_node.py b/node_cli/cli/fair_node.py index aa510023..23694adb 100644 --- a/node_cli/cli/fair_node.py +++ b/node_cli/cli/fair_node.py @@ -30,6 +30,7 @@ from node_cli.fair.fair_node import init as init_fair from node_cli.fair.fair_node import register as register_fair from node_cli.fair.fair_node import update as update_fair +from node_cli.fair.fair_node import change_ip as change_ip_fair from node_cli.utils.helper import IP_TYPE, URL_TYPE, abort_if_false, streamed_cmd from node_cli.utils.texts import safe_load_texts @@ -146,3 +147,13 @@ def repair(snapshot_from: str = 'any') -> None: @streamed_cmd def cleanup_node(): fair_cleanup() + + +@node.command('change-ip', help=TEXTS['fair']['node']['change-ip']['help']) +@click.option('--ip', + required=True, + type=IP_TYPE, + help=TEXTS['fair']['node']['change-ip']['ip'] +) +def change_ip(ip: str) -> None: + change_ip_fair(ip=ip) diff --git a/node_cli/fair/fair_node.py b/node_cli/fair/fair_node.py index f16dbdbe..1e33a6d5 100644 --- a/node_cli/fair/fair_node.py +++ b/node_cli/fair/fair_node.py @@ -166,3 +166,22 @@ def register(ip: str) -> None: def repair_chain(snapshot_from: str = 'any') -> None: env = compose_node_env(SKALE_DIR_ENV_FILEPATH, save=False, 
node_type=NodeType.FAIR) repair_fair_op(env=env, snapshot_from=snapshot_from) + + +@check_inited +@check_user +def change_ip(ip: str) -> None: + if not is_node_inited(): + print(TEXTS['fair']['node']['not_inited']) + return + + json_data = {'ip': ip, 'port': DEFAULT_SKALED_BASE_PORT} + status, payload = post_request(blueprint=BLUEPRINT_NAME, method='change-ip', json=json_data) + if status == 'ok': + msg = TEXTS['fair']['node']['ip_changed'] + logger.info(msg) + print(msg) + else: + error_msg = payload + logger.error(f'Change IP error {error_msg}') + error_exit(error_msg, exit_code=CLIExitCodes.BAD_API_RESPONSE) From 4a719774c0c1efff183279804850ae8dbaee5a3f Mon Sep 17 00:00:00 2001 From: Alex Sheverdin Date: Thu, 24 Jul 2025 20:22:34 +0100 Subject: [PATCH 149/332] Add texts for `change-ip` command --- text.yml | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/text.yml b/text.yml index c93a743e..30315dbe 100644 --- a/text.yml +++ b/text.yml @@ -91,3 +91,7 @@ fair: help: Register node in fair manager name: Name of the node in fair manager ip: IP address of the node in fair manager + ip_changed: Node IP changed in fair manager + change-ip: + help: Change the node IP in fair manager + ip: New IP address of the node in fair manager \ No newline at end of file From c8fe17c074847fdb646b79e7ee9dc033b5489bfc Mon Sep 17 00:00:00 2001 From: Alex Sheverdin Date: Thu, 24 Jul 2025 20:26:44 +0100 Subject: [PATCH 150/332] Improve texts for Fair commands --- text.yml | 18 +++++++++--------- 1 file changed, 9 insertions(+), 9 deletions(-) diff --git a/text.yml b/text.yml index 30315dbe..db7617d8 100644 --- a/text.yml +++ b/text.yml @@ -80,18 +80,18 @@ lvmpy: fair: node: repair: - help: Repair fair chain node - warning: Are you sure you want to repair fair chain node? In rare cases may cause data loss and require additional maintenance + help: Repair Fair chain node + warning: Are you sure you want to repair Fair chain node? 
In rare cases may cause data loss and require additional maintenance snapshot_from: IP of the node to take snapshot from repair_requested: Repair mode is requested not_inited: Node should be initialized to proceed with operation - registered: Node is registered in fair manager. + registered: Node is registered in Fair manager. register: - help: Register node in fair manager - name: Name of the node in fair manager - ip: IP address of the node in fair manager - ip_changed: Node IP changed in fair manager + help: Register node in Fair manager + name: Name of the node in Fair manager + ip: IP address of the node in Fair manager + ip_changed: Node IP changed in Fair manager change-ip: - help: Change the node IP in fair manager - ip: New IP address of the node in fair manager \ No newline at end of file + help: Change the node IP in Fair manager + ip: New IP address of the node in Fair manager \ No newline at end of file From d21fd7aba9a33ac539ed9c4f1491b84f9f56d1ce Mon Sep 17 00:00:00 2001 From: Alex Sheverdin Date: Thu, 24 Jul 2025 20:28:44 +0100 Subject: [PATCH 151/332] Fit code to PEP 8 --- node_cli/cli/fair_node.py | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/node_cli/cli/fair_node.py b/node_cli/cli/fair_node.py index 23694adb..ae2abe18 100644 --- a/node_cli/cli/fair_node.py +++ b/node_cli/cli/fair_node.py @@ -150,10 +150,11 @@ def cleanup_node(): @node.command('change-ip', help=TEXTS['fair']['node']['change-ip']['help']) -@click.option('--ip', - required=True, - type=IP_TYPE, - help=TEXTS['fair']['node']['change-ip']['ip'] +@click.option( + '--ip', + required=True, + type=IP_TYPE, + help=TEXTS['fair']['node']['change-ip']['ip'] ) def change_ip(ip: str) -> None: change_ip_fair(ip=ip) From f05aafe7e2a1feb29e7decf506264f9d9814394d Mon Sep 17 00:00:00 2001 From: Alex Sheverdin Date: Fri, 25 Jul 2025 19:13:51 +0100 Subject: [PATCH 152/332] Add `change-ip` to ROUTES --- node_cli/configs/routes.py | 2 +- 1 file changed, 1 insertion(+), 1 
deletion(-) diff --git a/node_cli/configs/routes.py b/node_cli/configs/routes.py index 8afc10d9..942cdfd4 100644 --- a/node_cli/configs/routes.py +++ b/node_cli/configs/routes.py @@ -40,7 +40,7 @@ 'schains': ['config', 'list', 'dkg-statuses', 'firewall-rules', 'repair', 'get'], 'ssl': ['status', 'upload'], 'wallet': ['info', 'send-eth'], - 'fair-node': ['info', 'register'], + 'fair-node': ['info', 'register', 'change-ip'], } } From 1251c42b05533d5728860db13bc38bb4ed21ac8b Mon Sep 17 00:00:00 2001 From: Alex Sheverdin Date: Sat, 26 Jul 2025 16:25:38 +0100 Subject: [PATCH 153/332] Replace decorator @click.option for `change-ip` by @click.argument --- node_cli/cli/fair_node.py | 8 +++----- text.yml | 3 +-- 2 files changed, 4 insertions(+), 7 deletions(-) diff --git a/node_cli/cli/fair_node.py b/node_cli/cli/fair_node.py index ae2abe18..48fbb482 100644 --- a/node_cli/cli/fair_node.py +++ b/node_cli/cli/fair_node.py @@ -150,11 +150,9 @@ def cleanup_node(): @node.command('change-ip', help=TEXTS['fair']['node']['change-ip']['help']) -@click.option( - '--ip', - required=True, - type=IP_TYPE, - help=TEXTS['fair']['node']['change-ip']['ip'] +@click.argument( + 'ip', + type=IP_TYPE ) def change_ip(ip: str) -> None: change_ip_fair(ip=ip) diff --git a/text.yml b/text.yml index db7617d8..9aa99951 100644 --- a/text.yml +++ b/text.yml @@ -93,5 +93,4 @@ fair: ip: IP address of the node in Fair manager ip_changed: Node IP changed in Fair manager change-ip: - help: Change the node IP in Fair manager - ip: New IP address of the node in Fair manager \ No newline at end of file + help: Change the node IP in Fair manager \ No newline at end of file From 300d3c1be99340c0ad6c121eec18f811d5785b4c Mon Sep 17 00:00:00 2001 From: Alex Sheverdin Date: Mon, 28 Jul 2025 12:55:01 +0100 Subject: [PATCH 154/332] Fix tests --- tests/routes_test.py | 1 + 1 file changed, 1 insertion(+) diff --git a/tests/routes_test.py b/tests/routes_test.py index 872c53e1..3dd87f84 100644 --- a/tests/routes_test.py 
+++ b/tests/routes_test.py @@ -33,6 +33,7 @@ '/api/v1/wallet/send-eth', '/api/v1/fair-node/info', '/api/v1/fair-node/register', + '/api/v1/fair-node/change-ip', ] From 9467dad9601a7ac4a0d6b0d163183d2ae14c38da Mon Sep 17 00:00:00 2001 From: badrogger Date: Mon, 28 Jul 2025 19:32:16 +0100 Subject: [PATCH 155/332] Fix firewall chain moving during migrate --- node_cli/migrations/fair/from_boot.py | 20 ++++++++++++-------- node_cli/operations/fair.py | 4 +++- 2 files changed, 15 insertions(+), 9 deletions(-) diff --git a/node_cli/migrations/fair/from_boot.py b/node_cli/migrations/fair/from_boot.py index 6affdff3..784908f3 100644 --- a/node_cli/migrations/fair/from_boot.py +++ b/node_cli/migrations/fair/from_boot.py @@ -1,4 +1,3 @@ -import glob import logging import os from pathlib import Path @@ -21,6 +20,7 @@ def rename_chain_file(old_filepath: str, new_filepath: str) -> None: new_path = Path(new_filepath) if not old_path.exists(): raise NoLegacyNFTChainConfigError(f'File {old_filepath} does not exists') + old_path.rename(Path(new_path)) @@ -35,24 +35,28 @@ def rename_chain_in_config(config_path: str, old_chain_name: str, new_chain_name f.write(updated_content) -def migrate_nft_chain() -> None: - after_boot_chain_path = glob.glob(os.path.join(NFT_CHAIN_BASE_PATH, '*'))[0] - old_chain_name = Path(after_boot_chain_path).name.removesuffix('.conf') +def migrate_nft_chain(chain_name: str) -> None: + after_boot_chain_path = os.path.join(NFT_CHAIN_BASE_PATH, f'skale-{chain_name}.conf') new_chain_name = NFT_COMMITTEE_SCOPE_CHAIN_NAME - rename_chain_in_config(after_boot_chain_path, old_chain_name, new_chain_name) after_migration_chain_path = os.path.join( NFT_CHAIN_BASE_PATH, f'{NFT_COMMITTEE_SCOPE_CHAIN_NAME}.conf' ) - rename_chain_file(after_boot_chain_path, after_migration_chain_path) + logger.debug('Renaming %s to %s', after_boot_chain_path, after_migration_chain_path) + if os.path.isfile(after_boot_chain_path): + rename_chain_in_config(after_boot_chain_path, 
f'skale-{chain_name}', new_chain_name) + if os.path.isfile(after_migration_chain_path): + os.remove(after_boot_chain_path) + else: + rename_chain_file(after_boot_chain_path, after_migration_chain_path) def reload_nft(): run_cmd(['nft', '-f', '/etc/nftables.conf']) -def migrate_nftables_from_boot(): +def migrate_nftables_from_boot(chain_name: str): logger.info('Starting nftables migration from boot') - migrate_nft_chain() + migrate_nft_chain(chain_name=chain_name) logger.info('Reloading nftables rules') reload_nft() logger.info('Restart docker service') diff --git a/node_cli/operations/fair.py b/node_cli/operations/fair.py index ab1fb3f9..91e8ca1c 100644 --- a/node_cli/operations/fair.py +++ b/node_cli/operations/fair.py @@ -37,6 +37,7 @@ from node_cli.core.nftables import configure_nftables from node_cli.core.nginx import generate_nginx_config from node_cli.core.schains import cleanup_no_lvm_datadir +from node_cli.core.static_config import get_fair_chain_name from node_cli.fair.record.chain_record import get_fair_chain_record, migrate_chain_record from node_cli.migrations.fair.from_boot import migrate_nftables_from_boot from node_cli.operations.base import checked_host, turn_off @@ -181,8 +182,9 @@ def update(env_filepath: str, env: dict, update_type: FairUpdateType) -> bool: distro.version(), ) + fair_chain_name = get_fair_chain_name(env) if update_type == FairUpdateType.FROM_BOOT: - migrate_nftables_from_boot() + migrate_nftables_from_boot(chain_name=fair_chain_name) update_images(env=env, node_type=NodeType.FAIR) From 2ada60e3104977cf2cdbca41147a7db0e60607ca Mon Sep 17 00:00:00 2001 From: badrogger Date: Tue, 29 Jul 2025 13:48:15 +0100 Subject: [PATCH 156/332] Fix fair repair snapshot-from validation --- node_cli/cli/fair_node.py | 4 ++-- node_cli/utils/helper.py | 10 ++++++++++ 2 files changed, 12 insertions(+), 2 deletions(-) diff --git a/node_cli/cli/fair_node.py b/node_cli/cli/fair_node.py index aa510023..3fd6bc66 100644 --- a/node_cli/cli/fair_node.py +++ 
b/node_cli/cli/fair_node.py @@ -30,7 +30,7 @@ from node_cli.fair.fair_node import init as init_fair from node_cli.fair.fair_node import register as register_fair from node_cli.fair.fair_node import update as update_fair -from node_cli.utils.helper import IP_TYPE, URL_TYPE, abort_if_false, streamed_cmd +from node_cli.utils.helper import IP_TYPE, URL_OR_ANY_TYPE, abort_if_false, streamed_cmd from node_cli.utils.texts import safe_load_texts TEXTS = safe_load_texts() @@ -118,7 +118,7 @@ def migrate_node(env_filepath: str) -> None: @node.command('repair', help='Toggle fair chain repair mode') @click.option( '--snapshot-from', - type=URL_TYPE, + type=URL_OR_ANY_TYPE, default='any', hidden=True, help=TEXTS['fair']['node']['repair']['snapshot_from'], diff --git a/node_cli/utils/helper.py b/node_cli/utils/helper.py index 638b63a3..2eb778fb 100644 --- a/node_cli/utils/helper.py +++ b/node_cli/utils/helper.py @@ -382,6 +382,15 @@ def convert(self, value, param, ctx): return value +class UrlOrAnyType(click.ParamType): + name = 'url' + + def convert(self, value, param, ctx): + if value == 'any': + return value + super().convert(value, param, ctx) + + class IpType(click.ParamType): name = 'ip' @@ -394,6 +403,7 @@ def convert(self, value, param, ctx): URL_TYPE = UrlType() +URL_OR_ANY_TYPE = UrlOrAnyType() IP_TYPE = IpType() From cf04391ff92a1a34d3e547dbaa72a22a77af6be0 Mon Sep 17 00:00:00 2001 From: Dmytro Date: Thu, 31 Jul 2025 13:16:57 +0100 Subject: [PATCH 157/332] Rename CONTAINER_CONFIGS_STREAM to NODE_VERSION --- README.md | 26 ++++++++++++------------ node_cli/configs/user.py | 2 +- node_cli/fair/record/chain_record.py | 2 +- node_cli/operations/base.py | 30 ++++++++++++++-------------- node_cli/operations/fair.py | 16 +++++++-------- tests/conftest.py | 10 +++++----- 6 files changed, 43 insertions(+), 43 deletions(-) diff --git a/README.md b/README.md index 9336f431..8d1e924c 100644 --- a/README.md +++ b/README.md @@ -45,9 +45,9 @@ Ensure that the following packages are 
installed: **docker**, **docker-compose** This binary (`skale-VERSION-OS`) is used for managing standard SKALE validator nodes. ```shell -# Replace {version} with the desired release version (e.g., 2.6.0) -VERSION_NUM={version} && \ -sudo -E bash -c "curl -L https://github.com/skalenetwork/node-cli/releases/download/$VERSION_NUM/skale-$VERSION_NUM-`uname -s`-`uname -m` > /usr/local/bin/skale" +# Replace {version} with the desired release version (e.g., 3.0.0) +CLI_VERSION={version} && \ +sudo -E bash -c "curl -L https://github.com/skalenetwork/node-cli/releases/download/$CLI_VERSION/skale-$CLI_VERSION-`uname -s`-`uname -m` > /usr/local/bin/skale" ``` ### Sync Node Binary @@ -55,19 +55,19 @@ sudo -E bash -c "curl -L https://github.com/skalenetwork/node-cli/releases/downl This binary (`skale-VERSION-OS-sync`) is used for managing dedicated Sync nodes. **Ensure you download the correct `-sync` suffixed binary for Sync node operations.** ```shell -# Replace {version} with the desired release version (e.g., 2.6.0) -VERSION_NUM={version} && \ -sudo -E bash -c "curl -L https://github.com/skalenetwork/node-cli/releases/download/$VERSION_NUM/skale-$VERSION_NUM-`uname -s`-`uname -m`-sync > /usr/local/bin/skale" +# Replace {version} with the desired release version (e.g., 3.0.0) +CLI_VERSION={version} && \ +sudo -E bash -c "curl -L https://github.com/skalenetwork/node-cli/releases/download/$CLI_VERSION/skale-$CLI_VERSION-`uname -s`-`uname -m`-sync > /usr/local/bin/skale" ``` ### Fair Node Binary -This binary (`skale-VERSION-OS-fair`) is used specifically for managing nodes on the Fair network. It is named `fair`. +This binary (`skale-VERSION-OS-fair`) is used specifically for managing nodes on the Fair network. 
```shell -# Replace {version} with the desired release version (e.g., 2.6.0) -VERSION_NUM={version} && \ -sudo -E bash -c "curl -L https://github.com/skalenetwork/node-cli/releases/download/$VERSION_NUM/skale-$VERSION_NUM-`uname -s`-`uname -m`-fair > /usr/local/bin/fair" +# Replace {version} with the desired release version (e.g., 3.0.0) +CLI_VERSION={version} && \ +sudo -E bash -c "curl -L https://github.com/skalenetwork/node-cli/releases/download/$CLI_VERSION/skale-$CLI_VERSION-`uname -s`-`uname -m`-fair > /usr/local/bin/fair" ``` ### Permissions and Testing @@ -155,7 +155,7 @@ Required environment variables in `ENV_FILE`: * `SGX_SERVER_URL` - SGX server URL. * `DISK_MOUNTPOINT` - Mount point for storing sChains data. * `DOCKER_LVMPY_STREAM` - Stream of `docker-lvmpy` to use. -* `CONTAINER_CONFIGS_STREAM` - Stream of `skale-node` to use. +* `NODE_VERSION` - Stream of `skale-node` to use. * `ENDPOINT` - RPC endpoint of the network where SKALE Manager is deployed. * `MANAGER_CONTRACTS` - SKALE Manager `message_proxy_mainnet` contract alias or address. * `IMA_CONTRACTS` - IMA `skale_manager` contract alias or address. @@ -595,7 +595,7 @@ Required environment variables in `ENV_FILE`: * `DISK_MOUNTPOINT` - Mount point for storing sChain data. * `DOCKER_LVMPY_STREAM` - Stream of `docker-lvmpy`. -* `CONTAINER_CONFIGS_STREAM` - Stream of `skale-node`. +* `NODE_VERSION` - Stream of `skale-node`. * `ENDPOINT` - RPC endpoint of the network where SKALE Manager is deployed. * `MANAGER_CONTRACTS` - SKALE Manager alias or address. * `IMA_CONTRACTS` - IMA alias or address. @@ -695,7 +695,7 @@ Required environment variables in `ENV_FILE`: * `SGX_SERVER_URL` - SGX server URL. * `DISK_MOUNTPOINT` - Mount point for storing data (BTRFS recommended). -* `CONTAINER_CONFIGS_STREAM` - Stream of `skale-node` configs. +* `NODE_VERSION` - Stream of `skale-node` configs. * `ENDPOINT` - RPC endpoint of the network where Fair Manager is deployed. 
* `MANAGER_CONTRACTS` - SKALE Manager alias or address. * `IMA_CONTRACTS` - IMA alias or address (*Note: Required by boot service, may not be used by Fair itself*). diff --git a/node_cli/configs/user.py b/node_cli/configs/user.py index 49f44d58..eb8764ed 100644 --- a/node_cli/configs/user.py +++ b/node_cli/configs/user.py @@ -44,7 +44,7 @@ class ValidationResult(NamedTuple): @dataclass(kw_only=True) class BaseUserConfig(ABC): - container_configs_stream: str + node_version: str env_type: str filebeat_host: str disk_mountpoint: str diff --git a/node_cli/fair/record/chain_record.py b/node_cli/fair/record/chain_record.py index a97ca9fe..3884d197 100644 --- a/node_cli/fair/record/chain_record.py +++ b/node_cli/fair/record/chain_record.py @@ -75,7 +75,7 @@ def get_fair_chain_record(env: dict) -> ChainRecord: def migrate_chain_record(env: dict) -> None: - version = env['CONTAINER_CONFIGS_STREAM'] + version = env['NODE_VERSION'] logger.info('Migrating fair chain record, setting config version to %s', version) record = get_fair_chain_record(env) record.set_config_version(version) diff --git a/node_cli/operations/base.py b/node_cli/operations/base.py index cd9a51b2..df7eec45 100644 --- a/node_cli/operations/base.py +++ b/node_cli/operations/base.py @@ -76,7 +76,7 @@ def checked_host(func): @functools.wraps(func) def wrapper(env_filepath: str, env: Dict, *args, **kwargs): - download_skale_node(env.get('CONTAINER_CONFIGS_STREAM'), env.get('CONTAINER_CONFIGS_DIR')) + download_skale_node(env.get('NODE_VERSION'), env.get('CONTAINER_CONFIGS_DIR')) failed_checks = run_host_checks( env['DISK_MOUNTPOINT'], TYPE, @@ -130,17 +130,17 @@ def update(env_filepath: str, env: Dict, node_type: NodeType) -> bool: meta_manager = CliMetaManager() current_stream = meta_manager.get_meta_info().config_stream skip_cleanup = env.get('SKIP_DOCKER_CLEANUP') == 'True' - if not skip_cleanup and current_stream != env['CONTAINER_CONFIGS_STREAM']: + if not skip_cleanup and current_stream != 
env['NODE_VERSION']: logger.info( 'Stream version was changed from %s to %s', current_stream, - env['CONTAINER_CONFIGS_STREAM'], + env['NODE_VERSION'], ) docker_cleanup() meta_manager.update_meta( VERSION, - env['CONTAINER_CONFIGS_STREAM'], + env['NODE_VERSION'], env['DOCKER_LVMPY_STREAM'], distro.id(), distro.version(), @@ -173,17 +173,17 @@ def update_fair_boot(env_filepath: str, env: Dict) -> bool: meta_manager = FairCliMetaManager() current_stream = meta_manager.get_meta_info().config_stream skip_cleanup = env.get('SKIP_DOCKER_CLEANUP') == 'True' - if not skip_cleanup and current_stream != env['CONTAINER_CONFIGS_STREAM']: + if not skip_cleanup and current_stream != env['NODE_VERSION']: logger.info( 'Stream version was changed from %s to %s', current_stream, - env['CONTAINER_CONFIGS_STREAM'], + env['NODE_VERSION'], ) docker_cleanup() meta_manager.update_meta( VERSION, - env['CONTAINER_CONFIGS_STREAM'], + env['NODE_VERSION'], distro.id(), distro.version(), ) @@ -216,7 +216,7 @@ def init(env_filepath: str, env: dict, node_type: NodeType) -> None: meta_manager = CliMetaManager() meta_manager.update_meta( VERSION, - env['CONTAINER_CONFIGS_STREAM'], + env['NODE_VERSION'], env['DOCKER_LVMPY_STREAM'], distro.id(), distro.version(), @@ -250,7 +250,7 @@ def init_fair_boot(env_filepath: str, env: dict) -> None: meta_manager = FairCliMetaManager() meta_manager.update_meta( VERSION, - env['CONTAINER_CONFIGS_STREAM'], + env['NODE_VERSION'], distro.id(), distro.version(), ) @@ -268,7 +268,7 @@ def init_sync( snapshot_from: Optional[str], ) -> None: cleanup_volume_artifacts(env['DISK_MOUNTPOINT']) - download_skale_node(env.get('CONTAINER_CONFIGS_STREAM'), env.get('CONTAINER_CONFIGS_DIR')) + download_skale_node(env.get('NODE_VERSION'), env.get('CONTAINER_CONFIGS_DIR')) sync_skale_node() if env.get('SKIP_DOCKER_CONFIG') != 'True': @@ -296,7 +296,7 @@ def init_sync( meta_manager = CliMetaManager() meta_manager.update_meta( VERSION, - env['CONTAINER_CONFIGS_STREAM'], + 
env['NODE_VERSION'], None, distro.id(), distro.version(), @@ -317,7 +317,7 @@ def update_sync(env_filepath: str, env: Dict) -> bool: compose_rm(env=env, node_type=NodeType.SYNC) remove_dynamic_containers() cleanup_volume_artifacts(env['DISK_MOUNTPOINT']) - download_skale_node(env['CONTAINER_CONFIGS_STREAM'], env.get('CONTAINER_CONFIGS_DIR')) + download_skale_node(env['NODE_VERSION'], env.get('CONTAINER_CONFIGS_DIR')) sync_skale_node() if env.get('SKIP_DOCKER_CONFIG') != 'True': @@ -336,7 +336,7 @@ def update_sync(env_filepath: str, env: Dict) -> bool: meta_manager = CliMetaManager() meta_manager.update_meta( VERSION, - env['CONTAINER_CONFIGS_STREAM'], + env['NODE_VERSION'], env['DOCKER_LVMPY_STREAM'], distro.id(), distro.version(), @@ -359,7 +359,7 @@ def turn_on(env: dict, node_type: NodeType) -> None: meta_manager = CliMetaManager() meta_manager.update_meta( VERSION, - env['CONTAINER_CONFIGS_STREAM'], + env['NODE_VERSION'], env['DOCKER_LVMPY_STREAM'], distro.id(), distro.version(), @@ -402,7 +402,7 @@ def restore(env, backup_path, node_type: NodeType, config_only=False): meta_manager = CliMetaManager() meta_manager.update_meta( VERSION, - env['CONTAINER_CONFIGS_STREAM'], + env['NODE_VERSION'], env['DOCKER_LVMPY_STREAM'], distro.id(), distro.version(), diff --git a/node_cli/operations/fair.py b/node_cli/operations/fair.py index 91e8ca1c..f7f708ee 100644 --- a/node_cli/operations/fair.py +++ b/node_cli/operations/fair.py @@ -95,7 +95,7 @@ def init(env_filepath: str, env: dict) -> bool: meta_manager = FairCliMetaManager() meta_manager.update_meta( VERSION, - env['CONTAINER_CONFIGS_STREAM'], + env['NODE_VERSION'], distro.id(), distro.version(), ) @@ -129,17 +129,17 @@ def update_fair_boot(env_filepath: str, env: dict) -> bool: meta_manager = FairCliMetaManager() current_stream = meta_manager.get_meta_info().config_stream skip_cleanup = env.get('SKIP_DOCKER_CLEANUP') == 'True' - if not skip_cleanup and current_stream != env['CONTAINER_CONFIGS_STREAM']: + if not 
skip_cleanup and current_stream != env['NODE_VERSION']: logger.info( 'Stream version was changed from %s to %s', current_stream, - env['CONTAINER_CONFIGS_STREAM'], + env['NODE_VERSION'], ) docker_cleanup() meta_manager.update_meta( VERSION, - env['CONTAINER_CONFIGS_STREAM'], + env['NODE_VERSION'], distro.id(), distro.version(), ) @@ -167,17 +167,17 @@ def update(env_filepath: str, env: dict, update_type: FairUpdateType) -> bool: meta_manager = FairCliMetaManager() current_stream = meta_manager.get_meta_info().config_stream skip_cleanup = env.get('SKIP_DOCKER_CLEANUP') == 'True' - if not skip_cleanup and current_stream != env['CONTAINER_CONFIGS_STREAM']: + if not skip_cleanup and current_stream != env['NODE_VERSION']: logger.info( 'Stream version was changed from %s to %s', current_stream, - env['CONTAINER_CONFIGS_STREAM'], + env['NODE_VERSION'], ) docker_cleanup() meta_manager.update_meta( VERSION, - env['CONTAINER_CONFIGS_STREAM'], + env['NODE_VERSION'], distro.id(), distro.version(), ) @@ -224,7 +224,7 @@ def restore(env, backup_path, config_only=False): meta_manager = FairCliMetaManager() meta_manager.update_meta( VERSION, - env['CONTAINER_CONFIGS_STREAM'], + env['NODE_VERSION'], distro.id(), distro.version(), ) diff --git a/tests/conftest.py b/tests/conftest.py index 05da0fae..24c85444 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -242,7 +242,7 @@ def valid_env_params(): 'DB_USER': 'user', 'DB_PASSWORD': 'pass', 'DB_PORT': '3307', - 'CONTAINER_CONFIGS_STREAM': 'master', + 'NODE_VERSION': 'master', 'FILEBEAT_HOST': '127.0.0.1:3010', 'SGX_SERVER_URL': 'http://127.0.0.1', 'DISK_MOUNTPOINT': '/dev/sss', @@ -307,7 +307,7 @@ def regular_user_conf(tmp_path): try: test_env = """ ENDPOINT=http://localhost:8545 - CONTAINER_CONFIGS_STREAM='main' + NODE_VERSION='main' FILEBEAT_HOST=127.0.0.1:3010 SGX_SERVER_URL=http://127.0.0.1 DISK_MOUNTPOINT=/dev/sss @@ -329,7 +329,7 @@ def fair_user_conf(tmp_path): try: test_env = """ BOOT_ENDPOINT=http://localhost:8545 - 
CONTAINER_CONFIGS_STREAM='main' + NODE_VERSION='main' FILEBEAT_HOST=127.0.0.1:3010 SGX_SERVER_URL=http://127.0.0.1 DISK_MOUNTPOINT=/dev/sss @@ -350,7 +350,7 @@ def fair_boot_user_conf(tmp_path): try: test_env = """ ENDPOINT=http://localhost:8545 - CONTAINER_CONFIGS_STREAM='main' + NODE_VERSION='main' FILEBEAT_HOST=127.0.0.1:3010 SGX_SERVER_URL=http://127.0.0.1 DISK_MOUNTPOINT=/dev/sss @@ -371,7 +371,7 @@ def sync_user_conf(tmp_path): try: test_env = """ ENDPOINT=http://localhost:8545 - CONTAINER_CONFIGS_STREAM='main' + NODE_VERSION='main' FILEBEAT_HOST=127.0.0.1:3010 DISK_MOUNTPOINT=/dev/sss ENV_TYPE='devnet' From 2f0e02aaf0fabc9d23764248d517db8fa26b2a8f Mon Sep 17 00:00:00 2001 From: Dmytro Date: Thu, 31 Jul 2025 13:30:54 +0100 Subject: [PATCH 158/332] Update fair node cli readme - add new command, update boot/node commands --- README.md | 312 ++++++++++++++++++++++++++++++++++++++++++++++++------ 1 file changed, 280 insertions(+), 32 deletions(-) diff --git a/README.md b/README.md index 8d1e924c..bdd3d0e4 100644 --- a/README.md +++ b/README.md @@ -29,6 +29,9 @@ SKALE Node CLI, part of the SKALE suite of validator tools, is the command line 1. [Top level commands (Fair)](#top-level-commands-fair) 2. [Fair Boot commands](#fair-boot-commands) 3. [Fair Node commands](#fair-node-commands) + 4. [Fair Wallet commands](#fair-wallet-commands) + 5. [Fair Logs commands](#fair-logs-commands) + 6. [Fair SSL commands](#fair-ssl-commands) 5. [Exit codes](#exit-codes) 6. [Development](#development) @@ -679,17 +682,29 @@ Options: Commands for a Fair node in the Boot phase. +#### Fair Boot Info + +Get information about the Fair node during boot phase. + +```shell +fair boot info [--format FORMAT] +``` + +Options: + +* `--format`/`-f` - Output format (`json` or `text`). + #### Fair Boot Initialization Initialize the Fair node boot phase. ```shell -fair boot init [ENV_FILE] +fair boot init ``` Arguments: -* `ENV_FILE` - path to .env file (required). 
+* `ENV_FILE` - Path to the environment file containing configuration. Required environment variables in `ENV_FILE`: @@ -714,19 +729,16 @@ Register the Fair node with Fair Manager *during* the boot phase. fair boot register --name --ip --domain [--port ] ``` -Required arguments: - -* `--name`/`-n` - Fair node name. -* `--ip` - Public IP for RPC connections and consensus. -* `--domain`/`-d` - Fair node domain name (e.g., `fair1.example.com`). - -Optional arguments: +Options: -* `--port`/`-p` - Base port for node sChains (default: `10000`). +* `--name`/`-n` - Fair node name (required). +* `--ip` - Public IP for RPC connections & consensus (required). +* `--domain`/`-d` - Fair node domain name (e.g., `fair1.example.com`, required). +* `--port`/`-p` - Base port for node sChains (default: from configuration). #### Fair Boot Signature -Get the node signature for a validator ID *during* the boot phase. +Get the node signature for a validator ID during boot phase. ```shell fair boot signature @@ -736,21 +748,37 @@ Arguments: * `VALIDATOR_ID` - The ID of the validator requesting the signature. -#### Fair Boot Migrate +#### Fair Boot Update -Migrate the Fair node from the boot phase to the main phase (regular operation). +Update the Fair node software during boot phase. ```shell -fair boot migrate [ENV_FILEPATH] [--yes] +fair boot update [--yes] [--pull-config SCHAIN] ``` Arguments: -* `ENV_FILEPATH` - Path to the .env file. +* `ENV_FILE` - Path to the environment file for node configuration. + +Required environment variables in `ENV_FILE`: + +* `SGX_SERVER_URL` - SGX server URL. +* `DISK_MOUNTPOINT` - Mount point for storing data (BTRFS recommended). +* `NODE_VERSION` - Stream of `skale-node` configs. +* `ENDPOINT` - RPC endpoint of the network where Fair Manager is deployed. +* `MANAGER_CONTRACTS` - SKALE Manager alias or address. +* `IMA_CONTRACTS` - IMA alias or address (*Note: Required by boot service, may not be used by Fair itself*). 
+* `FILEBEAT_HOST` - URL/IP:Port of the Filebeat log server. +* `ENV_TYPE` - Environment type (e.g., 'mainnet', 'devnet'). + +Optional variables: + +* `MONITORING_CONTAINERS` - Enable monitoring containers (`cadvisor`, `node-exporter`). Options: -* `--yes` - Migrate without confirmation. +* `--yes` - Update without confirmation prompt. +* `--pull-config` - Pull configuration for specific sChain (hidden option). ### Fair Node commands @@ -758,47 +786,129 @@ Options: Commands for managing a Fair node during its regular operation (main phase). -#### Fair Node Initialization (Placeholder) +#### Fair Node Info + +Get information about the Fair node. + +```shell +fair node info [--format FORMAT] +``` + +Options: + +* `--format`/`-f` - Output format (`json` or `text`). + +#### Fair Node Initialization Initialize the regular operation phase of the Fair node. ```shell -fair node init +fair node init ``` -> **Note:** This command is currently a placeholder and not implemented. +Arguments: -#### Fair Node Registration (Placeholder) +* `ENV_FILEPATH` - Path to the environment file for node configuration. -Register the node during regular operation. +Required environment variables in `ENV_FILEPATH`: + +* `FAIR_CONTRACTS` - Fair contracts alias or address (e.g., `mainnet`). +* `NODE_VERSION` - Stream of `skale-node` configs (e.g., `fair-main`). +* `BOOT_ENDPOINT` - RPC endpoint of the Fair network (e.g., `https://rpc.fair.cloud/`). +* `SGX_SERVER_URL` - SGX server URL (e.g., `https://127.0.0.1:1026/`). +* `DISK_MOUNTPOINT` - Mount point for storing data (e.g., `/dev/sdc`). +* `ENV_TYPE` - Environment type (e.g., `mainnet`). + +Optional variables: + +* `ENFORCE_BTRFS` - Format existing filesystem on attached disk (`True`/`False`). +* `FILEBEAT_HOST` - URL of the Filebeat log server to send logs. + +#### Fair Node Registration + +Register the Fair node with the specified IP address. 
```shell -fair node register +fair node register --ip ``` -> **Note:** This command is currently a placeholder and not implemented. +Options: + +* `--ip` - Public IP address for the Fair node (required). -#### Fair Node Update (Placeholder) +#### Fair Node Update -Update the Fair node during regular operation. +Update the Fair node software. ```shell -fair node update [ENV_FILEPATH] [--yes] [--unsafe] +fair node update [--yes] [--pull-config SCHAIN] ``` -> **Note:** This command is currently a placeholder and not implemented. +Arguments: + +* `ENV_FILEPATH` - Path to the environment file for node configuration. -#### Fair Node Signature +Required environment variables in `ENV_FILEPATH`: -Get the node signature for a validator ID during regular operation. +* `FAIR_CONTRACTS` - Fair contracts alias or address (e.g., `mainnet`). +* `NODE_VERSION` - Stream of `skale-node` configs (e.g., `fair-main`). +* `BOOT_ENDPOINT` - RPC endpoint of the Fair network (e.g., `https://rpc.fair.cloud/`). +* `SGX_SERVER_URL` - SGX server URL (e.g., `https://127.0.0.1:1026/`). +* `DISK_MOUNTPOINT` - Mount point for storing data (e.g., `/dev/sdc`). +* `ENV_TYPE` - Environment type (e.g., `mainnet`). + +Optional variables: + +* `ENFORCE_BTRFS` - Format existing filesystem on attached disk (`True`/`False`). +* `FILEBEAT_HOST` - URL of the Filebeat log server to send logs. + +Options: + +* `--yes` - Update without confirmation prompt. +* `--pull-config` - Pull configuration for specific sChain (hidden option). + +#### Fair Node Migrate + +Switch from boot phase to regular Fair node operation. ```shell -fair node signature +fair node migrate [--yes] ``` Arguments: -* `VALIDATOR_ID` - The ID of the validator requesting the signature. +* `ENV_FILEPATH` - Path to the environment file for node configuration. + +Required environment variables in `ENV_FILEPATH`: + +* `FAIR_CONTRACTS` - Fair contracts alias or address (e.g., `mainnet`). 
+* `NODE_VERSION` - Stream of `skale-node` configs (e.g., `fair-main`). +* `BOOT_ENDPOINT` - RPC endpoint of the Fair network (e.g., `https://rpc.fair.cloud/`). +* `SGX_SERVER_URL` - SGX server URL (e.g., `https://127.0.0.1:1026/`). +* `DISK_MOUNTPOINT` - Mount point for storing data (e.g., `/dev/sdc`). +* `ENV_TYPE` - Environment type (e.g., `mainnet`). + +Optional variables: + +* `ENFORCE_BTRFS` - Format existing filesystem on attached disk (`True`/`False`). +* `FILEBEAT_HOST` - URL of the Filebeat log server to send logs. + +Options: + +* `--yes` - Migrate without confirmation prompt. + +#### Fair Node Repair + +Toggle fair chain repair mode. + +```shell +fair node repair [--snapshot-from SOURCE] [--yes] +``` + +Options: + +* `--snapshot-from` - Source for snapshots (`any` by default, hidden option). +* `--yes` - Proceed without confirmation prompt. #### Fair Node Backup @@ -822,13 +932,151 @@ fair node restore [--config-only] Arguments: -* `BACKUP_PATH` - Path to the archive. +* `BACKUP_PATH` - Path to the backup archive. * `ENV_FILE` - Path to the .env file for the restored node configuration. Options: * `--config-only` - Only restore configuration files. +#### Fair Node Cleanup + +Cleanup Fair node data and configuration. + +```shell +fair node cleanup [--yes] +``` + +Options: + +* `--yes` - Cleanup without confirmation prompt. + +#### Fair Node Change IP + +Change the IP address of the Fair node. + +```shell +fair node change-ip +``` + +Arguments: + +* `IP_ADDRESS` - New public IP address for the Fair node. + +### Fair Wallet commands + +> Prefix: `fair wallet` + +Commands for managing the node wallet. + +#### Fair Wallet Info + +Get information about the SKALE node wallet. + +```shell +fair wallet info [--format FORMAT] +``` + +Options: + +* `--format`/`-f` - Output format (`json` or `text`). + +#### Fair Wallet Send + +Send ETH from SKALE node wallet to an address. + +```shell +fair wallet send
[--yes] +``` + +Arguments: + +* `ADDRESS` - Destination address for ETH transfer. +* `AMOUNT` - Amount of ETH to send (as float). + +Options: + +* `--yes` - Send without confirmation prompt. + +### Fair Logs commands + +> Prefix: `fair logs` + +Commands for managing and accessing node logs. + +#### Fair CLI Logs + +Fetch the logs of the node-cli. + +```shell +fair logs cli [--debug] +``` + +Options: + +* `--debug` - Show debug logs instead of regular logs. + +#### Fair Logs Dump + +Dump all logs from the connected node. + +```shell +fair logs dump [--container CONTAINER] +``` + +Arguments: + +* `PATH` - Path where the logs dump will be saved. + +Options: + +* `--container`/`-c` - Dump logs only from specified container. + +### Fair SSL commands + +> Prefix: `fair ssl` + +Commands for managing SSL certificates for sChains. + +#### Fair SSL Status + +Check the status of SSL certificates on the node. + +```shell +fair ssl status +``` + +#### Fair SSL Upload + +Upload SSL certificate files to the node. + +```shell +fair ssl upload --cert-path --key-path [--force] +``` + +Options: + +* `--cert-path`/`-c` - Path to the SSL certificate file (required). +* `--key-path`/`-k` - Path to the SSL private key file (required). +* `--force`/`-f` - Overwrite existing certificates. + +#### Fair SSL Check + +Check SSL certificate validity and connectivity. + +```shell +fair ssl check [--cert-path CERT_PATH] [--key-path KEY_PATH] [--port PORT] [--type TYPE] [--no-client] [--no-wss] +``` + +Options: + +* `--cert-path`/`-c` - Path to the certificate file (default: system default). +* `--key-path`/`-k` - Path to the key file (default: system default). +* `--port`/`-p` - Port to start SSL health check server (default: from configuration). +* `--type`/`-t` - Check type: `all`, `openssl`, or `skaled` (default: `all`). +* `--no-client` - Skip client connection for openssl check. +* `--no-wss` - Skip WSS server starting for skaled check. 
+ *** ## Exit codes From 9ea3c1634cc3e8288718c0cc3dc0cc735b99be63 Mon Sep 17 00:00:00 2001 From: Dmytro Date: Thu, 31 Jul 2025 20:02:35 +0100 Subject: [PATCH 159/332] Add force-skaled-start option for fair node update --- node_cli/cli/fair_node.py | 9 +++++++-- node_cli/fair/fair_node.py | 18 +++++++++++++++--- node_cli/fair/record/chain_record.py | 14 ++++++++++++++ node_cli/operations/fair.py | 15 ++++++++++++--- 4 files changed, 48 insertions(+), 8 deletions(-) diff --git a/node_cli/cli/fair_node.py b/node_cli/cli/fair_node.py index 708da591..8c3ebcdf 100644 --- a/node_cli/cli/fair_node.py +++ b/node_cli/cli/fair_node.py @@ -76,9 +76,14 @@ def register(ip: str) -> None: prompt='Are you sure you want to update Fair node software?', ) @click.option('--pull-config', 'pull_config_for_schain', hidden=True, type=str) +@click.option('--force-skaled-start', 'force_skaled_start', hidden=True, type=bool, default=False) @streamed_cmd -def update_node(env_filepath: str, pull_config_for_schain): - update_fair(env_filepath=env_filepath, pull_config_for_schain=pull_config_for_schain) +def update_node(env_filepath: str, pull_config_for_schain, force_skaled_start: bool): + update_fair( + env_filepath=env_filepath, + pull_config_for_schain=pull_config_for_schain, + force_skaled_start=force_skaled_start, + ) @node.command('backup', help='Generate backup file for the Fair node.') diff --git a/node_cli/fair/fair_node.py b/node_cli/fair/fair_node.py index 1e33a6d5..1b69a50c 100644 --- a/node_cli/fair/fair_node.py +++ b/node_cli/fair/fair_node.py @@ -103,8 +103,15 @@ def migrate_from_boot( @check_inited @check_user -def update(env_filepath: str, pull_config_for_schain: str | None = None) -> None: - logger.info('Updating fair node...') +def update( + env_filepath: str, pull_config_for_schain: str | None = None, force_skaled_start: bool = False +) -> None: + logger.info( + 'Updating fair node: %s, pull_config_for_schain: %s, force_skaled_start: %s', + env_filepath, + 
pull_config_for_schain, + force_skaled_start, + ) env = compose_node_env( env_filepath, inited_node=True, @@ -112,7 +119,12 @@ def update(env_filepath: str, pull_config_for_schain: str | None = None) -> None node_type=NodeType.FAIR, pull_config_for_schain=pull_config_for_schain, ) - update_ok = update_fair_op(env_filepath, env, update_type=FairUpdateType.REGULAR) + update_ok = update_fair_op( + env_filepath, + env, + update_type=FairUpdateType.REGULAR, + force_skaled_start=force_skaled_start, + ) alive = is_base_containers_alive(node_type=NodeType.FAIR) if not update_ok or not alive: print_node_cmd_error() diff --git a/node_cli/fair/record/chain_record.py b/node_cli/fair/record/chain_record.py index 3884d197..d4ac740c 100644 --- a/node_cli/fair/record/chain_record.py +++ b/node_cli/fair/record/chain_record.py @@ -34,6 +34,7 @@ 'repair_date': FieldInfo('repair_date', datetime, datetime.fromtimestamp(0)), 'repair_ts': FieldInfo('repair_ts', int, None), 'snapshot_from': FieldInfo('snapshot_from', str, None), + 'force_skaled_start': FieldInfo('force_skaled_start', bool, False), } @@ -57,6 +58,10 @@ def snapshot_from(self) -> str | None: def repair_ts(self) -> int | None: return cast(int | None, self._get_field('repair_ts')) + @property + def force_skaled_start(self) -> bool: + return cast(bool, self._get_field('force_skaled_start')) + def set_config_version(self, version: str) -> None: self._set_field('config_version', version) @@ -69,6 +74,9 @@ def set_snapshot_from(self, value: str | None) -> None: def set_repair_ts(self, value: int | None) -> None: self._set_field('repair_ts', value) + def set_force_skaled_start(self, value: bool) -> None: + self._set_field('force_skaled_start', value) + def get_fair_chain_record(env: dict) -> ChainRecord: return ChainRecord(get_fair_chain_name(env)) @@ -79,3 +87,9 @@ def migrate_chain_record(env: dict) -> None: logger.info('Migrating fair chain record, setting config version to %s', version) record = get_fair_chain_record(env) 
record.set_config_version(version) + + +def update_chain_record(env: dict, force_skaled_start: bool) -> None: + record = get_fair_chain_record(env) + record.set_force_skaled_start(force_skaled_start) + logger.info('Updated fair chain record with force_skaled_start=%s', force_skaled_start) diff --git a/node_cli/operations/fair.py b/node_cli/operations/fair.py index f7f708ee..3455c21a 100644 --- a/node_cli/operations/fair.py +++ b/node_cli/operations/fair.py @@ -38,7 +38,11 @@ from node_cli.core.nginx import generate_nginx_config from node_cli.core.schains import cleanup_no_lvm_datadir from node_cli.core.static_config import get_fair_chain_name -from node_cli.fair.record.chain_record import get_fair_chain_record, migrate_chain_record +from node_cli.fair.record.chain_record import ( + get_fair_chain_record, + migrate_chain_record, + update_chain_record, +) from node_cli.migrations.fair.from_boot import migrate_nftables_from_boot from node_cli.operations.base import checked_host, turn_off from node_cli.operations.common import configure_filebeat, configure_flask, unpack_backup_archive @@ -149,7 +153,12 @@ def update_fair_boot(env_filepath: str, env: dict) -> bool: @checked_host -def update(env_filepath: str, env: dict, update_type: FairUpdateType) -> bool: +def update( + env_filepath: str, + env: dict, + update_type: FairUpdateType, + force_skaled_start: bool, +) -> bool: compose_rm(node_type=NodeType.FAIR, env=env) if update_type not in (FairUpdateType.INFRA_ONLY, FairUpdateType.FROM_BOOT): remove_dynamic_containers() @@ -193,7 +202,7 @@ def update(env_filepath: str, env: dict, update_type: FairUpdateType) -> bool: time.sleep(REDIS_START_TIMEOUT) if update_type == FairUpdateType.FROM_BOOT: migrate_chain_record(env) - + update_chain_record(env, force_skaled_start=force_skaled_start) compose_up(env=env, node_type=NodeType.FAIR) return True From 204e6d1f0fcf766a361df7a960a22a084fdd520d Mon Sep 17 00:00:00 2001 From: Dmytro Date: Fri, 1 Aug 2025 13:22:14 +0100 Subject: 
[PATCH 160/332] Fix force_skaled_start flag --- node_cli/cli/fair_node.py | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/node_cli/cli/fair_node.py b/node_cli/cli/fair_node.py index 8c3ebcdf..d99f7ddc 100644 --- a/node_cli/cli/fair_node.py +++ b/node_cli/cli/fair_node.py @@ -76,7 +76,14 @@ def register(ip: str) -> None: prompt='Are you sure you want to update Fair node software?', ) @click.option('--pull-config', 'pull_config_for_schain', hidden=True, type=str) -@click.option('--force-skaled-start', 'force_skaled_start', hidden=True, type=bool, default=False) +@click.option( + '--force-skaled-start', + 'force_skaled_start', + hidden=True, + type=bool, + default=False, + is_flag=True, +) @streamed_cmd def update_node(env_filepath: str, pull_config_for_schain, force_skaled_start: bool): update_fair( From b5c293314acf48168329547b8a7c152ce4489365 Mon Sep 17 00:00:00 2001 From: Dmytro Date: Mon, 4 Aug 2025 16:23:21 +0100 Subject: [PATCH 161/332] Add fair node exit command --- node_cli/cli/fair_node.py | 13 ++++++++ node_cli/fair/fair_node.py | 22 ++++++++++++- tests/cli/fair_cli_test.py | 10 ++++++ tests/fair/fair_node_test.py | 62 ++++++++++++++++++++++++++++++++++++ text.yml | 6 +++- 5 files changed, 111 insertions(+), 2 deletions(-) diff --git a/node_cli/cli/fair_node.py b/node_cli/cli/fair_node.py index d99f7ddc..5b303f17 100644 --- a/node_cli/cli/fair_node.py +++ b/node_cli/cli/fair_node.py @@ -22,6 +22,7 @@ from node_cli.core.node import backup from node_cli.fair.fair_node import change_ip as change_ip_fair from node_cli.fair.fair_node import cleanup as fair_cleanup +from node_cli.fair.fair_node import exit as exit_fair from node_cli.fair.fair_node import ( get_node_info, migrate_from_boot, @@ -165,3 +166,15 @@ def cleanup_node(): @click.argument('ip', type=IP_TYPE) def change_ip(ip: str) -> None: change_ip_fair(ip=ip) + + +@node.command('exit', help=TEXTS['fair']['node']['exit']['help']) +@click.option( + '--yes', + is_flag=True, + 
callback=abort_if_false, + expose_value=False, + prompt=TEXTS['fair']['node']['exit']['prompt'], +) +def exit_node() -> None: + exit_fair() diff --git a/node_cli/fair/fair_node.py b/node_cli/fair/fair_node.py index 1b69a50c..e6f2fb1d 100644 --- a/node_cli/fair/fair_node.py +++ b/node_cli/fair/fair_node.py @@ -92,7 +92,9 @@ def migrate_from_boot( sync_schains=False, node_type=NodeType.FAIR, ) - migrate_ok = update_fair_op(env_filepath, env, update_type=FairUpdateType.FROM_BOOT) + migrate_ok = update_fair_op( + env_filepath, env, update_type=FairUpdateType.FROM_BOOT, force_skaled_start=False + ) alive = is_base_containers_alive(node_type=NodeType.FAIR) if not migrate_ok or not alive: print_node_cmd_error() @@ -197,3 +199,21 @@ def change_ip(ip: str) -> None: error_msg = payload logger.error(f'Change IP error {error_msg}') error_exit(error_msg, exit_code=CLIExitCodes.BAD_API_RESPONSE) + + +@check_inited +@check_user +def exit() -> None: + if not is_node_inited(): + print(TEXTS['fair']['node']['not_inited']) + return + + status, payload = post_request(blueprint=BLUEPRINT_NAME, method='exit', json={}) + if status == 'ok': + msg = TEXTS['fair']['node']['exited'] + logger.info(msg) + print(msg) + else: + error_msg = payload + logger.error(f'Node exit error {error_msg}') + error_exit(error_msg, exit_code=CLIExitCodes.BAD_API_RESPONSE) diff --git a/tests/cli/fair_cli_test.py b/tests/cli/fair_cli_test.py index d29ae357..51fa0a9b 100644 --- a/tests/cli/fair_cli_test.py +++ b/tests/cli/fair_cli_test.py @@ -11,6 +11,7 @@ from node_cli.cli.fair_node import ( backup_node, migrate_node, + exit_node, restore_node, ) @@ -101,3 +102,12 @@ def test_fair_node_migrate(mock_migrate_core, valid_env_file): assert result.exit_code == 0, f'Output: {result.output}\nException: {result.exception}' mock_migrate_core.assert_called_once_with(env_filepath=valid_env_file) + + +@mock.patch('node_cli.cli.fair_node.exit_fair') +def test_fair_node_exit(mock_exit_core): + runner = CliRunner() + result = 
runner.invoke(exit_node, ['--yes']) + + assert result.exit_code == 0, f'Output: {result.output}\nException: {result.exception}' + mock_exit_core.assert_called_once() diff --git a/tests/fair/fair_node_test.py b/tests/fair/fair_node_test.py index 5d0c13bb..89d1c24b 100644 --- a/tests/fair/fair_node_test.py +++ b/tests/fair/fair_node_test.py @@ -257,3 +257,65 @@ def test_cleanup_logs_success_message( mock_logger.info.assert_called_once_with( 'Fair node was cleaned up, all containers and data removed' ) + + +@mock.patch('node_cli.utils.decorators.is_user_valid', return_value=True) +@mock.patch('node_cli.fair.fair_node.post_request') +@mock.patch('node_cli.fair.fair_node.is_node_inited', return_value=True) +def test_exit_success( + mock_is_inited, + mock_post_request, + mock_is_user_valid, + inited_node, + resource_alloc, + meta_file_v3, +): + from node_cli.fair.fair_node import exit + + mock_post_request.return_value = ('ok', {}) + + exit() + + mock_post_request.assert_called_once_with(blueprint='fair-node', method='exit', json={}) + + +@mock.patch('node_cli.utils.decorators.is_user_valid', return_value=True) +@mock.patch('node_cli.fair.fair_node.error_exit') +@mock.patch('node_cli.fair.fair_node.post_request') +@mock.patch('node_cli.fair.fair_node.is_node_inited', return_value=True) +def test_exit_error( + mock_is_inited, + mock_post_request, + mock_error_exit, + mock_is_user_valid, + inited_node, + resource_alloc, + meta_file_v3, +): + from node_cli.fair.fair_node import exit + + error_msg = 'Exit failed' + mock_post_request.return_value = ('error', error_msg) + + exit() + + mock_post_request.assert_called_once_with(blueprint='fair-node', method='exit', json={}) + mock_error_exit.assert_called_once_with(error_msg, exit_code=mock.ANY) + + +@mock.patch('node_cli.utils.decorators.is_user_valid', return_value=True) +@mock.patch('node_cli.fair.fair_node.is_node_inited', return_value=False) +def test_exit_not_inited( + mock_is_inited, + mock_is_user_valid, + inited_node, + 
resource_alloc, + meta_file_v3, + capsys, +): + from node_cli.fair.fair_node import exit + + exit() + + captured = capsys.readouterr() + assert 'Node should be initialized to proceed with operation' in captured.out diff --git a/text.yml b/text.yml index 9aa99951..560729a7 100644 --- a/text.yml +++ b/text.yml @@ -93,4 +93,8 @@ fair: ip: IP address of the node in Fair manager ip_changed: Node IP changed in Fair manager change-ip: - help: Change the node IP in Fair manager \ No newline at end of file + help: Change the node IP in Fair manager + exited: Node removed from Fair manager + exit: + help: Remove node from Fair manager + prompt: Are you sure you want to remove the node from Fair manager? From 98089f5ef939a835a9a6e017a271c1297ceb1c93 Mon Sep 17 00:00:00 2001 From: Dmytro Date: Mon, 4 Aug 2025 16:24:50 +0100 Subject: [PATCH 162/332] Add copilot instructions --- .github/copilot-instructions.md | 16 ++++++++++++++++ 1 file changed, 16 insertions(+) create mode 100644 .github/copilot-instructions.md diff --git a/.github/copilot-instructions.md b/.github/copilot-instructions.md new file mode 100644 index 00000000..416cfaf6 --- /dev/null +++ b/.github/copilot-instructions.md @@ -0,0 +1,16 @@ +- always keep the changes minimal and purposeful +- focus on fixing the exact problem or implementing the exact feature +- keep the code simple, do not write defensive code +- do not describe your changes in details after you made changes, focus on writing code +- do not generate any documentation, the code should be self-explanatory +- do not generate any in-line comments +- for the new files, always add a license header, same format as in the existing files +- no commented out code +- no console logs in production code +- no unused imports +- no redundant code - move repeated logic into helper functions +- use type hints to specify the expected types of function arguments and return values + +- check `ruff.toml` for formatting rules +- always lint changes using `ruff check` 
+- tests should be placed in `tests/` directory, follow the existing structure and code style \ No newline at end of file From 66e079fe4c85da71a93acacd6a053ff5b6b2b1f2 Mon Sep 17 00:00:00 2001 From: Dmytro Date: Mon, 4 Aug 2025 17:08:23 +0100 Subject: [PATCH 163/332] Fix migrate from boot test --- tests/fair/fair_node_test.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/fair/fair_node_test.py b/tests/fair/fair_node_test.py index 89d1c24b..19e8fe4c 100644 --- a/tests/fair/fair_node_test.py +++ b/tests/fair/fair_node_test.py @@ -125,7 +125,7 @@ def test_migrate_from_boot( node_type=NodeType.FAIR, ) mock_migrate_op.assert_called_once_with( - valid_env_file, mock_env, update_type=FairUpdateType.FROM_BOOT + valid_env_file, mock_env, update_type=FairUpdateType.FROM_BOOT, force_skaled_start=False ) From ec1f26500dad4ba21ef9a2e73981a113c071aa22 Mon Sep 17 00:00:00 2001 From: Dmytro Date: Mon, 4 Aug 2025 19:30:28 +0100 Subject: [PATCH 164/332] Fix node exit route --- node_cli/configs/routes.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/node_cli/configs/routes.py b/node_cli/configs/routes.py index 942cdfd4..d595d8ea 100644 --- a/node_cli/configs/routes.py +++ b/node_cli/configs/routes.py @@ -40,7 +40,7 @@ 'schains': ['config', 'list', 'dkg-statuses', 'firewall-rules', 'repair', 'get'], 'ssl': ['status', 'upload'], 'wallet': ['info', 'send-eth'], - 'fair-node': ['info', 'register', 'change-ip'], + 'fair-node': ['info', 'register', 'change-ip', 'exit'], } } From 2fe8538ec23793e60da706dc828d498f3222773f Mon Sep 17 00:00:00 2001 From: Dmytro Date: Tue, 5 Aug 2025 12:37:06 +0100 Subject: [PATCH 165/332] Add streamed cmd decorator to node exit, fix api test --- node_cli/cli/fair_node.py | 2 ++ tests/routes_test.py | 1 + 2 files changed, 3 insertions(+) diff --git a/node_cli/cli/fair_node.py b/node_cli/cli/fair_node.py index 5b303f17..19b06c93 100644 --- a/node_cli/cli/fair_node.py +++ b/node_cli/cli/fair_node.py @@ -164,6 +164,7 @@ 
def cleanup_node(): @node.command('change-ip', help=TEXTS['fair']['node']['change-ip']['help']) @click.argument('ip', type=IP_TYPE) +@streamed_cmd def change_ip(ip: str) -> None: change_ip_fair(ip=ip) @@ -176,5 +177,6 @@ def change_ip(ip: str) -> None: expose_value=False, prompt=TEXTS['fair']['node']['exit']['prompt'], ) +@streamed_cmd def exit_node() -> None: exit_fair() diff --git a/tests/routes_test.py b/tests/routes_test.py index 3dd87f84..6a7918f0 100644 --- a/tests/routes_test.py +++ b/tests/routes_test.py @@ -34,6 +34,7 @@ '/api/v1/fair-node/info', '/api/v1/fair-node/register', '/api/v1/fair-node/change-ip', + '/api/v1/fair-node/exit', ] From e98c82bd89d5c9101f8b7ae2f40c4c2ebaf2aa86 Mon Sep 17 00:00:00 2001 From: Dmytro Date: Tue, 5 Aug 2025 19:35:50 +0100 Subject: [PATCH 166/332] Add fair chain commands - checks and record --- README.md | 41 ++++++++++++++++--- node_cli/cli/chain.py | 44 ++++++++++++++++++++ node_cli/cli/ssl.py | 2 +- node_cli/configs/routes.py | 1 + node_cli/fair/chain.py | 65 ++++++++++++++++++++++++++++++ node_cli/main.py | 2 + node_cli/utils/print_formatters.py | 40 ++++++++++++++++++ 7 files changed, 189 insertions(+), 6 deletions(-) create mode 100644 node_cli/cli/chain.py create mode 100644 node_cli/fair/chain.py diff --git a/README.md b/README.md index bdd3d0e4..b2bad1bc 100644 --- a/README.md +++ b/README.md @@ -29,9 +29,10 @@ SKALE Node CLI, part of the SKALE suite of validator tools, is the command line 1. [Top level commands (Fair)](#top-level-commands-fair) 2. [Fair Boot commands](#fair-boot-commands) 3. [Fair Node commands](#fair-node-commands) - 4. [Fair Wallet commands](#fair-wallet-commands) - 5. [Fair Logs commands](#fair-logs-commands) - 6. [Fair SSL commands](#fair-ssl-commands) + 4. [Fair Chain commands](#fair-chain-commands) + 5. [Fair Wallet commands](#fair-wallet-commands) + 6. [Fair Logs commands](#fair-logs-commands) + 7. [Fair SSL commands](#fair-ssl-commands) 5. [Exit codes](#exit-codes) 6. 
[Development](#development) @@ -841,7 +842,7 @@ Options: Update the Fair node software. ```shell -fair node update [--yes] [--pull-config SCHAIN] +fair node update [--yes] [--force-skaled-start] ``` Arguments: @@ -865,7 +866,7 @@ Optional variables: Options: * `--yes` - Update without confirmation prompt. -* `--pull-config` - Pull configuration for specific sChain (hidden option). +* `--force-skaled-start` - Force skaled container to start (hidden option). #### Fair Node Migrate @@ -963,6 +964,36 @@ Arguments: * `IP_ADDRESS` - New public IP address for the Fair node. +### Fair Chain commands + +> Prefix: `fair chain` + +Commands for managing and monitoring the Fair chain state and configuration. + +#### Fair Chain Record + +Get information about the Fair chain record, including chain name, configuration status, DKG status, and operational metadata. + +```shell +fair chain record [--json] +``` + +Options: + +* `--json` - Output in JSON format instead of formatted table. + +#### Fair Chain Checks + +Get the status of Fair chain checks, including configuration checks and skaled checks. + +```shell +fair chain checks [--json] +``` + +Options: + +* `--json` - Output in JSON format instead of formatted table. + ### Fair Wallet commands > Prefix: `fair wallet` diff --git a/node_cli/cli/chain.py b/node_cli/cli/chain.py new file mode 100644 index 00000000..10826236 --- /dev/null +++ b/node_cli/cli/chain.py @@ -0,0 +1,44 @@ +# -*- coding: utf-8 -*- +# +# This file is part of node-cli +# +# Copyright (C) 2025-Present SKALE Labs +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the +# GNU Affero General Public License for more details. +# +# You should have received a copy of the GNU Affero General Public License +# along with this program. If not, see . + +import click + +from node_cli.fair.chain import get_chain_record, get_chain_checks + + +@click.group() +def chain_cli(): + pass + + +@chain_cli.group(help='Fair chain commands') +def chain(): + pass + + +@chain.command('record', help='Get Fair chain record information') +@click.option('--json', 'raw', is_flag=True, help='Output in JSON format') +def chain_record(raw: bool) -> None: + get_chain_record(raw=raw) + + +@chain.command('checks', help='Get Fair chain checks status') +@click.option('--json', 'raw', is_flag=True, help='Output in JSON format') +def chain_checks(raw: bool) -> None: + get_chain_checks(raw=raw) diff --git a/node_cli/cli/ssl.py b/node_cli/cli/ssl.py index e371e8ce..c22ac226 100644 --- a/node_cli/cli/ssl.py +++ b/node_cli/cli/ssl.py @@ -82,7 +82,7 @@ def upload(key_path, cert_path, force): @click.option( '--port', '-p', - help='Port to start ssl healtcheck server', + help='Port to start ssl healthcheck server', type=int, default=DEFAULT_SSL_CHECK_PORT, ) diff --git a/node_cli/configs/routes.py b/node_cli/configs/routes.py index d595d8ea..a1a17879 100644 --- a/node_cli/configs/routes.py +++ b/node_cli/configs/routes.py @@ -41,6 +41,7 @@ 'ssl': ['status', 'upload'], 'wallet': ['info', 'send-eth'], 'fair-node': ['info', 'register', 'change-ip', 'exit'], + 'fair-chain': ['record', 'checks'], } } diff --git a/node_cli/fair/chain.py b/node_cli/fair/chain.py new file mode 100644 index 00000000..8ca380ae --- /dev/null +++ b/node_cli/fair/chain.py @@ -0,0 +1,65 @@ +# -*- coding: utf-8 -*- +# +# This file is part of node-cli +# +# Copyright (C) 2025-Present SKALE Labs +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as published by +# the Free Software Foundation, either version 3 of 
the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Affero General Public License for more details. +# +# You should have received a copy of the GNU Affero General Public License +# along with this program. If not, see . + +import json +from typing import Any, Dict + +from node_cli.utils.exit_codes import CLIExitCodes +from node_cli.utils.helper import error_exit, get_request +from node_cli.utils.print_formatters import print_chain_record, print_chain_checks + +BLUEPRINT_NAME = 'fair-chain' + + +def get_chain_record_plain() -> Dict[str, Any]: + status, payload = get_request(blueprint=BLUEPRINT_NAME, method='record') + if status == 'ok': + if isinstance(payload, dict): + return payload.get('record', {}) + else: + error_exit('Invalid response format', exit_code=CLIExitCodes.BAD_API_RESPONSE) + else: + error_exit(payload, exit_code=CLIExitCodes.BAD_API_RESPONSE) + + +def get_chain_record(raw: bool = False) -> None: + record = get_chain_record_plain() + if raw: + print(json.dumps(record, indent=4)) + else: + print_chain_record(record) + + +def get_chain_checks_plain() -> Dict[str, Any]: + status, payload = get_request(blueprint=BLUEPRINT_NAME, method='checks') + if status == 'ok': + if isinstance(payload, dict): + return payload + else: + error_exit('Invalid response format', exit_code=CLIExitCodes.BAD_API_RESPONSE) + else: + error_exit(payload, exit_code=CLIExitCodes.BAD_API_RESPONSE) + + +def get_chain_checks(raw: bool = False) -> None: + checks = get_chain_checks_plain() + if raw: + print(json.dumps(checks, indent=4)) + else: + print_chain_checks(checks) diff --git a/node_cli/main.py b/node_cli/main.py index 28d87fe0..7fb833c0 100644 --- a/node_cli/main.py +++ b/node_cli/main.py @@ -40,6 +40,7 @@ from node_cli.cli.sync_node import sync_node_cli from 
node_cli.cli.fair_boot import fair_boot_cli from node_cli.cli.fair_node import fair_node_cli +from node_cli.cli.chain import chain_cli from node_cli.core.host import init_logs_dir from node_cli.utils.node_type import NodeType from node_cli.configs import LONG_LINE @@ -91,6 +92,7 @@ def get_sources_list() -> List[click.MultiCommand]: logs_cli, fair_boot_cli, fair_node_cli, + chain_cli, wallet_cli, ssl_cli, ] diff --git a/node_cli/utils/print_formatters.py b/node_cli/utils/print_formatters.py index 1fd1ca33..81e92be3 100644 --- a/node_cli/utils/print_formatters.py +++ b/node_cli/utils/print_formatters.py @@ -323,3 +323,43 @@ def print_meta_info(meta_info: CliMeta) -> None: {LONG_LINE} """) ) + + +def print_chain_record(record): + print( + inspect.cleandoc(f""" + {LONG_LINE} + Fair Chain Record + Chain Name: {record.get('name', 'N/A')} + Config Version: {record.get('config_version', 'N/A')} + Sync Config Run: {record.get('sync_config_run', 'N/A')} + First Run: {record.get('first_run', 'N/A')} + Backup Run: {record.get('backup_run', 'N/A')} + Restart Count: {record.get('restart_count', 'N/A')} + Failed RPC Count: {record.get('failed_rpc_count', 'N/A')} + Monitor Last Seen: {record.get('monitor_last_seen', 'N/A')} + SSL Change Date: {record.get('ssl_change_date', 'N/A')} + Repair Date: {record.get('repair_date', 'N/A')} + DKG Status: {record.get('dkg_status', 'N/A')} + Repair Timestamp: {record.get('repair_ts', 'N/A')} + Snapshot From: {record.get('snapshot_from', 'N/A')} + Restart Timestamp: {record.get('restart_ts', 'N/A')} + Force Skaled Start: {record.get('force_skaled_start', 'N/A')} + {LONG_LINE} + """) + ) + + +def print_chain_checks(checks): + def format_checks(check_dict, title): + print(f'\n{title}:') + for name, result in check_dict.items(): + status = 'PASS' if result else 'FAIL' + print(f' {name}: {status}') + + print(f'{LONG_LINE}') + print('Fair Chain Checks') + print(f'{LONG_LINE}') + format_checks(checks['config_checks'], 'Config Checks') + 
format_checks(checks['skaled_checks'], 'Skaled Checks') + print(f'{LONG_LINE}') From 1e2accca331960459797a4dd1d8b42a2d1e3a901 Mon Sep 17 00:00:00 2001 From: Dmytro Date: Tue, 5 Aug 2025 20:33:11 +0100 Subject: [PATCH 167/332] Add string formatters --- node_cli/utils/print_formatters.py | 27 ++++++++++++++++++++++----- 1 file changed, 22 insertions(+), 5 deletions(-) diff --git a/node_cli/utils/print_formatters.py b/node_cli/utils/print_formatters.py index 81e92be3..821e51f6 100644 --- a/node_cli/utils/print_formatters.py +++ b/node_cli/utils/print_formatters.py @@ -325,6 +325,23 @@ def print_meta_info(meta_info: CliMeta) -> None: ) +def format_timestamp(value): + if value is None or value == 'N/A' or value == 0 or value == 0.0: + return 'N/A' + try: + timestamp = float(value) + if timestamp == 0: + return 'N/A' + dt = datetime.datetime.fromtimestamp(timestamp) + human_date = dt.strftime('%Y-%m-%d %H:%M:%S') + return f'{human_date} ({timestamp})' + except (ValueError, TypeError): + return str(value) + + +1 + + def print_chain_record(record): print( inspect.cleandoc(f""" @@ -337,13 +354,13 @@ def print_chain_record(record): Backup Run: {record.get('backup_run', 'N/A')} Restart Count: {record.get('restart_count', 'N/A')} Failed RPC Count: {record.get('failed_rpc_count', 'N/A')} - Monitor Last Seen: {record.get('monitor_last_seen', 'N/A')} - SSL Change Date: {record.get('ssl_change_date', 'N/A')} - Repair Date: {record.get('repair_date', 'N/A')} + Monitor Last Seen: {format_timestamp(record.get('monitor_last_seen', 'N/A'))} + SSL Change Date: {format_timestamp(record.get('ssl_change_date', 'N/A'))} + Repair Date: {format_timestamp(record.get('repair_date', 'N/A'))} DKG Status: {record.get('dkg_status', 'N/A')} - Repair Timestamp: {record.get('repair_ts', 'N/A')} + Repair Timestamp: {format_timestamp(record.get('repair_ts', 'N/A'))} Snapshot From: {record.get('snapshot_from', 'N/A')} - Restart Timestamp: {record.get('restart_ts', 'N/A')} + Restart Timestamp: 
{format_timestamp(record.get('restart_ts', 'N/A'))} Force Skaled Start: {record.get('force_skaled_start', 'N/A')} {LONG_LINE} """) From df49e5b85394275c589922c35abcbd952a0d3b2a Mon Sep 17 00:00:00 2001 From: Dmytro Date: Tue, 5 Aug 2025 21:15:23 +0100 Subject: [PATCH 168/332] Add fair chain routes to tests --- tests/routes_test.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/tests/routes_test.py b/tests/routes_test.py index 6a7918f0..bcf1bf98 100644 --- a/tests/routes_test.py +++ b/tests/routes_test.py @@ -35,6 +35,8 @@ '/api/v1/fair-node/register', '/api/v1/fair-node/change-ip', '/api/v1/fair-node/exit', + '/api/v1/fair-chain/record', + '/api/v1/fair-chain/checks', ] From 69a2d3748f4c8e04b2d13c350a94205376aada74 Mon Sep 17 00:00:00 2001 From: Dmytro Date: Wed, 6 Aug 2025 13:19:04 +0100 Subject: [PATCH 169/332] Rename sync node to passive --- .github/workflows/publish.yml | 2 +- .github/workflows/test.yml | 8 +-- README.md | 66 +++++++++---------- .../cli/{sync_node.py => passive_node.py} | 28 ++++---- node_cli/configs/__init__.py | 2 +- node_cli/configs/user.py | 6 +- node_cli/core/nginx.py | 2 +- node_cli/core/node.py | 46 ++++++------- node_cli/main.py | 8 +-- node_cli/operations/__init__.py | 6 +- node_cli/operations/base.py | 20 +++--- node_cli/utils/docker_utils.py | 24 +++---- node_cli/utils/node_type.py | 2 +- scripts/build.sh | 6 +- scripts/generate_info.sh | 8 +-- tests/cli/node_test.py | 2 +- ...sync_node_test.py => passive_node_test.py} | 30 ++++----- tests/configs/configs_env_validate_test.py | 6 +- tests/conftest.py | 4 +- tests/core/core_node_test.py | 24 +++---- tests/core/core_schains_test.py | 6 +- tests/core/nginx_test.py | 6 +- text.yml | 8 +-- 23 files changed, 160 insertions(+), 160 deletions(-) rename node_cli/cli/{sync_node.py => passive_node.py} (76%) rename tests/cli/{sync_node_test.py => passive_node_test.py} (84%) diff --git a/.github/workflows/publish.yml b/.github/workflows/publish.yml index 57852a13..96290f1b 100644 --- 
a/.github/workflows/publish.yml +++ b/.github/workflows/publish.yml @@ -72,7 +72,7 @@ jobs: strategy: matrix: os: [ubuntu-22.04] - build_type: [normal, sync, fair] + build_type: [normal, passive, fair] steps: - name: Checkout code uses: actions/checkout@v4 diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 1a230126..043c1792 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -47,15 +47,15 @@ jobs: - name: Check build - normal run: sudo /home/ubuntu/dist/skale-test-Linux-x86_64 - - name: Build binary - sync + - name: Build binary - passive run: | mkdir -p ./dist docker build . -t node-cli-builder - docker run -v /home/ubuntu/dist:/app/dist node-cli-builder bash scripts/build.sh test test sync + docker run -v /home/ubuntu/dist:/app/dist node-cli-builder bash scripts/build.sh test test passive docker rm -f $(docker ps -aq) - - name: Check build - sync - run: sudo /home/ubuntu/dist/skale-test-Linux-x86_64-sync + - name: Check build - passive + run: sudo /home/ubuntu/dist/skale-test-Linux-x86_64-passive - name: Build binary - fair run: | diff --git a/README.md b/README.md index b2bad1bc..3b8b1b88 100644 --- a/README.md +++ b/README.md @@ -4,13 +4,13 @@ ![Test](https://github.com/skalenetwork/node-cli/workflows/Test/badge.svg) [![Discord](https://img.shields.io/discord/534485763354787851.svg)](https://discord.gg/vvUtWJB) -SKALE Node CLI, part of the SKALE suite of validator tools, is the command line interface to setup, register and maintain your SKALE node. It comes in three distinct build types: Standard (for validator nodes), Sync (for dedicated sChain synchronization), and Fair. +SKALE Node CLI, part of the SKALE suite of validator tools, is the command line interface to setup, register and maintain your SKALE node. It comes in three distinct build types: Standard (for validator nodes), Passive (for dedicated sChain synchronization), and Fair. ## Table of Contents 1. [Installation](#installation) 1. 
[Standard Node Binary](#standard-node-binary) - 2. [Sync Node Binary](#sync-node-binary) + 2. [Passive Node Binary](#passive-node-binary) 3. [Fair Node Binary](#fair-node-binary) 4. [Permissions and Testing](#permissions-and-testing) 2. [Standard Node Usage (`skale` - Normal Build)](#standard-node-usage-skale---normal-build) @@ -22,9 +22,9 @@ SKALE Node CLI, part of the SKALE suite of validator tools, is the command line 6. [SSL commands (Standard)](#ssl-commands-standard) 7. [Logs commands (Standard)](#logs-commands-standard) 8. [Resources allocation commands (Standard)](#resources-allocation-commands-standard) -3. [Sync Node Usage (`skale` - Sync Build)](#sync-node-usage-skale---sync-build) - 1. [Top level commands (Sync)](#top-level-commands-sync) - 2. [Sync node commands](#sync-node-commands) +3. [Passive Node Usage (`skale` - Passive Build)](#passive-node-usage-skale---passive-build) + 1. [Top level commands (Passive)](#top-level-commands-passive) + 2. [Passive node commands](#passive-node-commands) 4. [Fair Node Usage (`fair`)](#fair-node-usage-fair) 1. [Top level commands (Fair)](#top-level-commands-fair) 2. [Fair Boot commands](#fair-boot-commands) @@ -54,14 +54,14 @@ CLI_VERSION={version} && \ sudo -E bash -c "curl -L https://github.com/skalenetwork/node-cli/releases/download/$CLI_VERSION/skale-$CLI_VERSION-`uname -s`-`uname -m` > /usr/local/bin/skale" ``` -### Sync Node Binary +### Passive Node Binary -This binary (`skale-VERSION-OS-sync`) is used for managing dedicated Sync nodes. **Ensure you download the correct `-sync` suffixed binary for Sync node operations.** +This binary (`skale-VERSION-OS-passive`) is used for managing dedicated Passive nodes. 
**Ensure you download the correct `-passive` suffixed binary for Passive node operations.** ```shell # Replace {version} with the desired release version (e.g., 3.0.0) CLI_VERSION={version} && \ -sudo -E bash -c "curl -L https://github.com/skalenetwork/node-cli/releases/download/$CLI_VERSION/skale-$CLI_VERSION-`uname -s`-`uname -m`-sync > /usr/local/bin/skale" +sudo -E bash -c "curl -L https://github.com/skalenetwork/node-cli/releases/download/$CLI_VERSION/skale-$CLI_VERSION-`uname -s`-`uname -m`-passive > /usr/local/bin/skale" ``` ### Fair Node Binary @@ -79,7 +79,7 @@ sudo -E bash -c "curl -L https://github.com/skalenetwork/node-cli/releases/downl Apply executable permissions to the downloaded binary (adjust name accordingly): ```shell -# For Standard or Sync binary +# For Standard or Passive binary sudo chmod +x /usr/local/bin/skale # For Fair binary @@ -89,7 +89,7 @@ sudo chmod +x /usr/local/bin/fair Test the installation: ```shell -# Standard or Sync build +# Standard or Passive build skale --help # Fair build @@ -552,24 +552,24 @@ Options: *** -## Sync Node Usage (`skale` - Sync Build) +## Passive Node Usage (`skale` - Passive Build) -Commands available in the **sync `skale` binary** for managing dedicated Sync nodes. +Commands available in the **passive `skale` binary** for managing dedicated Passive nodes. Note that this binary contains a **different set of commands** compared to the standard build. -### Top level commands (Sync) +### Top level commands (Passive) -#### Info (Sync) +#### Info (Passive) -Print build info for the `skale` (sync) binary. +Print build info for the `skale` (passive) binary. ```shell skale info ``` -#### Version (Sync) +#### Version (Passive) -Print version number for the `skale` (sync) binary. +Print version number for the `skale` (passive) binary. ```shell skale version @@ -579,16 +579,16 @@ Options: * `--short` - prints version only, without additional text. 
-### Sync node commands +### Passive node commands -> Prefix: `skale sync-node` +> Prefix: `skale passive-node` -#### Sync node initialization +#### Passive node initialization -Initialize a dedicated Sync node on the current machine. +Initialize a dedicated Passive node on the current machine. ```shell -skale sync-node init [ENV_FILE] [--indexer | --archive] [--snapshot] [--snapshot-from ] [--yes] +skale passive-node init [ENV_FILE] [--indexer | --archive] [--snapshot] [--snapshot-from ] [--yes] ``` Arguments: @@ -617,12 +617,12 @@ Options: * `--snapshot-from ` - Specify the IP of another node to download a snapshot from. * `--yes` - Initialize without additional confirmation. -#### Sync node update +#### Passive node update -Update the Sync node software and configuration. +Update the Passive node software and configuration. ```shell -skale sync-node update [ENV_FILEPATH] [--yes] +skale passive-node update [ENV_FILEPATH] [--yes] ``` Arguments: @@ -633,21 +633,21 @@ Options: * `--yes` - Update without additionalconfirmation. -> NOTE: You can just update a file with environment variables used during `skale sync-node init`. +> NOTE: You can just update a file with environment variables used during `skale passive-node init`. -#### Sync node cleanup +#### Passive node cleanup -Remove all data and containers for the Sync node. +Remove all data and containers for the Passive node. ```shell -skale sync-node cleanup [--yes] +skale passive-node cleanup [--yes] ``` Options: * `--yes` - Cleanup without confirmation. -> WARNING: This command removes all Sync node data. +> WARNING: This command removes all Passive node data. 
*** @@ -1166,14 +1166,14 @@ pip install -e ".[dev]" #### Generate info.py locally -Specify the build type (`normal`, `sync`, or `fair`): +Specify the build type (`normal`, `passive`, or `fair`): ```shell # Example for Standard build ./scripts/generate_info.sh 1.0.0 my-branch normal -# Example for Sync build -./scripts/generate_info.sh 1.0.0 my-branch sync +# Example for Passive build +./scripts/generate_info.sh 1.0.0 my-branch passive # Example for Fair build ./scripts/generate_info.sh 1.0.0 my-branch fair diff --git a/node_cli/cli/sync_node.py b/node_cli/cli/passive_node.py similarity index 76% rename from node_cli/cli/sync_node.py rename to node_cli/cli/passive_node.py index 5f0a5217..c24fe0b5 100644 --- a/node_cli/cli/sync_node.py +++ b/node_cli/cli/passive_node.py @@ -21,7 +21,7 @@ import click -from node_cli.core.node import init_sync, update_sync, cleanup_sync +from node_cli.core.node import init_passive, update_passive, cleanup_passive from node_cli.utils.helper import ( abort_if_false, error_exit, @@ -32,20 +32,20 @@ G_TEXTS = safe_load_texts() -TEXTS = G_TEXTS['sync_node'] +TEXTS = G_TEXTS['passive_node'] @click.group() -def sync_node_cli(): +def passive_node_cli(): pass -@sync_node_cli.group(help='SKALE sync node commands') -def sync_node(): +@passive_node_cli.group(help='SKALE passive node commands') +def passive_node(): pass -@sync_node.command('init', help=TEXTS['init']['help']) +@passive_node.command('init', help=TEXTS['init']['help']) @click.argument('env_file') @click.option('--indexer', help=TEXTS['init']['indexer'], is_flag=True) @click.option('--archive', help=TEXTS['init']['archive'], is_flag=True) @@ -54,15 +54,15 @@ def sync_node(): '--snapshot-from', type=URL_TYPE, default=None, hidden=True, help=TEXTS['init']['snapshot_from'] ) @streamed_cmd -def _init_sync( +def _init_passive( env_file, indexer: bool, archive: bool, snapshot: bool, snapshot_from: Optional[str] ) -> None: if indexer and archive: error_exit('Cannot use both --indexer and 
--archive options') - init_sync(env_file, indexer, archive, snapshot, snapshot_from) + init_passive(env_file, indexer, archive, snapshot, snapshot_from) -@sync_node.command('update', help='Update sync node from .env file') +@passive_node.command('update', help='Update passive node from .env file') @click.option( '--yes', is_flag=True, @@ -73,11 +73,11 @@ def _init_sync( @click.option('--unsafe', 'unsafe_ok', help='Allow unsafe update', hidden=True, is_flag=True) @click.argument('env_file') @streamed_cmd -def _update_sync(env_file, unsafe_ok): - update_sync(env_file) +def _update_passive(env_file, unsafe_ok): + update_passive(env_file) -@sync_node.command('cleanup', help='Remove sync node data and containers') +@passive_node.command('cleanup', help='Remove passive node data and containers') @click.option( '--yes', is_flag=True, @@ -86,5 +86,5 @@ def _update_sync(env_file, unsafe_ok): prompt='Are you sure you want to remove all node containers and data?', ) @streamed_cmd -def _cleanup_sync() -> None: - cleanup_sync() +def _cleanup_passive() -> None: + cleanup_passive() diff --git a/node_cli/configs/__init__.py b/node_cli/configs/__init__.py index 6709ee75..748aa41a 100644 --- a/node_cli/configs/__init__.py +++ b/node_cli/configs/__init__.py @@ -55,7 +55,7 @@ SGX_CERTIFICATES_DIR_NAME = 'sgx_certs' COMPOSE_PATH = os.path.join(CONTAINER_CONFIG_PATH, 'docker-compose.yml') -SYNC_COMPOSE_PATH = os.path.join(CONTAINER_CONFIG_PATH, 'docker-compose-sync.yml') +PASSIVE_COMPOSE_PATH = os.path.join(CONTAINER_CONFIG_PATH, 'docker-compose-passive.yml') FAIR_COMPOSE_PATH = os.path.join(CONTAINER_CONFIG_PATH, 'docker-compose-fair.yml') STATIC_PARAMS_FILEPATH = os.path.join(CONTAINER_CONFIG_PATH, 'static_params.yaml') FAIR_STATIC_PARAMS_FILEPATH = os.path.join(CONTAINER_CONFIG_PATH, 'fair_static_params.yaml') diff --git a/node_cli/configs/user.py b/node_cli/configs/user.py index eb8764ed..248cdf65 100644 --- a/node_cli/configs/user.py +++ b/node_cli/configs/user.py @@ -116,7 +116,7 
@@ class SkaleUserConfig(BaseUserConfig): @dataclass -class SyncUserConfig(BaseUserConfig): +class PassiveUserConfig(BaseUserConfig): endpoint: str manager_contracts: str schain_name: str = '' @@ -176,8 +176,8 @@ def get_user_config_class( user_config_class = FairBootUserConfig elif node_type == NodeType.FAIR: user_config_class = FairUserConfig - elif node_type == NodeType.SYNC: - user_config_class = SyncUserConfig + elif node_type == NodeType.PASSIVE: + user_config_class = PassiveUserConfig else: user_config_class = SkaleUserConfig return user_config_class diff --git a/node_cli/core/nginx.py b/node_cli/core/nginx.py index 97ea4844..c16b0a34 100644 --- a/node_cli/core/nginx.py +++ b/node_cli/core/nginx.py @@ -51,7 +51,7 @@ def check_ssl_certs(): def is_regular_node_nginx() -> bool: - return TYPE in [NodeType.REGULAR, NodeType.SYNC] + return TYPE in [NodeType.REGULAR, NodeType.PASSIVE] def reload_nginx() -> None: diff --git a/node_cli/core/node.py b/node_cli/core/node.py index d803ef58..a47ba10b 100644 --- a/node_cli/core/node.py +++ b/node_cli/core/node.py @@ -49,22 +49,22 @@ from node_cli.core.resources import update_resource_allocation from node_cli.migrations.focal_to_jammy import migrate as migrate_2_6 from node_cli.operations import ( - cleanup_sync_op, + cleanup_passive_op, configure_nftables, init_op, - init_sync_op, + init_passive_op, restore_op, turn_off_op, turn_on_op, update_op, - update_sync_op, + update_passive_op, ) from node_cli.utils.decorators import check_inited, check_not_inited, check_user from node_cli.utils.docker_utils import ( BASE_FAIR_BOOT_COMPOSE_SERVICES, BASE_FAIR_COMPOSE_SERVICES, BASE_SKALE_COMPOSE_SERVICES, - BASE_SYNC_COMPOSE_SERVICES, + BASE_PASSIVE_COMPOSE_SERVICES, is_admin_running, is_api_running, ) @@ -103,7 +103,7 @@ class NodeStatuses(Enum): def is_update_safe(node_type: NodeType) -> bool: if not is_admin_running(node_type): - if node_type == NodeType.SYNC: + if node_type == NodeType.PASSIVE: return True elif not 
is_api_running(node_type): return True @@ -178,33 +178,33 @@ def restore(backup_path, env_filepath, node_type: NodeType, no_snapshot=False, c @check_not_inited -def init_sync( +def init_passive( env_filepath: str, indexer: bool, archive: bool, snapshot: bool, snapshot_from: Optional[str] ) -> None: - env = compose_node_env(env_filepath, node_type=NodeType.SYNC) + env = compose_node_env(env_filepath, node_type=NodeType.PASSIVE) if env is None: return - init_sync_op(env_filepath, env, indexer, archive, snapshot, snapshot_from) + init_passive_op(env_filepath, env, indexer, archive, snapshot, snapshot_from) logger.info('Waiting for containers initialization') time.sleep(TM_INIT_TIMEOUT) - if not is_base_containers_alive(node_type=NodeType.SYNC): + if not is_base_containers_alive(node_type=NodeType.PASSIVE): error_exit('Containers are not running', exit_code=CLIExitCodes.OPERATION_EXECUTION_ERROR) - logger.info('Sync node initialized successfully') + logger.info('Passive node initialized successfully') @check_inited @check_user -def update_sync(env_filepath: str, unsafe_ok: bool = False) -> None: +def update_passive(env_filepath: str, unsafe_ok: bool = False) -> None: logger.info('Node update started') prev_version = CliMetaManager().get_meta_info().version if (__version__ == 'test' or __version__.startswith('2.6')) and prev_version == '2.5.0': migrate_2_6() - env = compose_node_env(env_filepath, node_type=NodeType.SYNC) - update_ok = update_sync_op(env_filepath, env) + env = compose_node_env(env_filepath, node_type=NodeType.PASSIVE) + update_ok = update_passive_op(env_filepath, env) if update_ok: logger.info('Waiting for containers initialization') time.sleep(TM_INIT_TIMEOUT) - alive = is_base_containers_alive(node_type=NodeType.SYNC) + alive = is_base_containers_alive(node_type=NodeType.PASSIVE) if not update_ok or not alive: print_node_cmd_error() return @@ -214,11 +214,11 @@ def update_sync(env_filepath: str, unsafe_ok: bool = False) -> None: @check_inited 
@check_user -def cleanup_sync() -> None: - env = compose_node_env(SKALE_DIR_ENV_FILEPATH, save=False, node_type=NodeType.SYNC) +def cleanup_passive() -> None: + env = compose_node_env(SKALE_DIR_ENV_FILEPATH, save=False, node_type=NodeType.PASSIVE) schain_name = env['SCHAIN_NAME'] - cleanup_sync_op(env, schain_name) - logger.info('Sync node was cleaned up, all containers and data removed') + cleanup_passive_op(env, schain_name) + logger.info('Passive node was cleaned up, all containers and data removed') def compose_node_env( @@ -245,7 +245,7 @@ def compose_node_env( is_fair_boot=is_fair_boot, ) - if node_type == NodeType.SYNC or node_type == NodeType.FAIR: + if node_type == NodeType.PASSIVE or node_type == NodeType.FAIR: mnt_dir = SCHAINS_MNT_DIR_SINGLE_CHAIN else: mnt_dir = SCHAINS_MNT_DIR_REGULAR @@ -258,10 +258,10 @@ def compose_node_env( **user_config.to_env(), } - if inited_node and not node_type == NodeType.SYNC: + if inited_node and not node_type == NodeType.PASSIVE: env['FLASK_SECRET_KEY'] = get_flask_secret_key() - if sync_schains and not node_type == NodeType.SYNC: + if sync_schains and not node_type == NodeType.PASSIVE: env['BACKUP_RUN'] = 'True' if pull_config_for_schain: @@ -431,8 +431,8 @@ def get_expected_container_names(node_type: NodeType, is_fair_boot: bool) -> lis services = BASE_FAIR_BOOT_COMPOSE_SERVICES elif node_type == NodeType.FAIR and not is_fair_boot: services = BASE_FAIR_COMPOSE_SERVICES - elif node_type == NodeType.SYNC: - services = BASE_SYNC_COMPOSE_SERVICES + elif node_type == NodeType.PASSIVE: + services = BASE_PASSIVE_COMPOSE_SERVICES else: services = BASE_SKALE_COMPOSE_SERVICES diff --git a/node_cli/main.py b/node_cli/main.py index 7fb833c0..0bd7a8de 100644 --- a/node_cli/main.py +++ b/node_cli/main.py @@ -37,7 +37,7 @@ from node_cli.cli.wallet import wallet_cli from node_cli.cli.ssl import ssl_cli from node_cli.cli.resources_allocation import resources_allocation_cli -from node_cli.cli.sync_node import sync_node_cli +from 
node_cli.cli.passive_node import passive_node_cli from node_cli.cli.fair_boot import fair_boot_cli from node_cli.cli.fair_node import fair_node_cli from node_cli.cli.chain import chain_cli @@ -84,8 +84,8 @@ def info(): def get_sources_list() -> List[click.MultiCommand]: - if TYPE == NodeType.SYNC: - return [cli, sync_node_cli, ssl_cli] + if TYPE == NodeType.PASSIVE: + return [cli, passive_node_cli, ssl_cli] elif TYPE == NodeType.FAIR: return [ cli, @@ -104,7 +104,7 @@ def get_sources_list() -> List[click.MultiCommand]: logs_cli, resources_allocation_cli, node_cli, - sync_node_cli, + passive_node_cli, wallet_cli, ssl_cli, exit_cli, diff --git a/node_cli/operations/__init__.py b/node_cli/operations/__init__.py index d0f60eed..5ae8103f 100644 --- a/node_cli/operations/__init__.py +++ b/node_cli/operations/__init__.py @@ -20,14 +20,14 @@ from node_cli.operations.base import ( # noqa update as update_op, init as init_op, - init_sync as init_sync_op, + init_passive as init_passive_op, init_fair_boot as init_fair_boot_op, update_fair_boot as update_fair_boot_op, - update_sync as update_sync_op, + update_passive as update_passive_op, turn_off as turn_off_op, turn_on as turn_on_op, restore as restore_op, - cleanup_sync as cleanup_sync_op, + cleanup_passive as cleanup_passive_op, configure_nftables, ) from node_cli.operations.fair import ( # noqa diff --git a/node_cli/operations/base.py b/node_cli/operations/base.py index df7eec45..bf5b68f9 100644 --- a/node_cli/operations/base.py +++ b/node_cli/operations/base.py @@ -259,7 +259,7 @@ def init_fair_boot(env_filepath: str, env: dict) -> None: compose_up(env=env, node_type=NodeType.FAIR, is_fair_boot=True) -def init_sync( +def init_passive( env_filepath: str, env: dict, indexer: bool, @@ -308,13 +308,13 @@ def init_sync( ts = int(time.time()) update_node_cli_schain_status(schain_name, repair_ts=ts, snapshot_from=snapshot_from) - update_images(env=env, node_type=NodeType.SYNC) + update_images(env=env, node_type=NodeType.PASSIVE) 
- compose_up(env=env, node_type=NodeType.SYNC) + compose_up(env=env, node_type=NodeType.PASSIVE) -def update_sync(env_filepath: str, env: Dict) -> bool: - compose_rm(env=env, node_type=NodeType.SYNC) +def update_passive(env_filepath: str, env: Dict) -> bool: + compose_rm(env=env, node_type=NodeType.PASSIVE) remove_dynamic_containers() cleanup_volume_artifacts(env['DISK_MOUNTPOINT']) download_skale_node(env['NODE_VERSION'], env.get('CONTAINER_CONFIGS_DIR')) @@ -341,9 +341,9 @@ def update_sync(env_filepath: str, env: Dict) -> bool: distro.id(), distro.version(), ) - update_images(env=env, node_type=NodeType.SYNC) + update_images(env=env, node_type=NodeType.PASSIVE) - compose_up(env=env, node_type=NodeType.SYNC) + compose_up(env=env, node_type=NodeType.PASSIVE) return True @@ -423,8 +423,8 @@ def restore(env, backup_path, node_type: NodeType, config_only=False): return True -def cleanup_sync(env, schain_name: str) -> None: - turn_off(env, node_type=NodeType.SYNC) - cleanup_no_lvm_datadir(schain_name=schain_name) +def cleanup_passive(env, schain_name: str) -> None: + turn_off(env, node_type=NodeType.PASSIVE) + cleanup_no_lvm_datadir(chain_name=schain_name) rm_dir(GLOBAL_SKALE_DIR) rm_dir(SKALE_DIR) diff --git a/node_cli/utils/docker_utils.py b/node_cli/utils/docker_utils.py index 6ded9e34..0c91f30c 100644 --- a/node_cli/utils/docker_utils.py +++ b/node_cli/utils/docker_utils.py @@ -35,7 +35,7 @@ NGINX_CONTAINER_NAME, REMOVED_CONTAINERS_FOLDER_PATH, SGX_CERTIFICATES_DIR_NAME, - SYNC_COMPOSE_PATH, + PASSIVE_COMPOSE_PATH, ) from node_cli.utils.helper import run_cmd, str_to_bool from node_cli.utils.node_type import NodeType @@ -76,8 +76,8 @@ 'fair-boot-api': 'fair_boot_api', } -BASE_SYNC_COMPOSE_SERVICES = { - 'skale-sync-admin': 'skale_sync_admin', +BASE_PASSIVE_COMPOSE_SERVICES = { + 'skale-passive-admin': 'skale_passive_admin', 'nginx': 'skale_nginx', } @@ -285,8 +285,8 @@ def compose_build(env: dict, node_type: NodeType): def get_compose_path(node_type: NodeType) -> 
str: - if node_type == NodeType.SYNC: - return SYNC_COMPOSE_PATH + if node_type == NodeType.PASSIVE: + return PASSIVE_COMPOSE_PATH elif node_type == NodeType.FAIR: return FAIR_COMPOSE_PATH else: @@ -294,8 +294,8 @@ def get_compose_path(node_type: NodeType) -> str: def get_compose_services(node_type: NodeType) -> list[str]: - if node_type == NodeType.SYNC: - result = list(BASE_SYNC_COMPOSE_SERVICES) + if node_type == NodeType.PASSIVE: + result = list(BASE_PASSIVE_COMPOSE_SERVICES) elif node_type == NodeType.FAIR: result = list(BASE_FAIR_COMPOSE_SERVICES) else: @@ -316,9 +316,9 @@ def get_up_compose_cmd(node_type: NodeType, services: list[str] | None = None) - def compose_up( env, node_type: NodeType, is_fair_boot: bool = False, services: list[str] | None = None ): - if node_type == NodeType.SYNC: - logger.info('Running containers for sync node') - run_cmd(cmd=get_up_compose_cmd(node_type=NodeType.SYNC), env=env) + if node_type == NodeType.PASSIVE: + logger.info('Running containers for passive node') + run_cmd(cmd=get_up_compose_cmd(node_type=NodeType.PASSIVE), env=env) return if 'SGX_CERTIFICATES_DIR_NAME' not in env: @@ -406,8 +406,8 @@ def is_admin_running(node_type: NodeType, client: Optional[DockerClient] = None) container_name = 'skale_admin' if node_type == NodeType.FAIR: container_name = 'fair_admin' - elif node_type == NodeType.SYNC: - container_name = 'skale_sync_admin' + elif node_type == NodeType.PASSIVE: + container_name = 'skale_passive_admin' return is_container_running(name=container_name, dclient=client) diff --git a/node_cli/utils/node_type.py b/node_cli/utils/node_type.py index f35d4640..60a44037 100644 --- a/node_cli/utils/node_type.py +++ b/node_cli/utils/node_type.py @@ -22,5 +22,5 @@ class NodeType(Enum): REGULAR = 0 - SYNC = 1 + PASSIVE = 1 FAIR = 2 diff --git a/scripts/build.sh b/scripts/build.sh index a1adc516..afb06faf 100755 --- a/scripts/build.sh +++ b/scripts/build.sh @@ -24,7 +24,7 @@ fi if [ -z "$3" ] then - (>&2 echo 'You should 
provide type: normal, sync or fair') + (>&2 echo 'You should provide type: normal, passive or fair') echo $USAGE_MSG exit 1 fi @@ -37,8 +37,8 @@ OS=`uname -s`-`uname -m` # Use the new generate_info.sh script bash "${DIR}/generate_info.sh" "$VERSION" "$BRANCH" "$TYPE" -if [ "$TYPE" = "sync" ]; then - EXECUTABLE_NAME=skale-$VERSION-$OS-sync +if [ "$TYPE" = "passive" ]; then + EXECUTABLE_NAME=skale-$VERSION-$OS-passive elif [ "$TYPE" = "fair" ]; then EXECUTABLE_NAME=skale-$VERSION-$OS-fair else diff --git a/scripts/generate_info.sh b/scripts/generate_info.sh index f4993b7e..3c283497 100644 --- a/scripts/generate_info.sh +++ b/scripts/generate_info.sh @@ -18,7 +18,7 @@ if [ -z "$BRANCH" ]; then exit 1 fi if [ -z "$TYPE_STR" ]; then - (>&2 echo 'You should provide type: normal, sync or fair') + (>&2 echo 'You should provide type: normal, passive or fair') echo $USAGE_MSG exit 1 fi @@ -35,14 +35,14 @@ case "$TYPE_STR" in normal) TYPE_ENUM="NodeType.REGULAR" ;; - sync) - TYPE_ENUM="NodeType.SYNC" + passive) + TYPE_ENUM="NodeType.PASSIVE" ;; fair) TYPE_ENUM="NodeType.FAIR" ;; *) - (>&2 echo "Error: Invalid type '$TYPE_STR'. Must be 'normal', 'sync', or 'fair'") + (>&2 echo "Error: Invalid type '$TYPE_STR'. 
Must be 'normal', 'passive', or 'fair'") exit 1 ;; esac diff --git a/tests/cli/node_test.py b/tests/cli/node_test.py index 0db1d97a..f1cd3c80 100644 --- a/tests/cli/node_test.py +++ b/tests/cli/node_test.py @@ -326,7 +326,7 @@ def test_backup(): [ (NodeType.REGULAR, 'regular_user_conf'), (NodeType.FAIR, 'fair_user_conf'), - (NodeType.SYNC, 'sync_user_conf'), + (NodeType.PASSIVE, 'passive_user_conf'), ], ) def test_restore(request, node_type, test_user_conf, mocked_g_config, tmp_path): diff --git a/tests/cli/sync_node_test.py b/tests/cli/passive_node_test.py similarity index 84% rename from tests/cli/sync_node_test.py rename to tests/cli/passive_node_test.py index 014510e1..2b5a587d 100644 --- a/tests/cli/sync_node_test.py +++ b/tests/cli/passive_node_test.py @@ -22,7 +22,7 @@ import mock -from node_cli.cli.sync_node import _cleanup_sync, _init_sync, _update_sync +from node_cli.cli.passive_node import _cleanup_passive, _init_passive, _update_passive from node_cli.configs import NODE_DATA_PATH, SKALE_DIR from node_cli.core.node_options import NodeOptions from node_cli.utils.helper import init_default_logger @@ -36,18 +36,18 @@ init_default_logger() -def test_init_sync(mocked_g_config, clean_node_options, sync_user_conf): +def test_init_passive(mocked_g_config, clean_node_options, passive_user_conf): pathlib.Path(SKALE_DIR).mkdir(parents=True, exist_ok=True) with ( mock.patch('subprocess.run', new=subprocess_run_mock), - mock.patch('node_cli.core.node.init_sync_op'), + mock.patch('node_cli.core.node.init_passive_op'), mock.patch('node_cli.core.node.is_base_containers_alive', return_value=True), mock.patch('node_cli.core.resources.get_disk_size', return_value=BIG_DISK_SIZE), mock.patch('node_cli.operations.base.configure_nftables'), mock.patch('node_cli.utils.decorators.is_node_inited', return_value=False), mock.patch('node_cli.configs.user.validate_alias_or_address'), ): - result = run_command(_init_sync, [sync_user_conf.as_posix()]) + result = 
run_command(_init_passive, [passive_user_conf.as_posix()]) node_options = NodeOptions() assert not node_options.archive @@ -57,7 +57,7 @@ def test_init_sync(mocked_g_config, clean_node_options, sync_user_conf): assert result.exit_code == 0 -def test_init_sync_archive(mocked_g_config, clean_node_options, sync_user_conf): +def test_init_passive_archive(mocked_g_config, clean_node_options, passive_user_conf): pathlib.Path(NODE_DATA_PATH).mkdir(parents=True, exist_ok=True) with ( mock.patch('node_cli.core.node.is_base_containers_alive', return_value=True), @@ -78,9 +78,9 @@ def test_init_sync_archive(mocked_g_config, clean_node_options, sync_user_conf): mock.patch('node_cli.operations.base.configure_nftables'), mock.patch('node_cli.utils.decorators.is_node_inited', return_value=False), mock.patch('node_cli.configs.user.validate_alias_or_address'), - mock.patch('node_cli.cli.node.TYPE', NodeType.SYNC), + mock.patch('node_cli.cli.node.TYPE', NodeType.PASSIVE), ): - result = run_command(_init_sync, [sync_user_conf.as_posix(), '--archive']) + result = run_command(_init_passive, [passive_user_conf.as_posix(), '--archive']) node_options = NodeOptions() assert node_options.archive @@ -94,7 +94,7 @@ def test_init_archive_indexer_fail(mocked_g_config, clean_node_options): pathlib.Path(SKALE_DIR).mkdir(parents=True, exist_ok=True) with ( mock.patch('subprocess.run', new=subprocess_run_mock), - mock.patch('node_cli.core.node.init_sync_op'), + mock.patch('node_cli.core.node.init_passive_op'), mock.patch('node_cli.core.node.is_base_containers_alive', return_value=True), mock.patch('node_cli.core.resources.get_disk_size', return_value=BIG_DISK_SIZE), mock.patch('node_cli.operations.base.configure_nftables'), @@ -102,17 +102,17 @@ def test_init_archive_indexer_fail(mocked_g_config, clean_node_options): mock.patch('node_cli.core.node.compose_node_env', return_value={}), set_env_var('ENV_TYPE', 'devnet'), ): - result = run_command(_init_sync, ['./tests/test-env', '--archive', 
'--indexer']) + result = run_command(_init_passive, ['./tests/test-env', '--archive', '--indexer']) assert result.exit_code == 1 assert 'Cannot use both' in result.output -def test_update_sync(sync_user_conf, mocked_g_config): +def test_update_passive(passive_user_conf, mocked_g_config): pathlib.Path(SKALE_DIR).mkdir(parents=True, exist_ok=True) with ( mock.patch('subprocess.run', new=subprocess_run_mock), - mock.patch('node_cli.core.node.update_sync_op'), + mock.patch('node_cli.core.node.update_passive_op'), mock.patch('node_cli.core.node.is_base_containers_alive', return_value=True), mock.patch('node_cli.core.resources.get_disk_size', return_value=BIG_DISK_SIZE), mock.patch('node_cli.operations.base.configure_nftables'), @@ -123,16 +123,16 @@ def test_update_sync(sync_user_conf, mocked_g_config): ), mock.patch('node_cli.configs.user.validate_alias_or_address'), ): - result = run_command(_update_sync, [sync_user_conf.as_posix(), '--yes']) + result = run_command(_update_passive, [passive_user_conf.as_posix(), '--yes']) assert result.exit_code == 0 -def test_cleanup_sync(mocked_g_config): +def test_cleanup_passive(mocked_g_config): pathlib.Path(SKALE_DIR).mkdir(parents=True, exist_ok=True) with ( mock.patch('subprocess.run', new=subprocess_run_mock), - mock.patch('node_cli.core.node.cleanup_sync_op'), + mock.patch('node_cli.core.node.cleanup_passive_op'), mock.patch('node_cli.core.node.is_base_containers_alive', return_value=True), mock.patch('node_cli.core.resources.get_disk_size', return_value=BIG_DISK_SIZE), mock.patch('node_cli.operations.base.configure_nftables'), @@ -143,5 +143,5 @@ def test_cleanup_sync(mocked_g_config): return_value=CliMeta(version='2.6.0', config_stream='3.0.2'), ), ): - result = run_command(_cleanup_sync, ['--yes']) + result = run_command(_cleanup_passive, ['--yes']) assert result.exit_code == 0 diff --git a/tests/configs/configs_env_validate_test.py b/tests/configs/configs_env_validate_test.py index be7d1eb8..c9d38cbf 100644 --- 
a/tests/configs/configs_env_validate_test.py +++ b/tests/configs/configs_env_validate_test.py @@ -17,7 +17,7 @@ FairBootUserConfig, FairUserConfig, SkaleUserConfig, - SyncUserConfig, + PassiveUserConfig, get_user_config_class, get_validated_user_config, validate_env_type, @@ -40,11 +40,11 @@ def json(self): 'node_type, is_fair_boot, expected_type', [ (NodeType.REGULAR, False, SkaleUserConfig), - (NodeType.SYNC, False, SyncUserConfig), + (NodeType.PASSIVE, False, PassiveUserConfig), (NodeType.FAIR, True, FairBootUserConfig), (NodeType.FAIR, False, FairUserConfig), ], - ids=['regular', 'sync', 'fair_boot', 'fair_regular'], + ids=['regular', 'passive', 'fair_boot', 'fair_regular'], ) def test_build_env_params_keys(node_type, is_fair_boot, expected_type): env_type = get_user_config_class(node_type=node_type, is_fair_boot=is_fair_boot) diff --git a/tests/conftest.py b/tests/conftest.py index 24c85444..ba860af9 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -226,7 +226,7 @@ def tmp_schains_dir(): @pytest.fixture -def tmp_sync_datadir(): +def tmp_passive_datadir(): os.makedirs(TEST_SCHAINS_MNT_DIR_SINGLE_CHAIN, exist_ok=True) try: yield TEST_SCHAINS_MNT_DIR_SINGLE_CHAIN @@ -366,7 +366,7 @@ def fair_boot_user_conf(tmp_path): @pytest.fixture -def sync_user_conf(tmp_path): +def passive_user_conf(tmp_path): test_env_path = pathlib.Path(tmp_path / 'test-env') try: test_env = """ diff --git a/tests/core/core_node_test.py b/tests/core/core_node_test.py index 894e07b6..dd778a6f 100644 --- a/tests/core/core_node_test.py +++ b/tests/core/core_node_test.py @@ -34,12 +34,12 @@ 'WRONG_CONTAINER_1', 'skale_WRONG_CONTAINER_4', 'fair_WRONG_CONTAINER_6', - 'sync_WRONG_CONTAINER_8', + 'passive_WRONG_CONTAINER_8', ] NODE_TYPE_BOOT_COMBINATIONS: list[tuple[NodeType, bool]] = [ (NodeType.REGULAR, False), - (NodeType.SYNC, False), + (NodeType.PASSIVE, False), (NodeType.FAIR, True), (NodeType.FAIR, False), ] @@ -173,8 +173,8 @@ def test_is_base_containers_alive_empty(node_type, 
is_boot): True, ), ( - NodeType.SYNC, - 'sync_user_conf', + NodeType.PASSIVE, + 'passive_user_conf', False, False, False, @@ -205,8 +205,8 @@ def test_is_base_containers_alive_empty(node_type, is_boot): ], ids=[ 'regular', - 'regular_sync_flag', - 'sync', + 'regular_passive_flag', + 'passive', 'fair_boot', 'fair_regular', ], @@ -244,7 +244,7 @@ def test_compose_node_env( ) == expect_flask_key if expect_flask_key: assert result_env['FLASK_SECRET_KEY'] == 'mock_secret' - should_have_backup = sync_schains and node_type != NodeType.SYNC + should_have_backup = sync_schains and node_type != NodeType.PASSIVE assert ('BACKUP_RUN' in result_env and result_env['BACKUP_RUN'] == 'True') == should_have_backup @@ -358,7 +358,7 @@ def test_update_node(regular_user_conf, mocked_g_config, resource_file, inited_n assert result is None -@pytest.mark.parametrize('node_type', [NodeType.REGULAR, NodeType.SYNC, NodeType.FAIR]) +@pytest.mark.parametrize('node_type', [NodeType.REGULAR, NodeType.PASSIVE, NodeType.FAIR]) @mock.patch('node_cli.core.node.is_admin_running', return_value=False) @mock.patch('node_cli.core.node.is_api_running', return_value=False) @mock.patch('node_cli.utils.helper.requests.get') @@ -372,14 +372,14 @@ def test_is_update_safe_when_admin_and_api_not_running( @mock.patch('node_cli.core.node.is_admin_running', return_value=False) @mock.patch('node_cli.core.node.is_api_running', return_value=True) @mock.patch('node_cli.utils.helper.requests.get') -def test_is_update_safe_when_admin_not_running_for_sync( +def test_is_update_safe_when_admin_not_running_for_passive( mock_requests_get, mock_is_api_running, mock_is_admin_running ): - assert is_update_safe(node_type=NodeType.SYNC) is True + assert is_update_safe(node_type=NodeType.PASSIVE) is True mock_requests_get.assert_not_called() -@pytest.mark.parametrize('node_type', [NodeType.REGULAR, NodeType.SYNC, NodeType.FAIR]) +@pytest.mark.parametrize('node_type', [NodeType.REGULAR, NodeType.PASSIVE, NodeType.FAIR]) 
@pytest.mark.parametrize( 'api_is_safe, expected_result', [(True, True), (False, False)], @@ -417,7 +417,7 @@ def test_is_update_safe_when_only_api_running_for_regular( mock_requests_get.assert_called_once() -@pytest.mark.parametrize('node_type', [NodeType.REGULAR, NodeType.SYNC, NodeType.FAIR]) +@pytest.mark.parametrize('node_type', [NodeType.REGULAR, NodeType.PASSIVE, NodeType.FAIR]) @mock.patch('node_cli.core.node.is_admin_running', return_value=True) @mock.patch('node_cli.utils.helper.requests.get') def test_is_update_safe_when_api_call_fails(mock_requests_get, mock_is_admin_running, node_type): diff --git a/tests/core/core_schains_test.py b/tests/core/core_schains_test.py index c9281adb..ee868242 100644 --- a/tests/core/core_schains_test.py +++ b/tests/core/core_schains_test.py @@ -34,9 +34,9 @@ def test_toggle_repair_mode(tmp_schains_dir): @freezegun.freeze_time(CURRENT_DATETIME) -def test_cleanup_sync_datadir(tmp_sync_datadir): +def test_cleanup_passive_datadir(tmp_passive_datadir): schain_name = 'test_schain' - base_folder = Path(tmp_sync_datadir).joinpath(schain_name) + base_folder = Path(tmp_passive_datadir).joinpath(schain_name) base_folder.mkdir() folders = [ '28e07f34', @@ -81,5 +81,5 @@ def test_cleanup_sync_datadir(tmp_sync_datadir): hash_path.touch() with mock.patch('node_cli.core.schains.rm_btrfs_subvolume'): - cleanup_no_lvm_datadir(schain_name, base_path=tmp_sync_datadir) + cleanup_no_lvm_datadir(schain_name, base_path=tmp_passive_datadir) assert not os.path.isdir(base_folder) diff --git a/tests/core/nginx_test.py b/tests/core/nginx_test.py index 56b6eb49..1652a514 100644 --- a/tests/core/nginx_test.py +++ b/tests/core/nginx_test.py @@ -61,8 +61,8 @@ def nginx_template(): [ (NodeType.REGULAR, True, True, True), (NodeType.REGULAR, False, True, False), - (NodeType.SYNC, True, True, True), - (NodeType.SYNC, False, True, False), + (NodeType.PASSIVE, True, True, True), + (NodeType.PASSIVE, False, True, False), (NodeType.FAIR, True, False, True), 
(NodeType.FAIR, False, False, False), ], @@ -133,7 +133,7 @@ def test_check_ssl_certs_missing_both(ssl_folder): 'node_type, expected_result', [ (NodeType.REGULAR, True), - (NodeType.SYNC, True), + (NodeType.PASSIVE, True), (NodeType.FAIR, False), ], ) diff --git a/text.yml b/text.yml index 560729a7..85ab8328 100644 --- a/text.yml +++ b/text.yml @@ -60,13 +60,13 @@ exit: wait_for_rotations: "Node is waiting to finish rotations" completed: "Node exiting is completed" -sync_node: +passive_node: init: - help: Initialize sync SKALE node - indexer: Run sync node in indexer mode (disable block rotation) + help: Initialize passive SKALE node + indexer: Run passive node in indexer mode (disable block rotation) archive: Enable historic state and disable block rotation snapshot_from: IP of the node to take snapshot from - snapshot: Start sync node from snapshot + snapshot: Start passive node from snapshot lvmpy: help: Lvmpy commands From ea85aa586c826c3fbfa42a36044233d49c130968 Mon Sep 17 00:00:00 2001 From: Alex Sheverdin Date: Wed, 6 Aug 2025 21:21:11 +0100 Subject: [PATCH 170/332] Add `set_domain_name` function for FAIR nodes --- node_cli/fair/fair_node.py | 20 ++++++++++++++++++++ 1 file changed, 20 insertions(+) diff --git a/node_cli/fair/fair_node.py b/node_cli/fair/fair_node.py index e6f2fb1d..517269fd 100644 --- a/node_cli/fair/fair_node.py +++ b/node_cli/fair/fair_node.py @@ -217,3 +217,23 @@ def exit() -> None: error_msg = payload logger.error(f'Node exit error {error_msg}') error_exit(error_msg, exit_code=CLIExitCodes.BAD_API_RESPONSE) + + +@check_inited +@check_user +def set_domain_name(domain_name): + if not is_node_inited(): + print(TEXTS['fair']['node']['not_inited']) + return + + status, payload = post_request( + blueprint=BLUEPRINT_NAME, method='set-domain-name', json={'domain_name': domain_name} + ) + if status == 'ok': + msg = TEXTS['node']['domain_name_changed'] + logger.info(msg) + print(msg) + else: + error_msg = payload + logger.error(f'Setting domain 
name error {error_msg}') + error_exit(error_msg, exit_code=CLIExitCodes.BAD_API_RESPONSE) From 24869ec7bb7817d72000ac7f961d63f61e63190f Mon Sep 17 00:00:00 2001 From: Alex Sheverdin Date: Wed, 6 Aug 2025 21:21:55 +0100 Subject: [PATCH 171/332] Add `set_domain_name` command for FAIR nodes --- node_cli/cli/fair_node.py | 15 +++++++++++++++ 1 file changed, 15 insertions(+) diff --git a/node_cli/cli/fair_node.py b/node_cli/cli/fair_node.py index 19b06c93..40e0f5f9 100644 --- a/node_cli/cli/fair_node.py +++ b/node_cli/cli/fair_node.py @@ -32,6 +32,7 @@ from node_cli.fair.fair_node import init as init_fair from node_cli.fair.fair_node import register as register_fair from node_cli.fair.fair_node import update as update_fair +from node_cli.fair.fair_node import set_domain_name as set_domain_name_fair from node_cli.utils.helper import IP_TYPE, URL_OR_ANY_TYPE, abort_if_false, streamed_cmd from node_cli.utils.texts import safe_load_texts @@ -180,3 +181,17 @@ def change_ip(ip: str) -> None: @streamed_cmd def exit_node() -> None: exit_fair() + + +@node.command('set-domain', help='Set node domain name') +@click.option('--domain', '-d', prompt='Enter node domain name', type=str, help='Node domain name') +@click.option( + '--yes', + is_flag=True, + callback=abort_if_false, + expose_value=False, + prompt='Are you sure you want to set domain name?', +) +@streamed_cmd +def set_domain_name(domain): + set_domain_name_fair(domain) From d7681a6d0984d15d1e295b3151388bd5b076b1a6 Mon Sep 17 00:00:00 2001 From: Alex Sheverdin Date: Wed, 6 Aug 2025 21:33:17 +0100 Subject: [PATCH 172/332] Sort imports --- node_cli/cli/fair_node.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/node_cli/cli/fair_node.py b/node_cli/cli/fair_node.py index 40e0f5f9..56302429 100644 --- a/node_cli/cli/fair_node.py +++ b/node_cli/cli/fair_node.py @@ -31,8 +31,8 @@ ) from node_cli.fair.fair_node import init as init_fair from node_cli.fair.fair_node import register as register_fair -from 
node_cli.fair.fair_node import update as update_fair from node_cli.fair.fair_node import set_domain_name as set_domain_name_fair +from node_cli.fair.fair_node import update as update_fair from node_cli.utils.helper import IP_TYPE, URL_OR_ANY_TYPE, abort_if_false, streamed_cmd from node_cli.utils.texts import safe_load_texts From 7acee44374b6e430f17b25ef465b70bba7eeae5b Mon Sep 17 00:00:00 2001 From: Alex Sheverdin Date: Thu, 7 Aug 2025 15:55:59 +0100 Subject: [PATCH 173/332] Add import sort check --- ruff.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ruff.toml b/ruff.toml index 9653b675..7eb49e79 100644 --- a/ruff.toml +++ b/ruff.toml @@ -5,4 +5,4 @@ quote-style = "single" [lint] # Add the `line-too-long` rule to the enforced rule set. -extend-select = ["E501"] \ No newline at end of file +extend-select = ["E501", "I"] From 09594494b6ababbb20e0383ca5328bbfbfca8bde Mon Sep 17 00:00:00 2001 From: Alex Sheverdin Date: Thu, 7 Aug 2025 15:57:45 +0100 Subject: [PATCH 174/332] Add 'set-domain-name' to 'ROUTS' for FAIR node --- node_cli/configs/routes.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/node_cli/configs/routes.py b/node_cli/configs/routes.py index a1a17879..a0f15ef1 100644 --- a/node_cli/configs/routes.py +++ b/node_cli/configs/routes.py @@ -40,7 +40,7 @@ 'schains': ['config', 'list', 'dkg-statuses', 'firewall-rules', 'repair', 'get'], 'ssl': ['status', 'upload'], 'wallet': ['info', 'send-eth'], - 'fair-node': ['info', 'register', 'change-ip', 'exit'], + 'fair-node': ['info', 'register', 'set-domain-name', 'change-ip', 'exit'], 'fair-chain': ['record', 'checks'], } } From 663b340e5ceb0e44e3d53678d887a0a4b9b55840 Mon Sep 17 00:00:00 2001 From: Alex Sheverdin Date: Thu, 7 Aug 2025 17:42:05 +0100 Subject: [PATCH 175/332] Disable import sort check --- ruff.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ruff.toml b/ruff.toml index 7eb49e79..978b38de 100644 --- a/ruff.toml +++ b/ruff.toml @@ -5,4 
+5,4 @@ quote-style = "single" [lint] # Add the `line-too-long` rule to the enforced rule set. -extend-select = ["E501", "I"] +extend-select = ["E501"] From a85445d437418b814a08cd25ce7053a3dc995c77 Mon Sep 17 00:00:00 2001 From: Alex Sheverdin Date: Thu, 7 Aug 2025 18:13:01 +0100 Subject: [PATCH 176/332] Fix tests --- tests/routes_test.py | 1 + 1 file changed, 1 insertion(+) diff --git a/tests/routes_test.py b/tests/routes_test.py index bcf1bf98..d17c3920 100644 --- a/tests/routes_test.py +++ b/tests/routes_test.py @@ -34,6 +34,7 @@ '/api/v1/fair-node/info', '/api/v1/fair-node/register', '/api/v1/fair-node/change-ip', + '/api/v1/fair-node/set-domain-name', '/api/v1/fair-node/exit', '/api/v1/fair-chain/record', '/api/v1/fair-chain/checks', From c9879cf3dcdbcad5ce11d362a98fe27aef2707bc Mon Sep 17 00:00:00 2001 From: Alex Sheverdin Date: Thu, 7 Aug 2025 18:51:29 +0100 Subject: [PATCH 177/332] Fix tests --- tests/routes_test.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/routes_test.py b/tests/routes_test.py index d17c3920..7a1216a8 100644 --- a/tests/routes_test.py +++ b/tests/routes_test.py @@ -33,8 +33,8 @@ '/api/v1/wallet/send-eth', '/api/v1/fair-node/info', '/api/v1/fair-node/register', - '/api/v1/fair-node/change-ip', '/api/v1/fair-node/set-domain-name', + '/api/v1/fair-node/change-ip', '/api/v1/fair-node/exit', '/api/v1/fair-chain/record', '/api/v1/fair-chain/checks', From e4e313c583010273a2b7872630a568b36b736018 Mon Sep 17 00:00:00 2001 From: badrogger Date: Fri, 8 Aug 2025 16:24:04 +0100 Subject: [PATCH 178/332] Add telegraf related env params to fair --- node_cli/configs/user.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/node_cli/configs/user.py b/node_cli/configs/user.py index eb8764ed..ed9fdeb1 100644 --- a/node_cli/configs/user.py +++ b/node_cli/configs/user.py @@ -86,6 +86,9 @@ class FairUserConfig(BaseUserConfig): boot_endpoint: str sgx_server_url: str enforce_btrfs: str = '' + telegraf: str = '' + influx_token: str 
= '' + influx_url: str = '' @dataclass From 20a37cd503261b433593fe8617e93aa465ade50c Mon Sep 17 00:00:00 2001 From: badrogger Date: Fri, 8 Aug 2025 17:41:54 +0100 Subject: [PATCH 179/332] Remove influx_db_token --- node_cli/cli/fair_boot.py | 4 +--- node_cli/configs/user.py | 2 -- 2 files changed, 1 insertion(+), 5 deletions(-) diff --git a/node_cli/cli/fair_boot.py b/node_cli/cli/fair_boot.py index 703bea88..24f3490a 100644 --- a/node_cli/cli/fair_boot.py +++ b/node_cli/cli/fair_boot.py @@ -50,9 +50,7 @@ def init_boot(env_file): @boot.command('register', help='Register Fair node in SKALE Manager (during Boot Phase).') -@click.option( - '--name', '-n', required=True, prompt='Enter fair node name', help='Fair node name' -) +@click.option('--name', '-n', required=True, prompt='Enter fair node name', help='Fair node name') @click.option( '--ip', prompt='Enter node public IP', diff --git a/node_cli/configs/user.py b/node_cli/configs/user.py index ed9fdeb1..64b82b82 100644 --- a/node_cli/configs/user.py +++ b/node_cli/configs/user.py @@ -87,7 +87,6 @@ class FairUserConfig(BaseUserConfig): sgx_server_url: str enforce_btrfs: str = '' telegraf: str = '' - influx_token: str = '' influx_url: str = '' @@ -109,7 +108,6 @@ class SkaleUserConfig(BaseUserConfig): sgx_server_url: str monitoring_containers: str = '' telegraf: str = '' - influx_token: str = '' influx_url: str = '' tg_api_key: str = '' tg_chat_id: str = '' From e6c23e7c65f607d00d185e7a758b85a24dc42618 Mon Sep 17 00:00:00 2001 From: Dmytro Date: Fri, 8 Aug 2025 18:03:49 +0100 Subject: [PATCH 180/332] Update node-cli internal structure - WIP --- .github/workflows/publish.yml | 4 +- .github/workflows/test.yml | 10 ++--- node_cli/cli/resources_allocation.py | 2 +- node_cli/configs/user.py | 24 ++++++++--- node_cli/core/nginx.py | 10 ++--- node_cli/core/node.py | 11 ++--- node_cli/core/node_options.py | 47 ++++++++++++++++++++-- node_cli/main.py | 4 +- node_cli/operations/base.py | 6 ++- node_cli/operations/fair.py | 2 +- 
node_cli/utils/docker_utils.py | 30 ++++++++------ node_cli/utils/node_type.py | 10 +++-- scripts/build.sh | 7 ++-- scripts/generate_info.sh | 11 ++--- tests/cli/node_test.py | 6 +-- tests/cli/resources_allocation_test.py | 2 +- tests/configs/configs_env_validate_test.py | 10 ++--- tests/core/core_checks_test.py | 4 +- tests/core/core_node_test.py | 18 ++++----- tests/core/nginx_test.py | 14 +++---- 20 files changed, 146 insertions(+), 86 deletions(-) diff --git a/.github/workflows/publish.yml b/.github/workflows/publish.yml index 96290f1b..660ec07d 100644 --- a/.github/workflows/publish.yml +++ b/.github/workflows/publish.yml @@ -72,7 +72,7 @@ jobs: strategy: matrix: os: [ubuntu-22.04] - build_type: [normal, passive, fair] + build_type: [skale, fair] steps: - name: Checkout code uses: actions/checkout@v4 @@ -96,7 +96,7 @@ jobs: id: asset_details run: | ASSET_BASE_NAME="skale-${{ needs.create_release.outputs.version }}-Linux-x86_64" - if [[ "${{ matrix.build_type }}" == "normal" ]]; then + if [[ "${{ matrix.build_type }}" == "skale" ]]; then echo "FINAL_ASSET_NAME=${ASSET_BASE_NAME}" >> $GITHUB_OUTPUT else echo "FINAL_ASSET_NAME=${ASSET_BASE_NAME}-${{ matrix.build_type }}" >> $GITHUB_OUTPUT diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 043c1792..ed804b5c 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -31,20 +31,20 @@ jobs: pip install -e ".[dev]" - name: Generate info - run: bash ./scripts/generate_info.sh 1.0.0 my-branch normal + run: bash ./scripts/generate_info.sh 1.0.0 my-branch skale - name: Check with ruff run: | ruff check - - name: Build binary - normal + - name: Build binary - skale run: | mkdir -p ./dist docker build . 
-t node-cli-builder - docker run -v /home/ubuntu/dist:/app/dist node-cli-builder bash scripts/build.sh test test normal + docker run -v /home/ubuntu/dist:/app/dist node-cli-builder bash scripts/build.sh test test skale docker rm -f $(docker ps -aq) - - name: Check build - normal + - name: Check build - skale run: sudo /home/ubuntu/dist/skale-test-Linux-x86_64 - name: Build binary - passive @@ -69,7 +69,7 @@ jobs: - name: Run prepare test build run: | - bash scripts/build.sh test test normal + bash scripts/build.sh test test skale - name: Run redis run: | diff --git a/node_cli/cli/resources_allocation.py b/node_cli/cli/resources_allocation.py index 01825350..a6b2e185 100644 --- a/node_cli/cli/resources_allocation.py +++ b/node_cli/cli/resources_allocation.py @@ -61,4 +61,4 @@ def show(): ) @click.option('--force', '-f', is_flag=True, help='Rewrite if already exists') def generate(env_file, force): - generate_resource_allocation_config(node_type=NodeType.REGULAR, env_file=env_file, force=force) + generate_resource_allocation_config(node_type=NodeType.SKALE, env_file=env_file, force=force) diff --git a/node_cli/configs/user.py b/node_cli/configs/user.py index 248cdf65..6acc6d1a 100644 --- a/node_cli/configs/user.py +++ b/node_cli/configs/user.py @@ -88,6 +88,13 @@ class FairUserConfig(BaseUserConfig): enforce_btrfs: str = '' +@dataclass +class PassiveFairUserConfig(BaseUserConfig): + fair_contracts: str + boot_endpoint: str + enforce_btrfs: str = '' + + @dataclass class FairBootUserConfig(BaseUserConfig): endpoint: str @@ -116,7 +123,7 @@ class SkaleUserConfig(BaseUserConfig): @dataclass -class PassiveUserConfig(BaseUserConfig): +class PassiveSkaleUserConfig(BaseUserConfig): endpoint: str manager_contracts: str schain_name: str = '' @@ -170,16 +177,21 @@ def parse_env_file(env_filepath: str) -> Dict: def get_user_config_class( node_type: NodeType, + is_passive: bool = False, is_fair_boot: bool = False, ) -> type[BaseUserConfig]: if node_type == NodeType.FAIR and 
is_fair_boot: user_config_class = FairBootUserConfig elif node_type == NodeType.FAIR: - user_config_class = FairUserConfig - elif node_type == NodeType.PASSIVE: - user_config_class = PassiveUserConfig - else: - user_config_class = SkaleUserConfig + if is_passive: + user_config_class = PassiveFairUserConfig + else: + user_config_class = FairUserConfig + elif node_type == NodeType.SKALE: + if is_passive: + user_config_class = PassiveSkaleUserConfig + else: + user_config_class = SkaleUserConfig return user_config_class diff --git a/node_cli/core/nginx.py b/node_cli/core/nginx.py index c16b0a34..340e3299 100644 --- a/node_cli/core/nginx.py +++ b/node_cli/core/nginx.py @@ -35,12 +35,12 @@ def generate_nginx_config() -> None: ssl_on = check_ssl_certs() - regular_node = is_regular_node_nginx() + skale_node = is_skale_node_nginx() template_data = { 'ssl': ssl_on, - 'regular_node': regular_node, + 'skale_node': skale_node, } - logger.info(f'Processing nginx template. ssl: {ssl_on}, regular_node: {regular_node}') + logger.info(f'Processing nginx template. 
ssl: {ssl_on}, skale_node: {skale_node}') process_template(NGINX_TEMPLATE_FILEPATH, NGINX_CONFIG_FILEPATH, template_data) @@ -50,8 +50,8 @@ def check_ssl_certs(): return os.path.exists(crt_path) and os.path.exists(key_path) -def is_regular_node_nginx() -> bool: - return TYPE in [NodeType.REGULAR, NodeType.PASSIVE] +def is_skale_node_nginx() -> bool: + return TYPE == NodeType.SKALE def reload_nginx() -> None: diff --git a/node_cli/core/node.py b/node_cli/core/node.py index a47ba10b..9b0a8d81 100644 --- a/node_cli/core/node.py +++ b/node_cli/core/node.py @@ -46,6 +46,7 @@ from node_cli.configs.user import SKALE_DIR_ENV_FILEPATH, get_validated_user_config from node_cli.core.checks import run_checks as run_host_checks from node_cli.core.host import get_flask_secret_key, is_node_inited, save_env_params +from node_cli.core.node_options import is_passive_node from node_cli.core.resources import update_resource_allocation from node_cli.migrations.focal_to_jammy import migrate as migrate_2_6 from node_cli.operations import ( @@ -101,11 +102,11 @@ class NodeStatuses(Enum): NOT_CREATED = 5 -def is_update_safe(node_type: NodeType) -> bool: - if not is_admin_running(node_type): - if node_type == NodeType.PASSIVE: +def is_update_safe() -> bool: + if not is_admin_running(): + if is_passive_node(): return True - elif not is_api_running(node_type): + elif not is_api_running(): return True status, payload = get_request(BLUEPRINT_NAME, 'update-safe') if status == 'error': @@ -223,7 +224,7 @@ def cleanup_passive() -> None: def compose_node_env( env_filepath: str, - node_type: NodeType, + # node_type: NodeType, inited_node: bool = False, sync_schains: Optional[bool] = None, pull_config_for_schain: Optional[str] = None, diff --git a/node_cli/core/node_options.py b/node_cli/core/node_options.py index 49a0ea05..e6500399 100644 --- a/node_cli/core/node_options.py +++ b/node_cli/core/node_options.py @@ -19,8 +19,11 @@ import logging +from node_cli.utils.node_type import NodeMode, NodeType 
from node_cli.utils.helper import read_json, write_json, init_file from node_cli.configs.node_options import NODE_OPTIONS_FILEPATH +from node_cli.cli.info import TYPE + logger = logging.getLogger(__name__) @@ -41,7 +44,7 @@ def _set(self, field_name: str, field_value) -> None: @property def archive(self) -> bool: - return self._get('archive') + return self._get('archive') or False @archive.setter def archive(self, archive: bool) -> None: @@ -49,7 +52,7 @@ def archive(self, archive: bool) -> None: @property def catchup(self) -> bool: - return self._get('catchup') + return self._get('catchup') or False @catchup.setter def catchup(self, catchup: bool) -> None: @@ -57,11 +60,49 @@ def catchup(self, catchup: bool) -> None: @property def historic_state(self) -> bool: - return self._get('historic_state') + return self._get('historic_state') or False @historic_state.setter def historic_state(self, historic_state: bool) -> None: return self._set('historic_state', historic_state) + @property + def node_mode(self) -> NodeMode: + return NodeMode(self._get('node_mode')) + + @node_mode.setter + def node_mode(self, node_mode: NodeMode) -> None: + return self._set('node_mode', node_mode.name) + def all(self) -> dict: return read_json(self.filepath) + + +def mark_active_node() -> None: + node_options = NodeOptions() + node_options.node_mode = NodeMode.ACTIVE + logger.info('Node marked as active.') + + +def mark_passive_node() -> None: + node_options = NodeOptions() + node_options.node_mode = NodeMode.PASSIVE + logger.info('Node marked as passive.') + + +def is_active_node() -> bool: + node_options = NodeOptions() + return node_options.node_mode == NodeMode.ACTIVE + + +def is_passive_node() -> bool: + node_options = NodeOptions() + return node_options.node_mode == NodeMode.PASSIVE + + +def is_skale_node() -> bool: + return TYPE == NodeType.SKALE + + +def is_fair_node() -> bool: + return TYPE == NodeType.FAIR diff --git a/node_cli/main.py b/node_cli/main.py index 0bd7a8de..e949f682 
100644 --- a/node_cli/main.py +++ b/node_cli/main.py @@ -84,9 +84,7 @@ def info(): def get_sources_list() -> List[click.MultiCommand]: - if TYPE == NodeType.PASSIVE: - return [cli, passive_node_cli, ssl_cli] - elif TYPE == NodeType.FAIR: + if TYPE == NodeType.FAIR: return [ cli, logs_cli, diff --git a/node_cli/operations/base.py b/node_cli/operations/base.py index bf5b68f9..47ce58aa 100644 --- a/node_cli/operations/base.py +++ b/node_cli/operations/base.py @@ -67,7 +67,7 @@ ) from node_cli.utils.helper import rm_dir, str_to_bool from node_cli.utils.meta import CliMetaManager, FairCliMetaManager -from node_cli.utils.node_type import NodeType +from node_cli.utils.node_type import NodeType, NodeMode from node_cli.utils.print_formatters import print_failed_requirements_checks logger = logging.getLogger(__name__) @@ -195,7 +195,6 @@ def update_fair_boot(env_filepath: str, env: Dict) -> bool: @checked_host def init(env_filepath: str, env: dict, node_type: NodeType) -> None: sync_skale_node() - ensure_btrfs_kernel_module_autoloaded() if env.get('SKIP_DOCKER_CONFIG') != 'True': configure_docker() @@ -206,6 +205,9 @@ def init(env_filepath: str, env: dict, node_type: NodeType) -> None: prepare_host(env_filepath, env_type=env['ENV_TYPE']) link_env_file() + node_options = NodeOptions() + node_options.node_mode = NodeMode.ACTIVE + configure_filebeat() configure_flask() generate_nginx_config() diff --git a/node_cli/operations/fair.py b/node_cli/operations/fair.py index 3455c21a..9a1d15c6 100644 --- a/node_cli/operations/fair.py +++ b/node_cli/operations/fair.py @@ -274,7 +274,7 @@ def trigger_skaled_snapshot_mode(env: dict, snapshot_from: str = 'any') -> None: def repair(env: dict, snapshot_from: str = 'any') -> None: logger.info('Starting fair node repair') container_name = 'fair_admin' - if is_admin_running(node_type=NodeType.FAIR): + if is_admin_running(): logger.info('Stopping admin container') stop_container_by_name(container_name=container_name) logger.info('Removing chain 
container') diff --git a/node_cli/utils/docker_utils.py b/node_cli/utils/docker_utils.py index 0c91f30c..ac9a428e 100644 --- a/node_cli/utils/docker_utils.py +++ b/node_cli/utils/docker_utils.py @@ -37,6 +37,7 @@ SGX_CERTIFICATES_DIR_NAME, PASSIVE_COMPOSE_PATH, ) +from node_cli.core.node_options import is_active_node, is_fair_node from node_cli.utils.helper import run_cmd, str_to_bool from node_cli.utils.node_type import NodeType @@ -340,13 +341,13 @@ def compose_up( else: logger.info('Running skale node base set of containers') logger.debug('Launching skale node containers with env %s', env) - run_cmd(cmd=get_up_compose_cmd(node_type=NodeType.REGULAR), env=env) + run_cmd(cmd=get_up_compose_cmd(node_type=NodeType.SKALE), env=env) if 'TG_API_KEY' in env and 'TG_CHAT_ID' in env: logger.info('Running containers for Telegram notifications') run_cmd( cmd=get_up_compose_cmd( - node_type=NodeType.REGULAR, services=list(NOTIFICATION_COMPOSE_SERVICES) + node_type=NodeType.SKALE, services=list(NOTIFICATION_COMPOSE_SERVICES) ), env=env, ) @@ -355,7 +356,7 @@ def compose_up( logger.info('Running monitoring containers') run_cmd( cmd=get_up_compose_cmd( - node_type=NodeType.REGULAR, services=list(MONITORING_COMPOSE_SERVICES) + node_type=NodeType.SKALE, services=list(MONITORING_COMPOSE_SERVICES) ), env=env, ) @@ -395,20 +396,25 @@ def is_container_running(name: str, dclient: Optional[DockerClient] = None) -> b return False -def is_api_running(node_type: NodeType, dclient: Optional[DockerClient] = None) -> bool: - if node_type == NodeType.FAIR: +def is_api_running(dclient: Optional[DockerClient] = None) -> bool: + if is_fair_node(): return is_container_running(name='fair_api', dclient=dclient) else: return is_container_running(name='skale_api', dclient=dclient) -def is_admin_running(node_type: NodeType, client: Optional[DockerClient] = None) -> bool: - container_name = 'skale_admin' - if node_type == NodeType.FAIR: - container_name = 'fair_admin' - elif node_type == 
NodeType.PASSIVE: - container_name = 'skale_passive_admin' - return is_container_running(name=container_name, dclient=client) +def is_admin_running(dclient: Optional[DockerClient] = None) -> bool: + if is_fair_node(): + if is_active_node(): + container_name = 'fair_admin' + else: + container_name = 'fair_passive_admin' + else: + if is_active_node(): + container_name = 'skale_admin' + else: + container_name = 'skale_passive_admin' + return is_container_running(name=container_name, dclient=dclient) def system_prune(): diff --git a/node_cli/utils/node_type.py b/node_cli/utils/node_type.py index 60a44037..bf3f6d4f 100644 --- a/node_cli/utils/node_type.py +++ b/node_cli/utils/node_type.py @@ -21,6 +21,10 @@ class NodeType(Enum): - REGULAR = 0 - PASSIVE = 1 - FAIR = 2 + SKALE = 0 + FAIR = 1 + + +class NodeMode(str, Enum): + ACTIVE = 'active' + PASSIVE = 'passive' diff --git a/scripts/build.sh b/scripts/build.sh index afb06faf..8e26577a 100755 --- a/scripts/build.sh +++ b/scripts/build.sh @@ -24,7 +24,7 @@ fi if [ -z "$3" ] then - (>&2 echo 'You should provide type: normal, passive or fair') + (>&2 echo 'You should provide type: skale, or fair') echo $USAGE_MSG exit 1 fi @@ -37,9 +37,8 @@ OS=`uname -s`-`uname -m` # Use the new generate_info.sh script bash "${DIR}/generate_info.sh" "$VERSION" "$BRANCH" "$TYPE" -if [ "$TYPE" = "passive" ]; then - EXECUTABLE_NAME=skale-$VERSION-$OS-passive -elif [ "$TYPE" = "fair" ]; then + +if [ "$TYPE" = "fair" ]; then EXECUTABLE_NAME=skale-$VERSION-$OS-fair else EXECUTABLE_NAME=skale-$VERSION-$OS diff --git a/scripts/generate_info.sh b/scripts/generate_info.sh index 3c283497..67ee6aef 100644 --- a/scripts/generate_info.sh +++ b/scripts/generate_info.sh @@ -18,7 +18,7 @@ if [ -z "$BRANCH" ]; then exit 1 fi if [ -z "$TYPE_STR" ]; then - (>&2 echo 'You should provide type: normal, passive or fair') + (>&2 echo 'You should provide type: skale or fair') echo $USAGE_MSG exit 1 fi @@ -32,17 +32,14 @@ CURRENT_DATETIME="$(date "+%Y-%m-%d 
%H:%M:%S")" OS="$(uname -s)-$(uname -m)" case "$TYPE_STR" in - normal) - TYPE_ENUM="NodeType.REGULAR" - ;; - passive) - TYPE_ENUM="NodeType.PASSIVE" + skale) + TYPE_ENUM="NodeType.SKALE" ;; fair) TYPE_ENUM="NodeType.FAIR" ;; *) - (>&2 echo "Error: Invalid type '$TYPE_STR'. Must be 'normal', 'passive', or 'fair'") + (>&2 echo "Error: Invalid type '$TYPE_STR'. Must be 'skale', or 'fair'") exit 1 ;; esac diff --git a/tests/cli/node_test.py b/tests/cli/node_test.py index f1cd3c80..c20b390c 100644 --- a/tests/cli/node_test.py +++ b/tests/cli/node_test.py @@ -324,7 +324,7 @@ def test_backup(): @pytest.mark.parametrize( 'node_type,test_user_conf', [ - (NodeType.REGULAR, 'regular_user_conf'), + (NodeType.SKALE, 'regular_user_conf'), (NodeType.FAIR, 'fair_user_conf'), (NodeType.PASSIVE, 'passive_user_conf'), ], @@ -394,7 +394,7 @@ def test_turn_off_maintenance_on(mocked_g_config, regular_user_conf): mock.patch('node_cli.core.node.turn_off_op'), mock.patch('node_cli.utils.decorators.is_node_inited', return_value=True), mock.patch('node_cli.configs.user.validate_alias_or_address'), - mock.patch('node_cli.cli.node.TYPE', NodeType.REGULAR), + mock.patch('node_cli.cli.node.TYPE', NodeType.SKALE), ): result = run_command_mock( 'node_cli.utils.helper.requests.post', @@ -427,7 +427,7 @@ def test_turn_on_maintenance_off(mocked_g_config, regular_user_conf): mock.patch('node_cli.core.node.is_base_containers_alive'), mock.patch('node_cli.utils.decorators.is_node_inited', return_value=True), mock.patch('node_cli.configs.user.validate_alias_or_address'), - mock.patch('node_cli.cli.node.TYPE', NodeType.REGULAR), + mock.patch('node_cli.cli.node.TYPE', NodeType.SKALE), ): result = run_command_mock( 'node_cli.utils.helper.requests.post', diff --git a/tests/cli/resources_allocation_test.py b/tests/cli/resources_allocation_test.py index 4f59daea..03b2e73b 100644 --- a/tests/cli/resources_allocation_test.py +++ b/tests/cli/resources_allocation_test.py @@ -72,7 +72,7 @@ def 
test_generate_already_exists(regular_user_conf, resource_alloc_config): resp_mock = response_mock(requests.codes.created) with ( mock.patch('node_cli.core.resources.get_disk_size', return_value=BIG_DISK_SIZE), - mock.patch('node_cli.cli.node.TYPE', NodeType.REGULAR), + mock.patch('node_cli.cli.node.TYPE', NodeType.SKALE), mock.patch('node_cli.configs.user.validate_alias_or_address'), ): result = run_command_mock( diff --git a/tests/configs/configs_env_validate_test.py b/tests/configs/configs_env_validate_test.py index c9d38cbf..4cdfc959 100644 --- a/tests/configs/configs_env_validate_test.py +++ b/tests/configs/configs_env_validate_test.py @@ -17,7 +17,7 @@ FairBootUserConfig, FairUserConfig, SkaleUserConfig, - PassiveUserConfig, + PassiveSkaleUserConfig, get_user_config_class, get_validated_user_config, validate_env_type, @@ -39,8 +39,8 @@ def json(self): @pytest.mark.parametrize( 'node_type, is_fair_boot, expected_type', [ - (NodeType.REGULAR, False, SkaleUserConfig), - (NodeType.PASSIVE, False, PassiveUserConfig), + (NodeType.SKALE, False, SkaleUserConfig), + (NodeType.PASSIVE, False, PassiveSkaleUserConfig), (NodeType.FAIR, True, FairBootUserConfig), (NodeType.FAIR, False, FairUserConfig), ], @@ -166,7 +166,7 @@ def test_validate_env_alias_or_address_with_alias(requests_mock): def test_get_validated_env_config_missing_file(): with pytest.raises(SystemExit): - get_validated_user_config(env_filepath='nonexistent.env', node_type=NodeType.REGULAR) + get_validated_user_config(env_filepath='nonexistent.env', node_type=NodeType.SKALE) def test_get_validated_env_config_unreadable_file(tmp_path): @@ -176,6 +176,6 @@ def test_get_validated_env_config_unreadable_file(tmp_path): try: os.chmod(env_file, 0o000) with pytest.raises(PermissionError): - get_validated_user_config(env_filepath=str(env_file), node_type=NodeType.REGULAR) + get_validated_user_config(env_filepath=str(env_file), node_type=NodeType.SKALE) finally: os.chmod(env_file, original_mode) diff --git 
a/tests/core/core_checks_test.py b/tests/core/core_checks_test.py index a4f7d437..5206efe4 100644 --- a/tests/core/core_checks_test.py +++ b/tests/core/core_checks_test.py @@ -379,8 +379,8 @@ def test_merge_report(): def test_get_static_params(tmp_config_dir): - params = get_static_params(NodeType.REGULAR) + params = get_static_params(NodeType.SKALE) shutil.copy(STATIC_PARAMS_FILEPATH, tmp_config_dir) - tmp_params = get_static_params(NodeType.REGULAR, config_path=tmp_config_dir) + tmp_params = get_static_params(NodeType.SKALE, config_path=tmp_config_dir) assert params['server']['cpu_total'] == 8 assert params == tmp_params diff --git a/tests/core/core_node_test.py b/tests/core/core_node_test.py index dd778a6f..ba1ab95f 100644 --- a/tests/core/core_node_test.py +++ b/tests/core/core_node_test.py @@ -38,7 +38,7 @@ ] NODE_TYPE_BOOT_COMBINATIONS: list[tuple[NodeType, bool]] = [ - (NodeType.REGULAR, False), + (NodeType.SKALE, False), (NodeType.PASSIVE, False), (NodeType.FAIR, True), (NodeType.FAIR, False), @@ -153,7 +153,7 @@ def test_is_base_containers_alive_empty(node_type, is_boot): ), [ ( - NodeType.REGULAR, + NodeType.SKALE, 'regular_user_conf', False, True, @@ -163,7 +163,7 @@ def test_is_base_containers_alive_empty(node_type, is_boot): False, ), ( - NodeType.REGULAR, + NodeType.SKALE, 'regular_user_conf', False, True, @@ -323,7 +323,7 @@ def test_init_node(regular_user_conf, no_resource_file): # todo: write new init mock.patch('node_cli.utils.helper.post_request', resp_mock), mock.patch('node_cli.configs.user.validate_alias_or_address'), ): - init(env_filepath=regular_user_conf.as_posix(), node_type=NodeType.REGULAR) + init(env_filepath=regular_user_conf.as_posix(), node_type=NodeType.SKALE) assert os.path.isfile(RESOURCE_ALLOCATION_FILEPATH) @@ -353,12 +353,12 @@ def test_update_node(regular_user_conf, mocked_g_config, resource_file, inited_n result = update( regular_user_conf.as_posix(), pull_config_for_schain=None, - node_type=NodeType.REGULAR, + 
node_type=NodeType.SKALE, ) assert result is None -@pytest.mark.parametrize('node_type', [NodeType.REGULAR, NodeType.PASSIVE, NodeType.FAIR]) +@pytest.mark.parametrize('node_type', [NodeType.SKALE, NodeType.PASSIVE, NodeType.FAIR]) @mock.patch('node_cli.core.node.is_admin_running', return_value=False) @mock.patch('node_cli.core.node.is_api_running', return_value=False) @mock.patch('node_cli.utils.helper.requests.get') @@ -379,7 +379,7 @@ def test_is_update_safe_when_admin_not_running_for_passive( mock_requests_get.assert_not_called() -@pytest.mark.parametrize('node_type', [NodeType.REGULAR, NodeType.PASSIVE, NodeType.FAIR]) +@pytest.mark.parametrize('node_type', [NodeType.SKALE, NodeType.PASSIVE, NodeType.FAIR]) @pytest.mark.parametrize( 'api_is_safe, expected_result', [(True, True), (False, False)], @@ -395,7 +395,7 @@ def test_is_update_safe_when_admin_running( mock_requests_get.assert_called_once() -@pytest.mark.parametrize('node_type', [NodeType.REGULAR, NodeType.FAIR]) +@pytest.mark.parametrize('node_type', [NodeType.SKALE, NodeType.FAIR]) @pytest.mark.parametrize( 'api_is_safe, expected_result', [(True, True), (False, False)], @@ -417,7 +417,7 @@ def test_is_update_safe_when_only_api_running_for_regular( mock_requests_get.assert_called_once() -@pytest.mark.parametrize('node_type', [NodeType.REGULAR, NodeType.PASSIVE, NodeType.FAIR]) +@pytest.mark.parametrize('node_type', [NodeType.SKALE, NodeType.PASSIVE, NodeType.FAIR]) @mock.patch('node_cli.core.node.is_admin_running', return_value=True) @mock.patch('node_cli.utils.helper.requests.get') def test_is_update_safe_when_api_call_fails(mock_requests_get, mock_is_admin_running, node_type): diff --git a/tests/core/nginx_test.py b/tests/core/nginx_test.py index 1652a514..c24645dc 100644 --- a/tests/core/nginx_test.py +++ b/tests/core/nginx_test.py @@ -7,7 +7,7 @@ from node_cli.core.nginx import ( generate_nginx_config, check_ssl_certs, - is_regular_node_nginx, + is_skale_node_nginx, SSL_KEY_NAME, SSL_CRT_NAME, ) @@ 
-24,7 +24,7 @@ {% endif %} } -{% if regular_node %} +{% if skale_node %} server { listen 80; {% if ssl %} @@ -59,8 +59,8 @@ def nginx_template(): @pytest.mark.parametrize( 'node_type, ssl_exists, expected_regular_flag, expected_ssl_flag', [ - (NodeType.REGULAR, True, True, True), - (NodeType.REGULAR, False, True, False), + (NodeType.SKALE, True, True, True), + (NodeType.SKALE, False, True, False), (NodeType.PASSIVE, True, True, True), (NodeType.PASSIVE, False, True, False), (NodeType.FAIR, True, False, True), @@ -132,14 +132,14 @@ def test_check_ssl_certs_missing_both(ssl_folder): @pytest.mark.parametrize( 'node_type, expected_result', [ - (NodeType.REGULAR, True), + (NodeType.SKALE, True), (NodeType.PASSIVE, True), (NodeType.FAIR, False), ], ) @mock.patch('node_cli.core.nginx.TYPE') -def test_is_regular_node_nginx(mock_type, node_type, expected_result): +def test_is_skale_node_nginx(mock_type, node_type, expected_result): mock_type.__eq__.side_effect = lambda other: node_type == other mock_type.__ne__.side_effect = lambda other: node_type != other - assert is_regular_node_nginx() is expected_result + assert is_skale_node_nginx() is expected_result From 9b5c410020ac037b90842500bf12318862b4e4a1 Mon Sep 17 00:00:00 2001 From: Dmytro Date: Sun, 10 Aug 2025 15:44:22 +0100 Subject: [PATCH 181/332] Restructure node-cli - add NodeType and NodeMode to all modules --- node_cli/cli/fair_boot.py | 4 +- node_cli/configs/__init__.py | 1 + node_cli/configs/user.py | 17 ++- node_cli/core/node.py | 115 ++++++++++++------- node_cli/core/node_options.py | 22 ++-- node_cli/fair/fair_boot.py | 16 ++- node_cli/fair/fair_node.py | 45 +++++--- node_cli/operations/base.py | 62 ++++++----- node_cli/operations/config_repo.py | 8 +- node_cli/operations/fair.py | 35 +++--- node_cli/utils/docker_utils.py | 123 +++++++++++++-------- tests/configs/configs_env_validate_test.py | 30 +++-- 12 files changed, 296 insertions(+), 182 deletions(-) diff --git a/node_cli/cli/fair_boot.py 
b/node_cli/cli/fair_boot.py index 703bea88..24f3490a 100644 --- a/node_cli/cli/fair_boot.py +++ b/node_cli/cli/fair_boot.py @@ -50,9 +50,7 @@ def init_boot(env_file): @boot.command('register', help='Register Fair node in SKALE Manager (during Boot Phase).') -@click.option( - '--name', '-n', required=True, prompt='Enter fair node name', help='Fair node name' -) +@click.option('--name', '-n', required=True, prompt='Enter fair node name', help='Fair node name') @click.option( '--ip', prompt='Enter node public IP', diff --git a/node_cli/configs/__init__.py b/node_cli/configs/__init__.py index 748aa41a..8fbecb04 100644 --- a/node_cli/configs/__init__.py +++ b/node_cli/configs/__init__.py @@ -57,6 +57,7 @@ COMPOSE_PATH = os.path.join(CONTAINER_CONFIG_PATH, 'docker-compose.yml') PASSIVE_COMPOSE_PATH = os.path.join(CONTAINER_CONFIG_PATH, 'docker-compose-passive.yml') FAIR_COMPOSE_PATH = os.path.join(CONTAINER_CONFIG_PATH, 'docker-compose-fair.yml') +PASSIVE_FAIR_COMPOSE_PATH = os.path.join(CONTAINER_CONFIG_PATH, 'docker-compose-fair-passive.yml') STATIC_PARAMS_FILEPATH = os.path.join(CONTAINER_CONFIG_PATH, 'static_params.yaml') FAIR_STATIC_PARAMS_FILEPATH = os.path.join(CONTAINER_CONFIG_PATH, 'fair_static_params.yaml') diff --git a/node_cli/configs/user.py b/node_cli/configs/user.py index 6acc6d1a..23e1c7af 100644 --- a/node_cli/configs/user.py +++ b/node_cli/configs/user.py @@ -28,7 +28,7 @@ from node_cli.configs import CONTAINER_CONFIG_PATH, SKALE_DIR from node_cli.configs.alias_address_validation import ContractType, validate_alias_or_address from node_cli.utils.helper import error_exit -from node_cli.utils.node_type import NodeType +from node_cli.utils.node_type import NodeMode, NodeType SKALE_DIR_ENV_FILEPATH = os.path.join(SKALE_DIR, '.env') CONFIGS_ENV_FILEPATH = os.path.join(CONTAINER_CONFIG_PATH, '.env') @@ -133,11 +133,16 @@ class PassiveSkaleUserConfig(BaseUserConfig): def get_validated_user_config( node_type: NodeType, + node_mode: NodeMode, env_filepath: str = 
SKALE_DIR_ENV_FILEPATH, is_fair_boot: bool = False, ) -> BaseUserConfig: params = parse_env_file(env_filepath) - user_config_class = get_user_config_class(node_type, is_fair_boot) + user_config_class = get_user_config_class( + node_type=node_type, + node_mode=node_mode, + is_fair_boot=is_fair_boot, + ) _, missing_params, extra_params = user_config_class.validate_params(params) if len(missing_params) > 0: @@ -177,18 +182,18 @@ def parse_env_file(env_filepath: str) -> Dict: def get_user_config_class( node_type: NodeType, - is_passive: bool = False, - is_fair_boot: bool = False, + node_mode: NodeMode, + is_fair_boot: bool, ) -> type[BaseUserConfig]: if node_type == NodeType.FAIR and is_fair_boot: user_config_class = FairBootUserConfig elif node_type == NodeType.FAIR: - if is_passive: + if node_mode == NodeMode.PASSIVE: user_config_class = PassiveFairUserConfig else: user_config_class = FairUserConfig elif node_type == NodeType.SKALE: - if is_passive: + if node_mode == NodeMode.PASSIVE: user_config_class = PassiveSkaleUserConfig else: user_config_class = SkaleUserConfig diff --git a/node_cli/core/node.py b/node_cli/core/node.py index 9b0a8d81..7577f166 100644 --- a/node_cli/core/node.py +++ b/node_cli/core/node.py @@ -46,8 +46,14 @@ from node_cli.configs.user import SKALE_DIR_ENV_FILEPATH, get_validated_user_config from node_cli.core.checks import run_checks as run_host_checks from node_cli.core.host import get_flask_secret_key, is_node_inited, save_env_params -from node_cli.core.node_options import is_passive_node from node_cli.core.resources import update_resource_allocation +from node_cli.core.node_options import ( + active_fair, + active_skale, + get_node_mode, + passive_skale, + passive_fair, +) from node_cli.migrations.focal_to_jammy import migrate as migrate_2_6 from node_cli.operations import ( cleanup_passive_op, @@ -66,6 +72,7 @@ BASE_FAIR_COMPOSE_SERVICES, BASE_SKALE_COMPOSE_SERVICES, BASE_PASSIVE_COMPOSE_SERVICES, + BASE_PASSIVE_FAIR_COMPOSE_SERVICES, 
is_admin_running, is_api_running, ) @@ -76,7 +83,7 @@ post_request, ) from node_cli.utils.meta import CliMetaManager -from node_cli.utils.node_type import NodeType +from node_cli.utils.node_type import NodeType, NodeMode from node_cli.utils.print_formatters import ( print_failed_requirements_checks, print_node_cmd_error, @@ -102,18 +109,20 @@ class NodeStatuses(Enum): NOT_CREATED = 5 -def is_update_safe() -> bool: - if not is_admin_running(): - if is_passive_node(): +def is_update_safe(node_type: NodeType, node_mode: NodeMode) -> bool: + if not is_admin_running(node_type, node_mode): + if node_mode == NodeMode.PASSIVE: return True - elif not is_api_running(): + elif not is_api_running(node_type): return True status, payload = get_request(BLUEPRINT_NAME, 'update-safe') if status == 'error': return False - safe = payload['update_safe'] + if not isinstance(payload, dict): + return False + safe = bool(payload.get('update_safe')) if not safe: - logger.info('Locked schains: %s', payload['unsafe_chains']) + logger.info('Locked schains: %s', payload.get('unsafe_chains')) return safe @@ -145,12 +154,13 @@ def register_node(name, p2p_ip, public_ip, port, domain_name): @check_not_inited def init(env_filepath: str, node_type: NodeType) -> None: - env = compose_node_env(env_filepath=env_filepath, node_type=node_type) + node_mode = NodeMode.ACTIVE + env = compose_node_env(env_filepath=env_filepath, node_type=node_type, node_mode=node_mode) - init_op(env_filepath=env_filepath, env=env, node_type=node_type) + init_op(env_filepath=env_filepath, env=env, node_type=node_type, node_mode=node_mode) logger.info('Waiting for containers initialization') time.sleep(TM_INIT_TIMEOUT) - if not is_base_containers_alive(node_type=node_type): + if not is_base_containers_alive(node_type=node_type, node_mode=node_mode): error_exit('Containers are not running', exit_code=CLIExitCodes.OPERATION_EXECUTION_ERROR) logger.info('Generating resource allocation file ...') 
update_resource_allocation(env['ENV_TYPE']) @@ -159,7 +169,8 @@ def init(env_filepath: str, node_type: NodeType) -> None: @check_not_inited def restore(backup_path, env_filepath, node_type: NodeType, no_snapshot=False, config_only=False): - env = compose_node_env(env_filepath=env_filepath, node_type=node_type) + node_mode = NodeMode.ACTIVE + env = compose_node_env(env_filepath=env_filepath, node_type=node_type, node_mode=node_mode) if env is None: return save_env_params(env_filepath) @@ -182,13 +193,14 @@ def restore(backup_path, env_filepath, node_type: NodeType, no_snapshot=False, c def init_passive( env_filepath: str, indexer: bool, archive: bool, snapshot: bool, snapshot_from: Optional[str] ) -> None: - env = compose_node_env(env_filepath, node_type=NodeType.PASSIVE) + node_mode = NodeMode.PASSIVE + env = compose_node_env(env_filepath, node_type=NodeType.SKALE, node_mode=node_mode) if env is None: return init_passive_op(env_filepath, env, indexer, archive, snapshot, snapshot_from) logger.info('Waiting for containers initialization') time.sleep(TM_INIT_TIMEOUT) - if not is_base_containers_alive(node_type=NodeType.PASSIVE): + if not is_base_containers_alive(node_type=NodeType.SKALE, node_mode=node_mode): error_exit('Containers are not running', exit_code=CLIExitCodes.OPERATION_EXECUTION_ERROR) logger.info('Passive node initialized successfully') @@ -200,12 +212,12 @@ def update_passive(env_filepath: str, unsafe_ok: bool = False) -> None: prev_version = CliMetaManager().get_meta_info().version if (__version__ == 'test' or __version__.startswith('2.6')) and prev_version == '2.5.0': migrate_2_6() - env = compose_node_env(env_filepath, node_type=NodeType.PASSIVE) + env = compose_node_env(env_filepath, node_type=NodeType.SKALE, node_mode=NodeMode.PASSIVE) update_ok = update_passive_op(env_filepath, env) if update_ok: logger.info('Waiting for containers initialization') time.sleep(TM_INIT_TIMEOUT) - alive = is_base_containers_alive(node_type=NodeType.PASSIVE) + alive = 
is_base_containers_alive(node_type=NodeType.SKALE, node_mode=NodeMode.PASSIVE) if not update_ok or not alive: print_node_cmd_error() return @@ -216,7 +228,9 @@ def update_passive(env_filepath: str, unsafe_ok: bool = False) -> None: @check_inited @check_user def cleanup_passive() -> None: - env = compose_node_env(SKALE_DIR_ENV_FILEPATH, save=False, node_type=NodeType.PASSIVE) + env = compose_node_env( + SKALE_DIR_ENV_FILEPATH, save=False, node_type=NodeType.SKALE, node_mode=NodeMode.PASSIVE + ) schain_name = env['SCHAIN_NAME'] cleanup_passive_op(env, schain_name) logger.info('Passive node was cleaned up, all containers and data removed') @@ -224,7 +238,8 @@ def cleanup_passive() -> None: def compose_node_env( env_filepath: str, - # node_type: NodeType, + node_type: NodeType, + node_mode: NodeMode, inited_node: bool = False, sync_schains: Optional[bool] = None, pull_config_for_schain: Optional[str] = None, @@ -234,6 +249,7 @@ def compose_node_env( if env_filepath is not None: user_config = get_validated_user_config( node_type=node_type, + node_mode=node_mode, env_filepath=env_filepath, is_fair_boot=is_fair_boot, ) @@ -246,7 +262,7 @@ def compose_node_env( is_fair_boot=is_fair_boot, ) - if node_type == NodeType.PASSIVE or node_type == NodeType.FAIR: + if node_mode == NodeMode.PASSIVE or node_type == NodeType.FAIR: mnt_dir = SCHAINS_MNT_DIR_SINGLE_CHAIN else: mnt_dir = SCHAINS_MNT_DIR_REGULAR @@ -259,10 +275,10 @@ def compose_node_env( **user_config.to_env(), } - if inited_node and not node_type == NodeType.PASSIVE: + if inited_node and not node_mode == NodeMode.PASSIVE: env['FLASK_SECRET_KEY'] = get_flask_secret_key() - if sync_schains and not node_type == NodeType.PASSIVE: + if sync_schains and not node_mode == NodeMode.PASSIVE: env['BACKUP_RUN'] = 'True' if pull_config_for_schain: @@ -279,7 +295,9 @@ def update( node_type: NodeType, unsafe_ok: bool = False, ) -> None: - if not unsafe_ok and not is_update_safe(node_type=node_type): + node_mode = get_node_mode() + + 
if not unsafe_ok and not is_update_safe(node_type=node_type, node_mode=node_mode): error_msg = 'Cannot update safely' error_exit(error_msg, exit_code=CLIExitCodes.UNSAFE_UPDATE) @@ -293,12 +311,13 @@ def update( sync_schains=False, pull_config_for_schain=pull_config_for_schain, node_type=node_type, + node_mode=node_mode, ) - update_ok = update_op(env_filepath, env, node_type=node_type) + update_ok = update_op(env_filepath, env, node_type=node_type, node_mode=node_mode) if update_ok: logger.info('Waiting for containers initialization') time.sleep(TM_INIT_TIMEOUT) - alive = is_base_containers_alive(node_type=node_type) + alive = is_base_containers_alive(node_type=node_type, node_mode=node_mode) if not update_ok or not alive: print_node_cmd_error() return @@ -401,25 +420,33 @@ def set_maintenance_mode_off(): @check_inited @check_user def turn_off(node_type: NodeType, maintenance_on: bool = False, unsafe_ok: bool = False) -> None: - if not unsafe_ok and not is_update_safe(node_type=node_type): + node_mode = get_node_mode() + if not unsafe_ok and not is_update_safe(node_type=node_type, node_mode=node_mode): error_msg = 'Cannot turn off safely' error_exit(error_msg, exit_code=CLIExitCodes.UNSAFE_UPDATE) if maintenance_on: set_maintenance_mode_on() - env = compose_node_env(SKALE_DIR_ENV_FILEPATH, save=False, node_type=node_type) - turn_off_op(node_type=node_type, env=env) + env = compose_node_env( + SKALE_DIR_ENV_FILEPATH, save=False, node_type=node_type, node_mode=node_mode + ) + turn_off_op(node_type=node_type, node_mode=node_mode, env=env) @check_inited @check_user def turn_on(maintenance_off, sync_schains, env_file, node_type: NodeType) -> None: + node_mode = get_node_mode() env = compose_node_env( - env_file, inited_node=True, sync_schains=sync_schains, node_type=node_type + env_file, + inited_node=True, + sync_schains=sync_schains, + node_type=node_type, + node_mode=node_mode, ) - turn_on_op(env=env, node_type=node_type) + turn_on_op(env=env, node_type=node_type, 
node_mode=node_mode) logger.info('Waiting for containers initialization') time.sleep(TM_INIT_TIMEOUT) - if not is_base_containers_alive(node_type=node_type): + if not is_base_containers_alive(node_type=node_type, node_mode=node_mode): print_node_cmd_error() return logger.info('Node turned on') @@ -427,21 +454,30 @@ def turn_on(maintenance_off, sync_schains, env_file, node_type: NodeType) -> Non set_maintenance_mode_off() -def get_expected_container_names(node_type: NodeType, is_fair_boot: bool) -> list[str]: +def get_expected_container_names( + node_type: NodeType, + node_mode: NodeMode, + is_fair_boot: bool, +) -> list[str]: if node_type == NodeType.FAIR and is_fair_boot: services = BASE_FAIR_BOOT_COMPOSE_SERVICES - elif node_type == NodeType.FAIR and not is_fair_boot: + elif active_fair(node_type, node_mode): services = BASE_FAIR_COMPOSE_SERVICES - elif node_type == NodeType.PASSIVE: - services = BASE_PASSIVE_COMPOSE_SERVICES - else: + elif passive_fair(node_type, node_mode): + services = BASE_PASSIVE_FAIR_COMPOSE_SERVICES + elif active_skale(node_type, node_mode): services = BASE_SKALE_COMPOSE_SERVICES - + elif passive_skale(node_type, node_mode): + services = BASE_PASSIVE_COMPOSE_SERVICES return list(services.values()) -def is_base_containers_alive(node_type: NodeType, is_fair_boot: bool = False) -> bool: - base_container_names = get_expected_container_names(node_type, is_fair_boot) +def is_base_containers_alive( + node_type: NodeType, + node_mode: NodeMode, + is_fair_boot: bool = False, +) -> bool: + base_container_names = get_expected_container_names(node_type, node_mode, is_fair_boot) dclient = docker.from_env() running_container_names = set(container.name for container in dclient.containers.list()) @@ -492,6 +528,7 @@ def set_domain_name(domain_name): def run_checks( node_type: NodeType, + node_mode: NodeMode, network: str = 'mainnet', container_config_path: str = CONTAINER_CONFIG_PATH, disk: Optional[str] = None, @@ -501,7 +538,7 @@ def run_checks( return 
if disk is None: - env_config = get_validated_user_config(node_type=node_type) + env_config = get_validated_user_config(node_type=node_type, node_mode=node_mode) disk = env_config.disk_mountpoint failed_checks = run_host_checks(disk, node_type, network, container_config_path) if not failed_checks: diff --git a/node_cli/core/node_options.py b/node_cli/core/node_options.py index e6500399..889978fe 100644 --- a/node_cli/core/node_options.py +++ b/node_cli/core/node_options.py @@ -22,7 +22,6 @@ from node_cli.utils.node_type import NodeMode, NodeType from node_cli.utils.helper import read_json, write_json, init_file from node_cli.configs.node_options import NODE_OPTIONS_FILEPATH -from node_cli.cli.info import TYPE logger = logging.getLogger(__name__) @@ -90,19 +89,22 @@ def mark_passive_node() -> None: logger.info('Node marked as passive.') -def is_active_node() -> bool: +def get_node_mode() -> NodeMode: node_options = NodeOptions() - return node_options.node_mode == NodeMode.ACTIVE + return node_options.node_mode -def is_passive_node() -> bool: - node_options = NodeOptions() - return node_options.node_mode == NodeMode.PASSIVE +def active_skale(node_type: NodeType, node_mode: NodeMode) -> bool: + return node_mode == NodeMode.ACTIVE and node_type == NodeType.SKALE + + +def active_fair(node_type: NodeType, node_mode: NodeMode) -> bool: + return node_mode == NodeMode.ACTIVE and node_type == NodeType.FAIR -def is_skale_node() -> bool: - return TYPE == NodeType.SKALE +def passive_skale(node_type: NodeType, node_mode: NodeMode) -> bool: + return node_mode == NodeMode.PASSIVE and node_type == NodeType.SKALE -def is_fair_node() -> bool: - return TYPE == NodeType.FAIR +def passive_fair(node_type: NodeType, node_mode: NodeMode) -> bool: + return node_mode == NodeMode.PASSIVE and node_type == NodeType.FAIR diff --git a/node_cli/fair/fair_boot.py b/node_cli/fair/fair_boot.py index 290bbb1e..b8da120c 100644 --- a/node_cli/fair/fair_boot.py +++ b/node_cli/fair/fair_boot.py @@ -23,11 
+23,12 @@ from node_cli.configs import TM_INIT_TIMEOUT from node_cli.core.node import compose_node_env, is_base_containers_alive +from node_cli.core.node_options import get_node_mode from node_cli.operations import init_fair_boot_op, update_fair_boot_op from node_cli.utils.decorators import check_inited, check_not_inited, check_user from node_cli.utils.exit_codes import CLIExitCodes from node_cli.utils.helper import error_exit -from node_cli.utils.node_type import NodeType +from node_cli.utils.node_type import NodeMode, NodeType from node_cli.utils.print_formatters import print_node_cmd_error logger = logging.getLogger(__name__) @@ -35,16 +36,19 @@ @check_not_inited def init(env_filepath: str) -> None: + node_mode = NodeMode.ACTIVE + node_type = NodeType.FAIR env = compose_node_env( env_filepath, - node_type=NodeType.FAIR, + node_type=node_type, + node_mode=node_mode, is_fair_boot=True, ) init_fair_boot_op(env_filepath, env) logger.info('Waiting for fair containers initialization') time.sleep(TM_INIT_TIMEOUT) - if not is_base_containers_alive(node_type=NodeType.FAIR, is_fair_boot=True): + if not is_base_containers_alive(node_type=node_type, node_mode=node_mode, is_fair_boot=True): error_exit('Containers are not running', exit_code=CLIExitCodes.OPERATION_EXECUTION_ERROR) logger.info('Init fair procedure finished') @@ -53,19 +57,23 @@ def init(env_filepath: str) -> None: @check_user def update(env_filepath: str, pull_config_for_schain: str) -> None: logger.info('Fair boot node update started') + node_mode = get_node_mode() env = compose_node_env( env_filepath, inited_node=True, sync_schains=False, pull_config_for_schain=pull_config_for_schain, node_type=NodeType.FAIR, + node_mode=node_mode, is_fair_boot=True, ) migrate_ok = update_fair_boot_op(env_filepath, env) if migrate_ok: logger.info('Waiting for containers initialization') time.sleep(TM_INIT_TIMEOUT) - alive = is_base_containers_alive(node_type=NodeType.FAIR, is_fair_boot=True) + alive = 
is_base_containers_alive( + node_type=NodeType.FAIR, node_mode=node_mode, is_fair_boot=True + ) if not migrate_ok or not alive: print_node_cmd_error() return diff --git a/node_cli/fair/fair_node.py b/node_cli/fair/fair_node.py index e6f2fb1d..a8a29ebb 100644 --- a/node_cli/fair/fair_node.py +++ b/node_cli/fair/fair_node.py @@ -27,6 +27,7 @@ from node_cli.core.docker_config import cleanup_docker_configuration from node_cli.core.host import is_node_inited, save_env_params from node_cli.core.node import compose_node_env, is_base_containers_alive +from node_cli.core.node_options import get_node_mode from node_cli.operations import ( FairUpdateType, cleanup_fair_op, @@ -38,7 +39,7 @@ from node_cli.utils.decorators import check_inited, check_not_inited, check_user from node_cli.utils.exit_codes import CLIExitCodes from node_cli.utils.helper import error_exit, get_request, post_request -from node_cli.utils.node_type import NodeType +from node_cli.utils.node_type import NodeMode, NodeType from node_cli.utils.print_formatters import print_node_cmd_error, print_node_info_fair from node_cli.utils.texts import safe_load_texts @@ -67,13 +68,16 @@ def get_node_info(format): @check_not_inited def restore_fair(backup_path, env_filepath, config_only=False): - env = compose_node_env(env_filepath, node_type=NodeType.FAIR) + node_mode = NodeMode.ACTIVE + env = compose_node_env(env_filepath, node_type=NodeType.FAIR, node_mode=node_mode) if env is None: return save_env_params(env_filepath) env['SKALE_DIR'] = SKALE_DIR - restored_ok = restore_fair_op(env, backup_path, config_only=config_only) + restored_ok = restore_fair_op( + node_mode=node_mode, env=env, backup_path=backup_path, config_only=config_only + ) if not restored_ok: error_exit('Restore operation failed', exit_code=CLIExitCodes.OPERATION_EXECUTION_ERROR) time.sleep(RESTORE_SLEEP_TIMEOUT) @@ -91,11 +95,16 @@ def migrate_from_boot( inited_node=True, sync_schains=False, node_type=NodeType.FAIR, + node_mode=NodeMode.ACTIVE, ) 
migrate_ok = update_fair_op( - env_filepath, env, update_type=FairUpdateType.FROM_BOOT, force_skaled_start=False + node_mode=NodeMode.ACTIVE, + env_filepath=env_filepath, + env=env, + update_type=FairUpdateType.FROM_BOOT, + force_skaled_start=False, ) - alive = is_base_containers_alive(node_type=NodeType.FAIR) + alive = is_base_containers_alive(node_type=NodeType.FAIR, node_mode=NodeMode.ACTIVE) if not migrate_ok or not alive: print_node_cmd_error() return @@ -114,20 +123,23 @@ def update( pull_config_for_schain, force_skaled_start, ) + node_mode = get_node_mode() env = compose_node_env( env_filepath, inited_node=True, sync_schains=False, node_type=NodeType.FAIR, + node_mode=node_mode, pull_config_for_schain=pull_config_for_schain, ) update_ok = update_fair_op( - env_filepath, - env, + node_mode=node_mode, + env_filepath=env_filepath, + env=env, update_type=FairUpdateType.REGULAR, force_skaled_start=force_skaled_start, ) - alive = is_base_containers_alive(node_type=NodeType.FAIR) + alive = is_base_containers_alive(node_type=NodeType.FAIR, node_mode=node_mode) if not update_ok or not alive: print_node_cmd_error() return @@ -137,15 +149,19 @@ def update( @check_user def cleanup() -> None: - env = compose_node_env(SKALE_DIR_ENV_FILEPATH, save=False, node_type=NodeType.FAIR) - cleanup_fair_op(env) + node_mode = get_node_mode() + env = compose_node_env( + SKALE_DIR_ENV_FILEPATH, save=False, node_type=NodeType.FAIR, node_mode=node_mode + ) + cleanup_fair_op(node_mode=node_mode, env=env) logger.info('Fair node was cleaned up, all containers and data removed') cleanup_docker_configuration() @check_not_inited def init(env_filepath: str) -> None: - env = compose_node_env(env_filepath, node_type=NodeType.FAIR) + node_mode = NodeMode.ACTIVE + env = compose_node_env(env_filepath, node_type=NodeType.FAIR, node_mode=node_mode) if env is None: return save_env_params(env_filepath) @@ -178,8 +194,11 @@ def register(ip: str) -> None: def repair_chain(snapshot_from: str = 'any') -> 
None: - env = compose_node_env(SKALE_DIR_ENV_FILEPATH, save=False, node_type=NodeType.FAIR) - repair_fair_op(env=env, snapshot_from=snapshot_from) + node_mode = get_node_mode() + env = compose_node_env( + SKALE_DIR_ENV_FILEPATH, save=False, node_type=NodeType.FAIR, node_mode=node_mode + ) + repair_fair_op(node_mode=node_mode, env=env, snapshot_from=snapshot_from) @check_inited diff --git a/node_cli/operations/base.py b/node_cli/operations/base.py index 47ce58aa..5168fab1 100644 --- a/node_cli/operations/base.py +++ b/node_cli/operations/base.py @@ -41,7 +41,12 @@ ) from node_cli.core.nftables import configure_nftables from node_cli.core.nginx import generate_nginx_config -from node_cli.core.node_options import NodeOptions +from node_cli.core.node_options import ( + NodeOptions, + get_node_mode, + mark_active_node, + mark_passive_node, +) from node_cli.core.resources import init_shared_space_volume, update_resource_allocation from node_cli.core.schains import ( cleanup_no_lvm_datadir, @@ -108,8 +113,8 @@ def wrapper(env_filepath: str, env: Dict, *args, **kwargs): @checked_host -def update(env_filepath: str, env: Dict, node_type: NodeType) -> bool: - compose_rm(node_type=node_type, env=env) +def update(env_filepath: str, env: Dict, node_type: NodeType, node_mode: NodeMode) -> bool: + compose_rm(node_type=node_type, node_mode=node_mode, env=env) remove_dynamic_containers() sync_skale_node() @@ -145,14 +150,14 @@ def update(env_filepath: str, env: Dict, node_type: NodeType) -> bool: distro.id(), distro.version(), ) - update_images(env=env, node_type=node_type) - compose_up(env=env, node_type=node_type) + update_images(env=env, node_type=node_type, node_mode=node_mode) + compose_up(env=env, node_type=node_type, node_mode=node_mode) return True @checked_host def update_fair_boot(env_filepath: str, env: Dict) -> bool: - compose_rm(node_type=NodeType.FAIR, env=env) + compose_rm(node_type=NodeType.FAIR, node_mode=NodeMode.ACTIVE, env=env) remove_dynamic_containers() 
cleanup_volume_artifacts(env['DISK_MOUNTPOINT']) @@ -187,13 +192,13 @@ def update_fair_boot(env_filepath: str, env: Dict) -> bool: distro.id(), distro.version(), ) - update_images(env=env, node_type=NodeType.FAIR) - compose_up(env=env, node_type=NodeType.FAIR, is_fair_boot=True) + update_images(env=env, node_type=NodeType.FAIR, node_mode=NodeMode.ACTIVE) + compose_up(env=env, node_type=NodeType.FAIR, node_mode=NodeMode.ACTIVE, is_fair_boot=True) return True @checked_host -def init(env_filepath: str, env: dict, node_type: NodeType) -> None: +def init(env_filepath: str, env: dict, node_type: NodeType, node_mode: NodeMode) -> None: sync_skale_node() ensure_btrfs_kernel_module_autoloaded() if env.get('SKIP_DOCKER_CONFIG') != 'True': @@ -205,8 +210,7 @@ def init(env_filepath: str, env: dict, node_type: NodeType) -> None: prepare_host(env_filepath, env_type=env['ENV_TYPE']) link_env_file() - node_options = NodeOptions() - node_options.node_mode = NodeMode.ACTIVE + mark_active_node() configure_filebeat() configure_flask() @@ -224,9 +228,9 @@ def init(env_filepath: str, env: dict, node_type: NodeType) -> None: distro.version(), ) update_resource_allocation(env_type=env['ENV_TYPE']) - update_images(env=env, node_type=node_type) + update_images(env=env, node_type=node_type, node_mode=node_mode) - compose_up(env=env, node_type=node_type) + compose_up(env=env, node_type=node_type, node_mode=node_mode) @checked_host @@ -243,6 +247,7 @@ def init_fair_boot(env_filepath: str, env: dict) -> None: prepare_host(env_filepath, env_type=env['ENV_TYPE']) link_env_file() + mark_active_node() configure_filebeat() configure_flask() @@ -256,9 +261,9 @@ def init_fair_boot(env_filepath: str, env: dict) -> None: distro.id(), distro.version(), ) - update_images(env=env, node_type=NodeType.FAIR) + update_images(env=env, node_type=NodeType.FAIR, node_mode=NodeMode.ACTIVE) - compose_up(env=env, node_type=NodeType.FAIR, is_fair_boot=True) + compose_up(env=env, node_type=NodeType.FAIR, 
node_mode=NodeMode.ACTIVE, is_fair_boot=True) def init_passive( @@ -289,6 +294,8 @@ def init_passive( node_options.catchup = archive or indexer node_options.historic_state = archive + mark_passive_node() + ensure_filestorage_mapping() link_env_file() @@ -310,13 +317,12 @@ def init_passive( ts = int(time.time()) update_node_cli_schain_status(schain_name, repair_ts=ts, snapshot_from=snapshot_from) - update_images(env=env, node_type=NodeType.PASSIVE) - - compose_up(env=env, node_type=NodeType.PASSIVE) + update_images(env=env, node_type=NodeType.SKALE, node_mode=NodeMode.PASSIVE) + compose_up(env=env, node_type=NodeType.SKALE, node_mode=NodeMode.PASSIVE) def update_passive(env_filepath: str, env: Dict) -> bool: - compose_rm(env=env, node_type=NodeType.PASSIVE) + compose_rm(env=env, node_type=NodeType.SKALE, node_mode=NodeMode.PASSIVE) remove_dynamic_containers() cleanup_volume_artifacts(env['DISK_MOUNTPOINT']) download_skale_node(env['NODE_VERSION'], env.get('CONTAINER_CONFIGS_DIR')) @@ -343,20 +349,19 @@ def update_passive(env_filepath: str, env: Dict) -> bool: distro.id(), distro.version(), ) - update_images(env=env, node_type=NodeType.PASSIVE) - - compose_up(env=env, node_type=NodeType.PASSIVE) + update_images(env=env, node_type=NodeType.SKALE, node_mode=NodeMode.PASSIVE) + compose_up(env=env, node_type=NodeType.SKALE, node_mode=NodeMode.PASSIVE) return True -def turn_off(env: dict, node_type: NodeType) -> None: +def turn_off(env: dict, node_type: NodeType, node_mode: NodeMode) -> None: logger.info('Turning off the node...') - compose_rm(env=env, node_type=node_type) + compose_rm(env=env, node_type=node_type, node_mode=node_mode) remove_dynamic_containers() logger.info('Node was successfully turned off') -def turn_on(env: dict, node_type: NodeType) -> None: +def turn_on(env: dict, node_type: NodeType, node_mode: NodeMode) -> None: logger.info('Turning on the node...') meta_manager = CliMetaManager() meta_manager.update_meta( @@ -373,10 +378,11 @@ def turn_on(env: 
dict, node_type: NodeType) -> None: configure_nftables(enable_monitoring=enable_monitoring) logger.info('Launching containers on the node...') - compose_up(env=env, node_type=node_type) + compose_up(env=env, node_type=node_type, node_mode=node_mode) def restore(env, backup_path, node_type: NodeType, config_only=False): + node_mode = get_node_mode() unpack_backup_archive(backup_path) failed_checks = run_host_checks( env['DISK_MOUNTPOINT'], @@ -410,7 +416,7 @@ def restore(env, backup_path, node_type: NodeType, config_only=False): distro.version(), ) if not config_only: - compose_up(env=env, node_type=node_type) + compose_up(env=env, node_type=node_type, node_mode=node_mode) failed_checks = run_host_checks( env['DISK_MOUNTPOINT'], @@ -426,7 +432,7 @@ def restore(env, backup_path, node_type: NodeType, config_only=False): def cleanup_passive(env, schain_name: str) -> None: - turn_off(env, node_type=NodeType.PASSIVE) + turn_off(env, node_type=NodeType.SKALE, node_mode=NodeMode.PASSIVE) cleanup_no_lvm_datadir(chain_name=schain_name) rm_dir(GLOBAL_SKALE_DIR) rm_dir(SKALE_DIR) diff --git a/node_cli/operations/config_repo.py b/node_cli/operations/config_repo.py index 656e4cb1..b8456db6 100644 --- a/node_cli/operations/config_repo.py +++ b/node_cli/operations/config_repo.py @@ -27,18 +27,18 @@ from node_cli.utils.git_utils import clone_repo from node_cli.utils.docker_utils import compose_pull, compose_build from node_cli.configs import CONTAINER_CONFIG_PATH, CONTAINER_CONFIG_TMP_PATH, SKALE_NODE_REPO_URL -from node_cli.utils.node_type import NodeType +from node_cli.utils.node_type import NodeType, NodeMode logger = logging.getLogger(__name__) -def update_images(env: dict, node_type: NodeType) -> None: +def update_images(env: dict, node_type: NodeType, node_mode: NodeMode) -> None: local = env.get('CONTAINER_CONFIGS_DIR') != '' if local: - compose_build(env=env, node_type=node_type) + compose_build(env=env, node_type=node_type, node_mode=node_mode) else: - 
compose_pull(env=env, node_type=node_type) + compose_pull(env=env, node_type=node_type, node_mode=node_mode) def download_skale_node(stream: Optional[str] = None, src: Optional[str] = None) -> None: diff --git a/node_cli/operations/fair.py b/node_cli/operations/fair.py index 9a1d15c6..d9898a10 100644 --- a/node_cli/operations/fair.py +++ b/node_cli/operations/fair.py @@ -54,7 +54,6 @@ from node_cli.utils.docker_utils import ( REDIS_SERVICE_DICT, REDIS_START_TIMEOUT, - NodeType, compose_rm, compose_up, docker_cleanup, @@ -67,6 +66,7 @@ from node_cli.utils.helper import cleanup_dir_content, rm_dir, str_to_bool from node_cli.utils.meta import FairCliMetaManager from node_cli.utils.print_formatters import TEXTS, print_failed_requirements_checks +from node_cli.utils.node_type import NodeMode, NodeType logger = logging.getLogger(__name__) @@ -103,8 +103,8 @@ def init(env_filepath: str, env: dict) -> bool: distro.id(), distro.version(), ) - update_images(env=env, node_type=NodeType.FAIR) - compose_up(env=env, node_type=NodeType.FAIR) + update_images(env=env, node_type=NodeType.FAIR, node_mode=NodeMode.ACTIVE) + compose_up(env=env, node_type=NodeType.FAIR, node_mode=NodeMode.ACTIVE) wait_for_container(REDIS_SERVICE_DICT['redis']) time.sleep(REDIS_START_TIMEOUT) return True @@ -112,7 +112,7 @@ def init(env_filepath: str, env: dict) -> bool: @checked_host def update_fair_boot(env_filepath: str, env: dict) -> bool: - compose_rm(node_type=NodeType.FAIR, env=env) + compose_rm(node_type=NodeType.FAIR, node_mode=NodeMode.ACTIVE, env=env) remove_dynamic_containers() cleanup_volume_artifacts(env['DISK_MOUNTPOINT']) @@ -147,19 +147,20 @@ def update_fair_boot(env_filepath: str, env: dict) -> bool: distro.id(), distro.version(), ) - update_images(env=env, node_type=NodeType.FAIR) - compose_up(env=env, node_type=NodeType.FAIR, is_fair_boot=True) + update_images(env=env, node_type=NodeType.FAIR, node_mode=NodeMode.ACTIVE) + compose_up(env=env, node_type=NodeType.FAIR, 
node_mode=NodeMode.ACTIVE, is_fair_boot=True) return True @checked_host def update( + node_mode: NodeMode, env_filepath: str, env: dict, update_type: FairUpdateType, force_skaled_start: bool, ) -> bool: - compose_rm(node_type=NodeType.FAIR, env=env) + compose_rm(node_type=NodeType.FAIR, node_mode=node_mode, env=env) if update_type not in (FairUpdateType.INFRA_ONLY, FairUpdateType.FROM_BOOT): remove_dynamic_containers() @@ -195,19 +196,21 @@ def update( if update_type == FairUpdateType.FROM_BOOT: migrate_nftables_from_boot(chain_name=fair_chain_name) - update_images(env=env, node_type=NodeType.FAIR) + update_images(env=env, node_type=NodeType.FAIR, node_mode=node_mode) - compose_up(env=env, node_type=NodeType.FAIR, services=list(REDIS_SERVICE_DICT)) + compose_up( + env=env, node_type=NodeType.FAIR, node_mode=node_mode, services=list(REDIS_SERVICE_DICT) + ) wait_for_container(REDIS_SERVICE_DICT['redis']) time.sleep(REDIS_START_TIMEOUT) if update_type == FairUpdateType.FROM_BOOT: migrate_chain_record(env) update_chain_record(env, force_skaled_start=force_skaled_start) - compose_up(env=env, node_type=NodeType.FAIR) + compose_up(env=env, node_type=NodeType.FAIR, node_mode=node_mode) return True -def restore(env, backup_path, config_only=False): +def restore(node_mode: NodeMode, env, backup_path, config_only=False): unpack_backup_archive(backup_path) failed_checks = run_host_checks( env['DISK_MOUNTPOINT'], @@ -239,7 +242,7 @@ def restore(env, backup_path, config_only=False): ) if not config_only: - compose_up(env=env, node_type=NodeType.FAIR) + compose_up(env=env, node_type=NodeType.FAIR, node_mode=node_mode) failed_checks = run_host_checks( env['DISK_MOUNTPOINT'], @@ -254,8 +257,8 @@ def restore(env, backup_path, config_only=False): return True -def cleanup(env: dict) -> None: - turn_off(env, node_type=NodeType.FAIR) +def cleanup(node_mode: NodeMode, env: dict) -> None: + turn_off(env, node_type=NodeType.FAIR, node_mode=node_mode) cleanup_no_lvm_datadir() 
rm_dir(GLOBAL_SKALE_DIR) rm_dir(SKALE_DIR) @@ -271,10 +274,10 @@ def trigger_skaled_snapshot_mode(env: dict, snapshot_from: str = 'any') -> None: print(TEXTS['fair']['node']['repair']['repair_requested']) -def repair(env: dict, snapshot_from: str = 'any') -> None: +def repair(node_mode: NodeMode, env: dict, snapshot_from: str = 'any') -> None: logger.info('Starting fair node repair') container_name = 'fair_admin' - if is_admin_running(): + if is_admin_running(node_type=NodeType.FAIR, node_mode=node_mode): logger.info('Stopping admin container') stop_container_by_name(container_name=container_name) logger.info('Removing chain container') diff --git a/node_cli/utils/docker_utils.py b/node_cli/utils/docker_utils.py index ac9a428e..83a8dc6f 100644 --- a/node_cli/utils/docker_utils.py +++ b/node_cli/utils/docker_utils.py @@ -33,13 +33,14 @@ COMPOSE_PATH, FAIR_COMPOSE_PATH, NGINX_CONTAINER_NAME, + PASSIVE_FAIR_COMPOSE_PATH, REMOVED_CONTAINERS_FOLDER_PATH, SGX_CERTIFICATES_DIR_NAME, PASSIVE_COMPOSE_PATH, ) -from node_cli.core.node_options import is_active_node, is_fair_node +from node_cli.core.node_options import active_fair, active_skale, passive_fair, passive_skale from node_cli.utils.helper import run_cmd, str_to_bool -from node_cli.utils.node_type import NodeType +from node_cli.utils.node_type import NodeMode, NodeType logger = logging.getLogger(__name__) @@ -82,6 +83,11 @@ 'nginx': 'skale_nginx', } +BASE_PASSIVE_FAIR_COMPOSE_SERVICES = { + 'fair-passive-admin': 'fair_passive_admin', + 'nginx': 'skale_nginx', +} + MONITORING_COMPOSE_SERVICES = { 'node-exporter': 'monitor_node_exporter', 'advisor': 'monitor_cadvisor', @@ -255,9 +261,9 @@ def is_volume_exists(name: str, dutils=None): return True -def compose_rm(node_type: NodeType, env={}): +def compose_rm(node_type: NodeType, node_mode: NodeMode, env={}): logger.info('Removing compose containers') - compose_path = get_compose_path(node_type) + compose_path = get_compose_path(node_type, node_mode) run_cmd( cmd=( 
'docker', @@ -273,81 +279,98 @@ def compose_rm(node_type: NodeType, env={}): logger.info('Compose containers removed') -def compose_pull(env: dict, node_type: NodeType): +def compose_pull(env: dict, node_type: NodeType, node_mode: NodeMode): logger.info('Pulling compose containers') - compose_path = get_compose_path(node_type) + compose_path = get_compose_path(node_type, node_mode) run_cmd(cmd=('docker', 'compose', '-f', compose_path, 'pull'), env=env) -def compose_build(env: dict, node_type: NodeType): +def compose_build(env: dict, node_type: NodeType, node_mode: NodeMode): logger.info('Building compose containers') - compose_path = get_compose_path(node_type) + compose_path = get_compose_path(node_type, node_mode) run_cmd(cmd=('docker', 'compose', '-f', compose_path, 'build'), env=env) -def get_compose_path(node_type: NodeType) -> str: - if node_type == NodeType.PASSIVE: +def get_compose_path(node_type: NodeType, node_mode: NodeMode) -> str: + if passive_skale(node_type, node_mode): return PASSIVE_COMPOSE_PATH - elif node_type == NodeType.FAIR: + elif active_fair(node_type, node_mode): return FAIR_COMPOSE_PATH - else: - return COMPOSE_PATH + elif passive_fair(node_type, node_mode): + return PASSIVE_FAIR_COMPOSE_PATH + return COMPOSE_PATH -def get_compose_services(node_type: NodeType) -> list[str]: - if node_type == NodeType.PASSIVE: - result = list(BASE_PASSIVE_COMPOSE_SERVICES) - elif node_type == NodeType.FAIR: - result = list(BASE_FAIR_COMPOSE_SERVICES) - else: - result = list(BASE_SKALE_COMPOSE_SERVICES) +def get_compose_services(node_type: NodeType, node_mode: NodeMode) -> list[str]: + if passive_skale(node_type, node_mode): + return list(BASE_PASSIVE_COMPOSE_SERVICES) + elif active_fair(node_type, node_mode): + return list(BASE_FAIR_COMPOSE_SERVICES) + elif passive_fair(node_type, node_mode): + return list(BASE_PASSIVE_FAIR_COMPOSE_SERVICES) + return list(BASE_SKALE_COMPOSE_SERVICES) - return result - -def get_up_compose_cmd(node_type: NodeType, services: 
list[str] | None = None) -> tuple: - compose_path = get_compose_path(node_type) +def get_up_compose_cmd( + node_type: NodeType, node_mode: NodeMode, services: list[str] | None = None +) -> tuple: + compose_path = get_compose_path(node_type, node_mode) if services is None: - services = get_compose_services(node_type) + services = get_compose_services(node_type, node_mode) return ('docker', 'compose', '-f', compose_path, 'up', '-d', *services) def compose_up( - env, node_type: NodeType, is_fair_boot: bool = False, services: list[str] | None = None + env, + node_type: NodeType, + node_mode: NodeMode, + is_fair_boot: bool = False, + services: list[str] | None = None, ): - if node_type == NodeType.PASSIVE: + if passive_skale(node_type, node_mode) or passive_fair(node_type, node_mode): logger.info('Running containers for passive node') - run_cmd(cmd=get_up_compose_cmd(node_type=NodeType.PASSIVE), env=env) + run_cmd(cmd=get_up_compose_cmd(node_type=node_type, node_mode=node_mode), env=env) return if 'SGX_CERTIFICATES_DIR_NAME' not in env: env['SGX_CERTIFICATES_DIR_NAME'] = SGX_CERTIFICATES_DIR_NAME - if node_type == NodeType.FAIR: + if active_fair(node_type, node_mode): logger.info('Running fair base set of containers') if is_fair_boot: logger.debug('Launching fair boot containers with env %s', env) run_cmd( cmd=get_up_compose_cmd( - node_type=NodeType.FAIR, services=list(BASE_FAIR_BOOT_COMPOSE_SERVICES) + node_type=node_type, + node_mode=node_mode, + services=list(BASE_FAIR_BOOT_COMPOSE_SERVICES), ), env=env, ) else: logger.debug('Launching fair containers with env %s', env) - run_cmd(cmd=get_up_compose_cmd(node_type=NodeType.FAIR, services=services), env=env) - else: + run_cmd( + cmd=get_up_compose_cmd( + node_type=node_type, + node_mode=node_mode, + services=services, + ), + env=env, + ) + elif active_skale(node_type, node_mode): logger.info('Running skale node base set of containers') logger.debug('Launching skale node containers with env %s', env) - 
run_cmd(cmd=get_up_compose_cmd(node_type=NodeType.SKALE), env=env) + run_cmd(cmd=get_up_compose_cmd(node_type=node_type, node_mode=node_mode), env=env) if 'TG_API_KEY' in env and 'TG_CHAT_ID' in env: logger.info('Running containers for Telegram notifications') run_cmd( cmd=get_up_compose_cmd( - node_type=NodeType.SKALE, services=list(NOTIFICATION_COMPOSE_SERVICES) + node_type=NodeType.SKALE, + node_mode=node_mode, + services=list(NOTIFICATION_COMPOSE_SERVICES), ), env=env, ) @@ -356,7 +379,9 @@ def compose_up( logger.info('Running monitoring containers') run_cmd( cmd=get_up_compose_cmd( - node_type=NodeType.SKALE, services=list(MONITORING_COMPOSE_SERVICES) + node_type=NodeType.SKALE, + node_mode=node_mode, + services=list(MONITORING_COMPOSE_SERVICES), ), env=env, ) @@ -396,24 +421,26 @@ def is_container_running(name: str, dclient: Optional[DockerClient] = None) -> b return False -def is_api_running(dclient: Optional[DockerClient] = None) -> bool: - if is_fair_node(): +def is_api_running(node_type: NodeType, dclient: Optional[DockerClient] = None) -> bool: + if node_type == NodeType.FAIR: return is_container_running(name='fair_api', dclient=dclient) else: return is_container_running(name='skale_api', dclient=dclient) -def is_admin_running(dclient: Optional[DockerClient] = None) -> bool: - if is_fair_node(): - if is_active_node(): - container_name = 'fair_admin' - else: - container_name = 'fair_passive_admin' - else: - if is_active_node(): - container_name = 'skale_admin' - else: - container_name = 'skale_passive_admin' +def is_admin_running( + node_type: NodeType, + node_mode: NodeMode, + dclient: Optional[DockerClient] = None, +) -> bool: + if active_fair(node_type, node_mode): + container_name = 'fair_admin' + elif passive_fair(node_type, node_mode): + container_name = 'fair_passive_admin' + elif active_skale(node_type, node_mode): + container_name = 'skale_admin' + elif passive_skale(node_type, node_mode): + container_name = 'skale_passive_admin' return 
is_container_running(name=container_name, dclient=dclient) diff --git a/tests/configs/configs_env_validate_test.py b/tests/configs/configs_env_validate_test.py index 4cdfc959..2db057ff 100644 --- a/tests/configs/configs_env_validate_test.py +++ b/tests/configs/configs_env_validate_test.py @@ -16,13 +16,14 @@ ALLOWED_ENV_TYPES, FairBootUserConfig, FairUserConfig, + PassiveFairUserConfig, SkaleUserConfig, PassiveSkaleUserConfig, get_user_config_class, get_validated_user_config, validate_env_type, ) -from node_cli.utils.node_type import NodeType +from node_cli.utils.node_type import NodeType, NodeMode ENDPOINT = 'http://localhost:8545' @@ -37,17 +38,20 @@ def json(self): @pytest.mark.parametrize( - 'node_type, is_fair_boot, expected_type', + 'node_type, node_mode, is_fair_boot, expected_type', [ - (NodeType.SKALE, False, SkaleUserConfig), - (NodeType.PASSIVE, False, PassiveSkaleUserConfig), - (NodeType.FAIR, True, FairBootUserConfig), - (NodeType.FAIR, False, FairUserConfig), + (NodeType.SKALE, NodeMode.ACTIVE, False, SkaleUserConfig), + (NodeType.SKALE, NodeMode.PASSIVE, False, PassiveSkaleUserConfig), + (NodeType.FAIR, NodeMode.ACTIVE, True, FairBootUserConfig), + (NodeType.FAIR, NodeMode.ACTIVE, False, FairUserConfig), + (NodeType.FAIR, NodeMode.PASSIVE, False, PassiveFairUserConfig), ], - ids=['regular', 'passive', 'fair_boot', 'fair_regular'], + ids=['skale_active', 'skale_passive', 'fair_boot', 'fair_active', 'fair_passive'], ) -def test_build_env_params_keys(node_type, is_fair_boot, expected_type): - env_type = get_user_config_class(node_type=node_type, is_fair_boot=is_fair_boot) +def test_build_env_params_keys(node_type, node_mode, is_fair_boot, expected_type): + env_type = get_user_config_class( + node_type=node_type, node_mode=node_mode, is_fair_boot=is_fair_boot + ) assert env_type == expected_type @@ -166,7 +170,9 @@ def test_validate_env_alias_or_address_with_alias(requests_mock): def test_get_validated_env_config_missing_file(): with 
pytest.raises(SystemExit): - get_validated_user_config(env_filepath='nonexistent.env', node_type=NodeType.SKALE) + get_validated_user_config( + env_filepath='nonexistent.env', node_type=NodeType.SKALE, node_mode=NodeMode.ACTIVE + ) def test_get_validated_env_config_unreadable_file(tmp_path): @@ -176,6 +182,8 @@ def test_get_validated_env_config_unreadable_file(tmp_path): try: os.chmod(env_file, 0o000) with pytest.raises(PermissionError): - get_validated_user_config(env_filepath=str(env_file), node_type=NodeType.SKALE) + get_validated_user_config( + env_filepath=str(env_file), node_type=NodeType.SKALE, node_mode=NodeMode.ACTIVE + ) finally: os.chmod(env_file, original_mode) From 8e2e70d529cf9ce8216647bb8fb3167ec25e24e2 Mon Sep 17 00:00:00 2001 From: Dmytro Date: Sun, 10 Aug 2025 16:30:16 +0100 Subject: [PATCH 182/332] Add passive fair node cli --- node_cli/cli/fair_boot.py | 2 +- node_cli/cli/fair_node.py | 21 ++-- node_cli/cli/passive_fair_node.py | 86 ++++++++++++++++ node_cli/fair/__init__.py | 15 +++ node_cli/fair/{fair_node.py => active.py} | 112 ++++----------------- node_cli/fair/{fair_boot.py => boot.py} | 0 node_cli/fair/common.py | 113 ++++++++++++++++++++++ node_cli/fair/passive.py | 25 +++++ node_cli/main.py | 2 + node_cli/operations/fair.py | 6 +- tests/fair/fair_node_test.py | 16 +-- 11 files changed, 283 insertions(+), 115 deletions(-) create mode 100644 node_cli/cli/passive_fair_node.py rename node_cli/fair/{fair_node.py => active.py} (67%) rename node_cli/fair/{fair_boot.py => boot.py} (100%) create mode 100644 node_cli/fair/common.py create mode 100644 node_cli/fair/passive.py diff --git a/node_cli/cli/fair_boot.py b/node_cli/cli/fair_boot.py index 24f3490a..f5dce9b1 100644 --- a/node_cli/cli/fair_boot.py +++ b/node_cli/cli/fair_boot.py @@ -22,7 +22,7 @@ from node_cli.configs import DEFAULT_NODE_BASE_PORT from node_cli.core.node import get_node_info, get_node_signature from node_cli.core.node import register_node as register -from 
node_cli.fair.fair_boot import init, update +from node_cli.fair.boot import init, update from node_cli.utils.helper import IP_TYPE, abort_if_false, error_exit, streamed_cmd diff --git a/node_cli/cli/fair_node.py b/node_cli/cli/fair_node.py index 19b06c93..d76825b1 100644 --- a/node_cli/cli/fair_node.py +++ b/node_cli/cli/fair_node.py @@ -20,19 +20,22 @@ import click from node_cli.core.node import backup -from node_cli.fair.fair_node import change_ip as change_ip_fair -from node_cli.fair.fair_node import cleanup as fair_cleanup -from node_cli.fair.fair_node import exit as exit_fair -from node_cli.fair.fair_node import ( + +from node_cli.fair import change_ip as change_ip_fair +from node_cli.fair import cleanup as fair_cleanup +from node_cli.fair import exit as exit_fair +from node_cli.fair import ( get_node_info, migrate_from_boot, repair_chain, - restore_fair, + restore as restore_fair, ) -from node_cli.fair.fair_node import init as init_fair -from node_cli.fair.fair_node import register as register_fair -from node_cli.fair.fair_node import update as update_fair +from node_cli.fair import init as init_fair +from node_cli.fair import register as register_fair +from node_cli.fair import update as update_fair + from node_cli.utils.helper import IP_TYPE, URL_OR_ANY_TYPE, abort_if_false, streamed_cmd +from node_cli.utils.node_type import NodeMode from node_cli.utils.texts import safe_load_texts TEXTS = safe_load_texts() @@ -58,7 +61,7 @@ def fair_node_info(format): @click.argument('env_filepath') @streamed_cmd def init_node(env_filepath: str): - init_fair(env_filepath=env_filepath) + init_fair(node_mode=NodeMode.ACTIVE, env_filepath=env_filepath) @node.command('register', help=TEXTS['fair']['node']['register']['help']) diff --git a/node_cli/cli/passive_fair_node.py b/node_cli/cli/passive_fair_node.py new file mode 100644 index 00000000..b0fb6c08 --- /dev/null +++ b/node_cli/cli/passive_fair_node.py @@ -0,0 +1,86 @@ +# -*- coding: utf-8 -*- +# +# This file is part of 
node-cli +# +# Copyright (C) 2025-Present SKALE Labs +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Affero General Public License for more details. +# +# You should have received a copy of the GNU Affero General Public License +# along with this program. If not, see . + +import click + +from node_cli.fair import init as init_fair +from node_cli.fair import update as update_fair +from node_cli.fair import cleanup as cleanup_fair +from node_cli.utils.helper import abort_if_false, streamed_cmd +from node_cli.utils.node_type import NodeMode +from node_cli.utils.texts import safe_load_texts + +TEXTS = safe_load_texts() + + +@click.group() +def passive_fair_node_cli(): + pass + + +@passive_fair_node_cli.group(help='Commands for passive Fair Node operations.') +def passive_node(): + pass + + +@passive_node.command('init', help='Initialize a passive Fair node') +@click.argument('env_filepath') +@streamed_cmd +def init_passive_node(env_filepath: str): + init_fair(node_mode=NodeMode.PASSIVE, env_filepath=env_filepath) + + +@passive_node.command('update', help='Update Fair node') +@click.argument('env_filepath') +@click.option( + '--yes', + is_flag=True, + callback=abort_if_false, + expose_value=False, + prompt='Are you sure you want to update Fair node software?', +) +@click.option('--pull-config', 'pull_config_for_schain', hidden=True, type=str) +@click.option( + '--force-skaled-start', + 'force_skaled_start', + hidden=True, + type=bool, + default=False, + is_flag=True, +) +@streamed_cmd +def update_node(env_filepath: str, pull_config_for_schain, 
force_skaled_start: bool): + update_fair( + env_filepath=env_filepath, + pull_config_for_schain=pull_config_for_schain, + force_skaled_start=force_skaled_start, + ) + + +@passive_node.command('cleanup', help='Cleanup Fair node.') +@click.option( + '--yes', + is_flag=True, + callback=abort_if_false, + expose_value=False, + prompt='Are you sure you want to cleanup Fair node?', +) +@streamed_cmd +def cleanup_node(): + cleanup_fair() diff --git a/node_cli/fair/__init__.py b/node_cli/fair/__init__.py index e69de29b..acac7093 100644 --- a/node_cli/fair/__init__.py +++ b/node_cli/fair/__init__.py @@ -0,0 +1,15 @@ +from node_cli.fair.common import ( + init as init, + update as update, + cleanup as cleanup, + repair_chain as repair_chain, +) +from node_cli.fair.active import ( + get_node_info_plain as get_node_info_plain, + get_node_info as get_node_info, + migrate_from_boot as migrate_from_boot, + register as register, + change_ip as change_ip, + exit as exit, + restore as restore, +) diff --git a/node_cli/fair/fair_node.py b/node_cli/fair/active.py similarity index 67% rename from node_cli/fair/fair_node.py rename to node_cli/fair/active.py index a8a29ebb..44ea1961 100644 --- a/node_cli/fair/fair_node.py +++ b/node_cli/fair/active.py @@ -23,16 +23,10 @@ from typing import cast from node_cli.configs import DEFAULT_SKALED_BASE_PORT, RESTORE_SLEEP_TIMEOUT, SKALE_DIR -from node_cli.configs.user import SKALE_DIR_ENV_FILEPATH -from node_cli.core.docker_config import cleanup_docker_configuration from node_cli.core.host import is_node_inited, save_env_params from node_cli.core.node import compose_node_env, is_base_containers_alive -from node_cli.core.node_options import get_node_mode from node_cli.operations import ( FairUpdateType, - cleanup_fair_op, - init_fair_op, - repair_fair_op, restore_fair_op, update_fair_op, ) @@ -66,24 +60,6 @@ def get_node_info(format): print_node_info_fair(node_info) -@check_not_inited -def restore_fair(backup_path, env_filepath, config_only=False): - 
node_mode = NodeMode.ACTIVE - env = compose_node_env(env_filepath, node_type=NodeType.FAIR, node_mode=node_mode) - if env is None: - return - save_env_params(env_filepath) - env['SKALE_DIR'] = SKALE_DIR - - restored_ok = restore_fair_op( - node_mode=node_mode, env=env, backup_path=backup_path, config_only=config_only - ) - if not restored_ok: - error_exit('Restore operation failed', exit_code=CLIExitCodes.OPERATION_EXECUTION_ERROR) - time.sleep(RESTORE_SLEEP_TIMEOUT) - print('Fair node is restored from backup') - - @check_inited @check_user def migrate_from_boot( @@ -112,68 +88,6 @@ def migrate_from_boot( logger.info('Migration from boot to fair completed successfully') -@check_inited -@check_user -def update( - env_filepath: str, pull_config_for_schain: str | None = None, force_skaled_start: bool = False -) -> None: - logger.info( - 'Updating fair node: %s, pull_config_for_schain: %s, force_skaled_start: %s', - env_filepath, - pull_config_for_schain, - force_skaled_start, - ) - node_mode = get_node_mode() - env = compose_node_env( - env_filepath, - inited_node=True, - sync_schains=False, - node_type=NodeType.FAIR, - node_mode=node_mode, - pull_config_for_schain=pull_config_for_schain, - ) - update_ok = update_fair_op( - node_mode=node_mode, - env_filepath=env_filepath, - env=env, - update_type=FairUpdateType.REGULAR, - force_skaled_start=force_skaled_start, - ) - alive = is_base_containers_alive(node_type=NodeType.FAIR, node_mode=node_mode) - if not update_ok or not alive: - print_node_cmd_error() - return - else: - logger.info('Fair update completed successfully') - - -@check_user -def cleanup() -> None: - node_mode = get_node_mode() - env = compose_node_env( - SKALE_DIR_ENV_FILEPATH, save=False, node_type=NodeType.FAIR, node_mode=node_mode - ) - cleanup_fair_op(node_mode=node_mode, env=env) - logger.info('Fair node was cleaned up, all containers and data removed') - cleanup_docker_configuration() - - -@check_not_inited -def init(env_filepath: str) -> None: - 
node_mode = NodeMode.ACTIVE - env = compose_node_env(env_filepath, node_type=NodeType.FAIR, node_mode=node_mode) - if env is None: - return - save_env_params(env_filepath) - env['SKALE_DIR'] = SKALE_DIR - - init_ok = init_fair_op(env_filepath, env) - if not init_ok: - error_exit('Init operation failed', exit_code=CLIExitCodes.OPERATION_EXECUTION_ERROR) - time.sleep(RESTORE_SLEEP_TIMEOUT) - print('Fair node is initialized') - - @check_inited @check_user def register(ip: str) -> None: @@ -193,14 +107,6 @@ def register(ip: str) -> None: error_exit(error_msg, exit_code=CLIExitCodes.BAD_API_RESPONSE) -def repair_chain(snapshot_from: str = 'any') -> None: - node_mode = get_node_mode() - env = compose_node_env( - SKALE_DIR_ENV_FILEPATH, save=False, node_type=NodeType.FAIR, node_mode=node_mode - ) - repair_fair_op(node_mode=node_mode, env=env, snapshot_from=snapshot_from) - - @check_inited @check_user def change_ip(ip: str) -> None: @@ -236,3 +142,21 @@ def exit() -> None: error_msg = payload logger.error(f'Node exit error {error_msg}') error_exit(error_msg, exit_code=CLIExitCodes.BAD_API_RESPONSE) + + +@check_not_inited +def restore(backup_path, env_filepath, config_only=False): + node_mode = NodeMode.ACTIVE + env = compose_node_env(env_filepath, node_type=NodeType.FAIR, node_mode=node_mode) + if env is None: + return + save_env_params(env_filepath) + env['SKALE_DIR'] = SKALE_DIR + + restored_ok = restore_fair_op( + node_mode=node_mode, env=env, backup_path=backup_path, config_only=config_only + ) + if not restored_ok: + error_exit('Restore operation failed', exit_code=CLIExitCodes.OPERATION_EXECUTION_ERROR) + time.sleep(RESTORE_SLEEP_TIMEOUT) + print('Fair node is restored from backup') diff --git a/node_cli/fair/fair_boot.py b/node_cli/fair/boot.py similarity index 100% rename from node_cli/fair/fair_boot.py rename to node_cli/fair/boot.py diff --git a/node_cli/fair/common.py b/node_cli/fair/common.py new file mode 100644 index 00000000..9e0163f6 --- /dev/null +++ 
b/node_cli/fair/common.py @@ -0,0 +1,113 @@ +# -*- coding: utf-8 -*- +# +# This file is part of node-cli +# +# Copyright (C) 2025-Present SKALE Labs +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Affero General Public License for more details. +# +# You should have received a copy of the GNU Affero General Public License +# along with this program. If not, see . + +import time +import logging + +from node_cli.configs import RESTORE_SLEEP_TIMEOUT, SKALE_DIR +from node_cli.configs.user import SKALE_DIR_ENV_FILEPATH +from node_cli.core.docker_config import cleanup_docker_configuration +from node_cli.core.node import compose_node_env, is_base_containers_alive +from node_cli.core.node_options import get_node_mode +from node_cli.operations import ( + FairUpdateType, + cleanup_fair_op, + repair_fair_op, + update_fair_op, + init_fair_op, +) +from node_cli.core.host import save_env_params +from node_cli.utils.decorators import check_inited, check_not_inited, check_user +from node_cli.utils.node_type import NodeMode, NodeType +from node_cli.utils.print_formatters import print_node_cmd_error +from node_cli.utils.texts import safe_load_texts +from node_cli.utils.exit_codes import CLIExitCodes +from node_cli.utils.helper import error_exit + +logger = logging.getLogger(__name__) +TEXTS = safe_load_texts() + + +@check_not_inited +def init(node_mode: NodeMode, env_filepath: str) -> None: + env = compose_node_env(env_filepath, node_type=NodeType.FAIR, node_mode=node_mode) + if env is None: + return + save_env_params(env_filepath) + env['SKALE_DIR'] = SKALE_DIR + + 
init_ok = init_fair_op(node_mode, env_filepath, env) + if not init_ok: + error_exit('Init operation failed', exit_code=CLIExitCodes.OPERATION_EXECUTION_ERROR) + time.sleep(RESTORE_SLEEP_TIMEOUT) + print('Fair node is initialized') + + +@check_user +def cleanup() -> None: + node_mode = get_node_mode() + env = compose_node_env( + SKALE_DIR_ENV_FILEPATH, save=False, node_type=NodeType.FAIR, node_mode=node_mode + ) + cleanup_fair_op(node_mode=node_mode, env=env) + logger.info('Fair node was cleaned up, all containers and data removed') + cleanup_docker_configuration() + + +@check_inited +@check_user +def update( + env_filepath: str, pull_config_for_schain: str | None = None, force_skaled_start: bool = False +) -> None: + logger.info( + 'Updating fair node: %s, pull_config_for_schain: %s, force_skaled_start: %s', + env_filepath, + pull_config_for_schain, + force_skaled_start, + ) + node_mode = get_node_mode() + env = compose_node_env( + env_filepath, + inited_node=True, + sync_schains=False, + node_type=NodeType.FAIR, + node_mode=node_mode, + pull_config_for_schain=pull_config_for_schain, + ) + update_ok = update_fair_op( + node_mode=node_mode, + env_filepath=env_filepath, + env=env, + update_type=FairUpdateType.REGULAR, + force_skaled_start=force_skaled_start, + ) + alive = is_base_containers_alive(node_type=NodeType.FAIR, node_mode=node_mode) + if not update_ok or not alive: + print_node_cmd_error() + return + else: + logger.info('Fair update completed successfully') + + +def repair_chain(snapshot_from: str = 'any') -> None: + node_mode = get_node_mode() + env = compose_node_env( + SKALE_DIR_ENV_FILEPATH, save=False, node_type=NodeType.FAIR, node_mode=node_mode + ) + repair_fair_op(node_mode=node_mode, env=env, snapshot_from=snapshot_from) diff --git a/node_cli/fair/passive.py b/node_cli/fair/passive.py new file mode 100644 index 00000000..be047233 --- /dev/null +++ b/node_cli/fair/passive.py @@ -0,0 +1,25 @@ +# -*- coding: utf-8 -*- +# +# This file is part of 
node-cli +# +# Copyright (C) 2025-Present SKALE Labs +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Affero General Public License for more details. +# +# You should have received a copy of the GNU Affero General Public License +# along with this program. If not, see . + +import logging + +from node_cli.utils.texts import safe_load_texts + +logger = logging.getLogger(__name__) +TEXTS = safe_load_texts() diff --git a/node_cli/main.py b/node_cli/main.py index e949f682..a2d3aba9 100644 --- a/node_cli/main.py +++ b/node_cli/main.py @@ -40,6 +40,7 @@ from node_cli.cli.passive_node import passive_node_cli from node_cli.cli.fair_boot import fair_boot_cli from node_cli.cli.fair_node import fair_node_cli +from node_cli.cli.passive_fair_node import passive_fair_node_cli from node_cli.cli.chain import chain_cli from node_cli.core.host import init_logs_dir from node_cli.utils.node_type import NodeType @@ -90,6 +91,7 @@ def get_sources_list() -> List[click.MultiCommand]: logs_cli, fair_boot_cli, fair_node_cli, + passive_fair_node_cli, chain_cli, wallet_cli, ssl_cli, diff --git a/node_cli/operations/fair.py b/node_cli/operations/fair.py index d9898a10..a35854a3 100644 --- a/node_cli/operations/fair.py +++ b/node_cli/operations/fair.py @@ -78,7 +78,7 @@ class FairUpdateType(Enum): @checked_host -def init(env_filepath: str, env: dict) -> bool: +def init(node_mode: NodeMode, env_filepath: str, env: dict) -> bool: sync_skale_node() ensure_btrfs_kernel_module_autoloaded() cleanup_volume_artifacts(env['DISK_MOUNTPOINT']) @@ -103,8 +103,8 @@ def init(env_filepath: str, 
env: dict) -> bool: distro.id(), distro.version(), ) - update_images(env=env, node_type=NodeType.FAIR, node_mode=NodeMode.ACTIVE) - compose_up(env=env, node_type=NodeType.FAIR, node_mode=NodeMode.ACTIVE) + update_images(env=env, node_type=NodeType.FAIR, node_mode=node_mode) + compose_up(env=env, node_type=NodeType.FAIR, node_mode=node_mode) wait_for_container(REDIS_SERVICE_DICT['redis']) time.sleep(REDIS_START_TIMEOUT) return True diff --git a/tests/fair/fair_node_test.py b/tests/fair/fair_node_test.py index 19e8fe4c..a4f1423c 100644 --- a/tests/fair/fair_node_test.py +++ b/tests/fair/fair_node_test.py @@ -4,9 +4,9 @@ from node_cli.configs import SKALE_DIR from node_cli.configs.user import SKALE_DIR_ENV_FILEPATH -from node_cli.fair.fair_boot import init as init_boot -from node_cli.fair.fair_boot import update -from node_cli.fair.fair_node import cleanup, migrate_from_boot, restore_fair +from node_cli.fair.boot import init as init_boot +from node_cli.fair.boot import update +from node_cli.fair import cleanup, migrate_from_boot, restore from node_cli.operations.fair import FairUpdateType from node_cli.utils.node_type import NodeType @@ -28,7 +28,7 @@ def test_restore_fair( mock_restore_op.return_value = True backup_path = '/fake/backup' - restore_fair(backup_path, valid_env_file) + restore(backup_path, valid_env_file) mock_compose_env.assert_called_once_with(valid_env_file, node_type=NodeType.FAIR) mock_save_env.assert_called_once_with(valid_env_file) @@ -221,7 +221,7 @@ def test_cleanup_fails_when_user_invalid( """Test that cleanup fails when user validation fails""" import pytest - from node_cli.fair.fair_node import cleanup + from node_cli.fair import cleanup with pytest.raises(SystemExit): cleanup() @@ -270,7 +270,7 @@ def test_exit_success( resource_alloc, meta_file_v3, ): - from node_cli.fair.fair_node import exit + from node_cli.fair import exit mock_post_request.return_value = ('ok', {}) @@ -292,7 +292,7 @@ def test_exit_error( resource_alloc, meta_file_v3, 
): - from node_cli.fair.fair_node import exit + from node_cli.fair import exit error_msg = 'Exit failed' mock_post_request.return_value = ('error', error_msg) @@ -313,7 +313,7 @@ def test_exit_not_inited( meta_file_v3, capsys, ): - from node_cli.fair.fair_node import exit + from node_cli.fair import exit exit() From a82671ebd5c2bd0b2eefacfd8ba20799fc04caec Mon Sep 17 00:00:00 2001 From: Dmytro Date: Sun, 10 Aug 2025 16:48:06 +0100 Subject: [PATCH 183/332] Update imports structure --- .github/workflows/test.yml | 10 ---------- node_cli/cli/fair_node.py | 18 +++++++++--------- node_cli/cli/passive_fair_node.py | 6 +++--- node_cli/fair/__init__.py | 16 ---------------- 4 files changed, 12 insertions(+), 38 deletions(-) diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index ed804b5c..42173eeb 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -47,16 +47,6 @@ jobs: - name: Check build - skale run: sudo /home/ubuntu/dist/skale-test-Linux-x86_64 - - name: Build binary - passive - run: | - mkdir -p ./dist - docker build . 
-t node-cli-builder - docker run -v /home/ubuntu/dist:/app/dist node-cli-builder bash scripts/build.sh test test passive - docker rm -f $(docker ps -aq) - - - name: Check build - passive - run: sudo /home/ubuntu/dist/skale-test-Linux-x86_64-passive - - name: Build binary - fair run: | mkdir -p ./dist diff --git a/node_cli/cli/fair_node.py b/node_cli/cli/fair_node.py index dad84db2..6013b44e 100644 --- a/node_cli/cli/fair_node.py +++ b/node_cli/cli/fair_node.py @@ -21,19 +21,19 @@ from node_cli.core.node import backup -from node_cli.fair import change_ip as change_ip_fair -from node_cli.fair import cleanup as fair_cleanup -from node_cli.fair import exit as exit_fair -from node_cli.fair import ( +from node_cli.fair.active import change_ip as change_ip_fair +from node_cli.fair.common import cleanup as fair_cleanup +from node_cli.fair.active import exit as exit_fair +from node_cli.fair.active import ( get_node_info, migrate_from_boot, - repair_chain, restore as restore_fair, ) -from node_cli.fair import init as init_fair -from node_cli.fair import register as register_fair -from node_cli.fair import update as update_fair -from node_cli.fair import set_domain_name as set_domain_name_fair +from node_cli.fair.common import init as init_fair +from node_cli.fair.active import register as register_fair +from node_cli.fair.common import update as update_fair +from node_cli.fair.active import set_domain_name as set_domain_name_fair +from node_cli.fair.common import repair_chain from node_cli.utils.helper import IP_TYPE, URL_OR_ANY_TYPE, abort_if_false, streamed_cmd from node_cli.utils.node_type import NodeMode diff --git a/node_cli/cli/passive_fair_node.py b/node_cli/cli/passive_fair_node.py index b0fb6c08..6c63bff7 100644 --- a/node_cli/cli/passive_fair_node.py +++ b/node_cli/cli/passive_fair_node.py @@ -19,9 +19,9 @@ import click -from node_cli.fair import init as init_fair -from node_cli.fair import update as update_fair -from node_cli.fair import cleanup as cleanup_fair 
+from node_cli.fair.common import init as init_fair +from node_cli.fair.common import update as update_fair +from node_cli.fair.common import cleanup as cleanup_fair from node_cli.utils.helper import abort_if_false, streamed_cmd from node_cli.utils.node_type import NodeMode from node_cli.utils.texts import safe_load_texts diff --git a/node_cli/fair/__init__.py b/node_cli/fair/__init__.py index eb92c927..e69de29b 100644 --- a/node_cli/fair/__init__.py +++ b/node_cli/fair/__init__.py @@ -1,16 +0,0 @@ -from node_cli.fair.common import ( - init as init, - update as update, - cleanup as cleanup, - repair_chain as repair_chain, -) -from node_cli.fair.active import ( - get_node_info_plain as get_node_info_plain, - get_node_info as get_node_info, - migrate_from_boot as migrate_from_boot, - register as register, - change_ip as change_ip, - set_domain_name as set_domain_name, - exit as exit, - restore as restore, -) From d3b9387725c7f4709cdb96af17bd2f8c555acd2b Mon Sep 17 00:00:00 2001 From: Dmytro Date: Mon, 11 Aug 2025 12:23:04 +0100 Subject: [PATCH 184/332] Update node-cli tests --- node_cli/configs/user.py | 24 +++--- tests/cli/node_test.py | 12 +-- tests/cli/passive_node_test.py | 2 +- tests/core/core_node_test.py | 130 ++++++++++++++++++++++----------- tests/core/nginx_test.py | 27 +++---- tests/fair/fair_node_test.py | 79 ++++++++++---------- 6 files changed, 163 insertions(+), 111 deletions(-) diff --git a/node_cli/configs/user.py b/node_cli/configs/user.py index a4c70d5b..b094150d 100644 --- a/node_cli/configs/user.py +++ b/node_cli/configs/user.py @@ -29,6 +29,12 @@ from node_cli.configs.alias_address_validation import ContractType, validate_alias_or_address from node_cli.utils.helper import error_exit from node_cli.utils.node_type import NodeMode, NodeType +from node_cli.core.node_options import ( + active_fair, + active_skale, + passive_skale, + passive_fair, +) SKALE_DIR_ENV_FILEPATH = os.path.join(SKALE_DIR, '.env') CONFIGS_ENV_FILEPATH = 
os.path.join(CONTAINER_CONFIG_PATH, '.env') @@ -188,16 +194,14 @@ def get_user_config_class( ) -> type[BaseUserConfig]: if node_type == NodeType.FAIR and is_fair_boot: user_config_class = FairBootUserConfig - elif node_type == NodeType.FAIR: - if node_mode == NodeMode.PASSIVE: - user_config_class = PassiveFairUserConfig - else: - user_config_class = FairUserConfig - elif node_type == NodeType.SKALE: - if node_mode == NodeMode.PASSIVE: - user_config_class = PassiveSkaleUserConfig - else: - user_config_class = SkaleUserConfig + elif passive_fair(node_type, node_mode): + user_config_class = PassiveFairUserConfig + elif active_fair(node_type, node_mode): + user_config_class = FairUserConfig + elif passive_skale(node_type, node_mode): + user_config_class = PassiveSkaleUserConfig + elif active_skale(node_type, node_mode): + user_config_class = SkaleUserConfig return user_config_class diff --git a/tests/cli/node_test.py b/tests/cli/node_test.py index c20b390c..13d3d0c5 100644 --- a/tests/cli/node_test.py +++ b/tests/cli/node_test.py @@ -42,7 +42,7 @@ from node_cli.utils.exit_codes import CLIExitCodes from node_cli.utils.helper import init_default_logger from node_cli.utils.meta import CliMeta -from node_cli.utils.node_type import NodeType +from node_cli.utils.node_type import NodeType, NodeMode from tests.helper import ( response_mock, run_command, @@ -322,14 +322,14 @@ def test_backup(): @pytest.mark.parametrize( - 'node_type,test_user_conf', + 'node_type,node_mode,test_user_conf', [ - (NodeType.SKALE, 'regular_user_conf'), - (NodeType.FAIR, 'fair_user_conf'), - (NodeType.PASSIVE, 'passive_user_conf'), + (NodeType.SKALE, NodeMode.ACTIVE, 'regular_user_conf'), + (NodeType.SKALE, NodeMode.PASSIVE, 'passive_user_conf'), + (NodeType.FAIR, NodeMode.ACTIVE, 'fair_user_conf'), ], ) -def test_restore(request, node_type, test_user_conf, mocked_g_config, tmp_path): +def test_restore(request, node_type, node_mode, test_user_conf, mocked_g_config, tmp_path): 
pathlib.Path(SKALE_DIR).mkdir(parents=True, exist_ok=True) result = run_command(backup_node, [tmp_path]) backup_path = result.output.replace('Backup archive successfully created: ', '').replace( diff --git a/tests/cli/passive_node_test.py b/tests/cli/passive_node_test.py index 2b5a587d..94cde559 100644 --- a/tests/cli/passive_node_test.py +++ b/tests/cli/passive_node_test.py @@ -78,7 +78,7 @@ def test_init_passive_archive(mocked_g_config, clean_node_options, passive_user_ mock.patch('node_cli.operations.base.configure_nftables'), mock.patch('node_cli.utils.decorators.is_node_inited', return_value=False), mock.patch('node_cli.configs.user.validate_alias_or_address'), - mock.patch('node_cli.cli.node.TYPE', NodeType.PASSIVE), + mock.patch('node_cli.cli.node.TYPE', NodeType.SKALE), ): result = run_command(_init_passive, [passive_user_conf.as_posix(), '--archive']) node_options = NodeOptions() diff --git a/tests/core/core_node_test.py b/tests/core/core_node_test.py index ba1ab95f..b35f7b64 100644 --- a/tests/core/core_node_test.py +++ b/tests/core/core_node_test.py @@ -5,6 +5,7 @@ from pathlib import Path import docker +from docker import errors as docker_errors import mock import pytest import requests @@ -21,7 +22,7 @@ update, ) from node_cli.utils.meta import CliMeta -from node_cli.utils.node_type import NodeType +from node_cli.utils.node_type import NodeType, NodeMode from tests.helper import response_mock, safe_update_api_response, subprocess_run_mock from tests.resources_test import BIG_DISK_SIZE @@ -37,43 +38,46 @@ 'passive_WRONG_CONTAINER_8', ] -NODE_TYPE_BOOT_COMBINATIONS: list[tuple[NodeType, bool]] = [ - (NodeType.SKALE, False), - (NodeType.PASSIVE, False), - (NodeType.FAIR, True), - (NodeType.FAIR, False), +NODE_TYPE_MODE_BOOT_COMBINATIONS: list[tuple[NodeType, NodeMode, bool]] = [ + (NodeType.SKALE, NodeMode.ACTIVE, False), + (NodeType.SKALE, NodeMode.PASSIVE, False), + (NodeType.FAIR, NodeMode.ACTIVE, True), + (NodeType.FAIR, NodeMode.ACTIVE, False), ] 
alive_test_params = [ pytest.param( node_type, + node_mode, is_boot, - get_expected_container_names(node_type, is_boot), - id=f'{node_type.name}-boot_{is_boot}-correct_containers', + get_expected_container_names(node_type, node_mode, is_boot), + id=f'{node_type.name}-{node_mode.name}-boot_{is_boot}-correct_containers', ) - for node_type, is_boot in NODE_TYPE_BOOT_COMBINATIONS + for node_type, node_mode, is_boot in NODE_TYPE_MODE_BOOT_COMBINATIONS ] wrong_test_params = [ pytest.param( node_type, + node_mode, is_boot, WRONG_CONTAINERS, - id=f'{node_type.name}-boot_{is_boot}-wrong_containers', + id=f'{node_type.name}-{node_mode.name}-boot_{is_boot}-wrong_containers', ) - for node_type, is_boot in NODE_TYPE_BOOT_COMBINATIONS + for node_type, node_mode, is_boot in NODE_TYPE_MODE_BOOT_COMBINATIONS ] missing_test_params = [] -for node_type, is_boot in NODE_TYPE_BOOT_COMBINATIONS: - expected_names = get_expected_container_names(node_type, is_boot) +for node_type, node_mode, is_boot in NODE_TYPE_MODE_BOOT_COMBINATIONS: + expected_names = get_expected_container_names(node_type, node_mode, is_boot) containers_to_create = expected_names[1:] missing_test_params.append( pytest.param( node_type, + node_mode, is_boot, containers_to_create, - id=f'{node_type.name}-boot_{is_boot}-missing_containers', + id=f'{node_type.name}-{node_mode.name}-boot_{is_boot}-missing_containers', ) ) @@ -87,7 +91,7 @@ def manage_node_containers(request): try: existing_container = dclient.containers.get(name) existing_container.remove(force=True) - except docker.errors.NotFound: + except docker_errors.NotFound: pass container = dclient.containers.run( ALPINE_IMAGE_NAME, @@ -110,50 +114,63 @@ def manage_node_containers(request): try: container_obj.remove(force=True) cleaned_count += 1 - except docker.errors.NotFound: + except docker_errors.NotFound: pass @pytest.mark.parametrize( - 'node_type, is_boot, manage_node_containers', + 'node_type, node_mode, is_boot, manage_node_containers', alive_test_params, 
indirect=['manage_node_containers'], ) -def test_is_base_containers_alive(manage_node_containers, node_type, is_boot): - assert is_base_containers_alive(node_type=node_type, is_fair_boot=is_boot) is True +def test_is_base_containers_alive(manage_node_containers, node_type, node_mode, is_boot): + assert ( + is_base_containers_alive(node_type=node_type, node_mode=node_mode, is_fair_boot=is_boot) + is True + ) @pytest.mark.parametrize( - 'node_type, is_boot, manage_node_containers', + 'node_type, node_mode, is_boot, manage_node_containers', wrong_test_params, indirect=['manage_node_containers'], ) -def test_is_base_containers_alive_wrong(manage_node_containers, node_type, is_boot): - assert is_base_containers_alive(node_type=node_type, is_fair_boot=is_boot) is False +def test_is_base_containers_alive_wrong(manage_node_containers, node_type, node_mode, is_boot): + assert ( + is_base_containers_alive(node_type=node_type, node_mode=node_mode, is_fair_boot=is_boot) + is False + ) @pytest.mark.parametrize( - 'node_type, is_boot, manage_node_containers', + 'node_type, node_mode, is_boot, manage_node_containers', missing_test_params, indirect=['manage_node_containers'], ) -def test_is_base_containers_alive_missing(manage_node_containers, node_type, is_boot): - assert is_base_containers_alive(node_type=node_type, is_fair_boot=is_boot) is False +def test_is_base_containers_alive_missing(manage_node_containers, node_type, node_mode, is_boot): + assert ( + is_base_containers_alive(node_type=node_type, node_mode=node_mode, is_fair_boot=is_boot) + is False + ) -@pytest.mark.parametrize('node_type, is_boot', NODE_TYPE_BOOT_COMBINATIONS) -def test_is_base_containers_alive_empty(node_type, is_boot): - assert is_base_containers_alive(node_type=node_type, is_fair_boot=is_boot) is False +@pytest.mark.parametrize('node_type, node_mode, is_boot', NODE_TYPE_MODE_BOOT_COMBINATIONS) +def test_is_base_containers_alive_empty(node_type, node_mode, is_boot): + assert ( + 
is_base_containers_alive(node_type=node_type, node_mode=node_mode, is_fair_boot=is_boot) + is False + ) @pytest.mark.parametrize( ( - 'node_type, test_user_conf, is_boot, inited_node, sync_schains, expected_mnt_dir,' + 'node_type, node_mode, test_user_conf, is_boot, inited_node, sync_schains, expected_mnt_dir,' 'expect_flask_key, expect_backup_run' ), [ ( NodeType.SKALE, + NodeMode.ACTIVE, 'regular_user_conf', False, True, @@ -164,6 +181,7 @@ def test_is_base_containers_alive_empty(node_type, is_boot): ), ( NodeType.SKALE, + NodeMode.ACTIVE, 'regular_user_conf', False, True, @@ -173,7 +191,8 @@ def test_is_base_containers_alive_empty(node_type, is_boot): True, ), ( - NodeType.PASSIVE, + NodeType.SKALE, + NodeMode.PASSIVE, 'passive_user_conf', False, False, @@ -184,6 +203,7 @@ def test_is_base_containers_alive_empty(node_type, is_boot): ), ( NodeType.FAIR, + NodeMode.ACTIVE, 'fair_boot_user_conf', True, True, @@ -194,6 +214,7 @@ def test_is_base_containers_alive_empty(node_type, is_boot): ), ( NodeType.FAIR, + NodeMode.ACTIVE, 'fair_user_conf', False, True, @@ -214,6 +235,7 @@ def test_is_base_containers_alive_empty(node_type, is_boot): def test_compose_node_env( request, node_type, + node_mode, test_user_conf, is_boot, inited_node, @@ -234,6 +256,7 @@ def test_compose_node_env( inited_node=inited_node, sync_schains=sync_schains, node_type=node_type, + node_mode=node_mode, is_fair_boot=is_boot, save=True, ) @@ -244,7 +267,7 @@ def test_compose_node_env( ) == expect_flask_key if expect_flask_key: assert result_env['FLASK_SECRET_KEY'] == 'mock_secret' - should_have_backup = sync_schains and node_type != NodeType.PASSIVE + should_have_backup = sync_schains and node_mode != NodeMode.PASSIVE assert ('BACKUP_RUN' in result_env and result_env['BACKUP_RUN'] == 'True') == should_have_backup @@ -358,14 +381,21 @@ def test_update_node(regular_user_conf, mocked_g_config, resource_file, inited_n assert result is None -@pytest.mark.parametrize('node_type', [NodeType.SKALE, 
NodeType.PASSIVE, NodeType.FAIR]) +@pytest.mark.parametrize( + 'node_type,node_mode', + [ + (NodeType.SKALE, NodeMode.ACTIVE), + (NodeType.SKALE, NodeMode.PASSIVE), + (NodeType.FAIR, NodeMode.ACTIVE), + ], +) @mock.patch('node_cli.core.node.is_admin_running', return_value=False) @mock.patch('node_cli.core.node.is_api_running', return_value=False) @mock.patch('node_cli.utils.helper.requests.get') def test_is_update_safe_when_admin_and_api_not_running( - mock_requests_get, mock_is_api_running, mock_is_admin_running, node_type + mock_requests_get, mock_is_api_running, mock_is_admin_running, node_type, node_mode ): - assert is_update_safe(node_type=node_type) is True + assert is_update_safe(node_type=node_type, node_mode=node_mode) is True mock_requests_get.assert_not_called() @@ -375,11 +405,18 @@ def test_is_update_safe_when_admin_and_api_not_running( def test_is_update_safe_when_admin_not_running_for_passive( mock_requests_get, mock_is_api_running, mock_is_admin_running ): - assert is_update_safe(node_type=NodeType.PASSIVE) is True + assert is_update_safe(node_type=NodeType.SKALE, node_mode=NodeMode.PASSIVE) is True mock_requests_get.assert_not_called() -@pytest.mark.parametrize('node_type', [NodeType.SKALE, NodeType.PASSIVE, NodeType.FAIR]) +@pytest.mark.parametrize( + 'node_type,node_mode', + [ + (NodeType.SKALE, NodeMode.ACTIVE), + (NodeType.SKALE, NodeMode.PASSIVE), + (NodeType.FAIR, NodeMode.ACTIVE), + ], +) @pytest.mark.parametrize( 'api_is_safe, expected_result', [(True, True), (False, False)], @@ -388,10 +425,10 @@ def test_is_update_safe_when_admin_not_running_for_passive( @mock.patch('node_cli.core.node.is_admin_running', return_value=True) @mock.patch('node_cli.utils.helper.requests.get') def test_is_update_safe_when_admin_running( - mock_requests_get, mock_is_admin_running, api_is_safe, expected_result, node_type + mock_requests_get, mock_is_admin_running, api_is_safe, expected_result, node_type, node_mode ): mock_requests_get.return_value = 
safe_update_api_response(safe=api_is_safe) - assert is_update_safe(node_type=node_type) is expected_result + assert is_update_safe(node_type=node_type, node_mode=node_mode) is expected_result mock_requests_get.assert_called_once() @@ -413,14 +450,23 @@ def test_is_update_safe_when_only_api_running_for_regular( node_type, ): mock_requests_get.return_value = safe_update_api_response(safe=api_is_safe) - assert is_update_safe(node_type=node_type) is expected_result + assert is_update_safe(node_type=node_type, node_mode=NodeMode.ACTIVE) is expected_result mock_requests_get.assert_called_once() -@pytest.mark.parametrize('node_type', [NodeType.SKALE, NodeType.PASSIVE, NodeType.FAIR]) +@pytest.mark.parametrize( + 'node_type,node_mode', + [ + (NodeType.SKALE, NodeMode.ACTIVE), + (NodeType.SKALE, NodeMode.PASSIVE), + (NodeType.FAIR, NodeMode.ACTIVE), + ], +) @mock.patch('node_cli.core.node.is_admin_running', return_value=True) @mock.patch('node_cli.utils.helper.requests.get') -def test_is_update_safe_when_api_call_fails(mock_requests_get, mock_is_admin_running, node_type): +def test_is_update_safe_when_api_call_fails( + mock_requests_get, mock_is_admin_running, node_type, node_mode +): mock_requests_get.side_effect = requests.exceptions.ConnectionError('Test connection error') - assert is_update_safe(node_type=node_type) is False + assert is_update_safe(node_type=node_type, node_mode=node_mode) is False mock_requests_get.assert_called_once() diff --git a/tests/core/nginx_test.py b/tests/core/nginx_test.py index c24645dc..a867b0b0 100644 --- a/tests/core/nginx_test.py +++ b/tests/core/nginx_test.py @@ -11,7 +11,7 @@ SSL_KEY_NAME, SSL_CRT_NAME, ) -from node_cli.utils.node_type import NodeType +from node_cli.utils.node_type import NodeType, NodeMode from node_cli.configs import NGINX_TEMPLATE_FILEPATH, NGINX_CONFIG_FILEPATH, NODE_CERTS_PATH TEST_NGINX_TEMPLATE = """ @@ -57,14 +57,14 @@ def nginx_template(): @pytest.mark.parametrize( - 'node_type, ssl_exists, 
expected_regular_flag, expected_ssl_flag', + 'node_type, node_mode, ssl_exists, expected_regular_flag, expected_ssl_flag', [ - (NodeType.SKALE, True, True, True), - (NodeType.SKALE, False, True, False), - (NodeType.PASSIVE, True, True, True), - (NodeType.PASSIVE, False, True, False), - (NodeType.FAIR, True, False, True), - (NodeType.FAIR, False, False, False), + (NodeType.SKALE, NodeMode.ACTIVE, True, True, True), + (NodeType.SKALE, NodeMode.ACTIVE, False, True, False), + (NodeType.SKALE, NodeMode.PASSIVE, True, True, True), + (NodeType.SKALE, NodeMode.PASSIVE, False, True, False), + (NodeType.FAIR, NodeMode.ACTIVE, True, False, True), + (NodeType.FAIR, NodeMode.ACTIVE, False, False, False), ], ids=[ 'regular_ssl_on', @@ -81,6 +81,7 @@ def test_generate_nginx_config( mock_type, mock_check_ssl, node_type, + node_mode, ssl_exists, expected_regular_flag, expected_ssl_flag, @@ -130,15 +131,15 @@ def test_check_ssl_certs_missing_both(ssl_folder): @pytest.mark.parametrize( - 'node_type, expected_result', + 'node_type, node_mode, expected_result', [ - (NodeType.SKALE, True), - (NodeType.PASSIVE, True), - (NodeType.FAIR, False), + (NodeType.SKALE, NodeMode.ACTIVE, True), + (NodeType.SKALE, NodeMode.PASSIVE, True), + (NodeType.FAIR, NodeMode.ACTIVE, False), ], ) @mock.patch('node_cli.core.nginx.TYPE') -def test_is_skale_node_nginx(mock_type, node_type, expected_result): +def test_is_skale_node_nginx(mock_type, node_type, node_mode, expected_result): mock_type.__eq__.side_effect = lambda other: node_type == other mock_type.__ne__.side_effect = lambda other: node_type != other diff --git a/tests/fair/fair_node_test.py b/tests/fair/fair_node_test.py index a4f1423c..897a642c 100644 --- a/tests/fair/fair_node_test.py +++ b/tests/fair/fair_node_test.py @@ -6,15 +6,16 @@ from node_cli.configs.user import SKALE_DIR_ENV_FILEPATH from node_cli.fair.boot import init as init_boot from node_cli.fair.boot import update -from node_cli.fair import cleanup, migrate_from_boot, restore +from 
node_cli.fair.common import cleanup +from node_cli.fair.active import migrate_from_boot, restore from node_cli.operations.fair import FairUpdateType from node_cli.utils.node_type import NodeType -@mock.patch('node_cli.fair.fair_node.time.sleep') -@mock.patch('node_cli.fair.fair_node.restore_fair_op') -@mock.patch('node_cli.fair.fair_node.save_env_params') -@mock.patch('node_cli.fair.fair_node.compose_node_env') +@mock.patch('node_cli.fair.active.time.sleep') +@mock.patch('node_cli.fair.active.restore_fair_op') +@mock.patch('node_cli.fair.active.save_env_params') +@mock.patch('node_cli.fair.active.compose_node_env') def test_restore_fair( mock_compose_env, mock_save_env, @@ -37,10 +38,10 @@ def test_restore_fair( mock_sleep.assert_called_once() -@mock.patch('node_cli.fair.fair_boot.is_base_containers_alive', return_value=True) -@mock.patch('node_cli.fair.fair_boot.time.sleep') -@mock.patch('node_cli.fair.fair_boot.init_fair_boot_op') -@mock.patch('node_cli.fair.fair_boot.compose_node_env') +@mock.patch('node_cli.fair.boot.is_base_containers_alive', return_value=True) +@mock.patch('node_cli.fair.boot.time.sleep') +@mock.patch('node_cli.fair.boot.init_fair_boot_op') +@mock.patch('node_cli.fair.boot.compose_node_env') def test_init_fair_boot( mock_compose_env, mock_init_op, @@ -65,10 +66,10 @@ def test_init_fair_boot( @mock.patch('node_cli.utils.decorators.is_user_valid', return_value=True) -@mock.patch('node_cli.fair.fair_boot.is_base_containers_alive', return_value=True) -@mock.patch('node_cli.fair.fair_boot.time.sleep') -@mock.patch('node_cli.fair.fair_boot.update_fair_boot_op') -@mock.patch('node_cli.fair.fair_boot.compose_node_env') +@mock.patch('node_cli.fair.boot.is_base_containers_alive', return_value=True) +@mock.patch('node_cli.fair.boot.time.sleep') +@mock.patch('node_cli.fair.boot.update_fair_boot_op') +@mock.patch('node_cli.fair.boot.compose_node_env') def test_update_fair_boot( mock_compose_env, mock_update_op, @@ -100,8 +101,8 @@ def 
test_update_fair_boot( mock_is_alive.assert_called_once_with(node_type=NodeType.FAIR, is_fair_boot=True) -@mock.patch('node_cli.fair.fair_node.update_fair_op') -@mock.patch('node_cli.fair.fair_node.compose_node_env') +@mock.patch('node_cli.fair.active.update_fair_op') +@mock.patch('node_cli.fair.active.compose_node_env') @mock.patch('node_cli.utils.decorators.is_user_valid', return_value=True) def test_migrate_from_boot( mock_is_user_valid, @@ -130,9 +131,9 @@ def test_migrate_from_boot( @mock.patch('node_cli.utils.decorators.is_user_valid', return_value=True) -@mock.patch('node_cli.fair.fair_node.cleanup_docker_configuration') -@mock.patch('node_cli.fair.fair_node.cleanup_fair_op') -@mock.patch('node_cli.fair.fair_node.compose_node_env') +@mock.patch('node_cli.fair.common.cleanup_docker_configuration') +@mock.patch('node_cli.fair.common.cleanup_fair_op') +@mock.patch('node_cli.fair.common.compose_node_env') def test_cleanup_success( mock_compose_env, mock_cleanup_fair_op, @@ -155,9 +156,9 @@ def test_cleanup_success( @mock.patch('node_cli.utils.decorators.is_user_valid', return_value=True) -@mock.patch('node_cli.fair.fair_node.cleanup_docker_configuration') -@mock.patch('node_cli.fair.fair_node.cleanup_fair_op') -@mock.patch('node_cli.fair.fair_node.compose_node_env') +@mock.patch('node_cli.fair.common.cleanup_docker_configuration') +@mock.patch('node_cli.fair.common.cleanup_fair_op') +@mock.patch('node_cli.fair.common.compose_node_env') def test_cleanup_calls_operations_in_correct_order( mock_compose_env, mock_cleanup_fair_op, @@ -167,7 +168,7 @@ def test_cleanup_calls_operations_in_correct_order( resource_alloc, meta_file_v3, ): - from node_cli.fair.fair_node import cleanup + from node_cli.fair.common import cleanup mock_env = {'ENV_TYPE': 'devnet'} mock_compose_env.return_value = mock_env @@ -188,9 +189,9 @@ def test_cleanup_calls_operations_in_correct_order( @mock.patch('node_cli.utils.decorators.is_user_valid', return_value=True) 
-@mock.patch('node_cli.fair.fair_node.cleanup_docker_configuration') -@mock.patch('node_cli.fair.fair_node.cleanup_fair_op', side_effect=Exception('Cleanup failed')) -@mock.patch('node_cli.fair.fair_node.compose_node_env') +@mock.patch('node_cli.fair.common.cleanup_docker_configuration') +@mock.patch('node_cli.fair.common.cleanup_fair_op', side_effect=Exception('Cleanup failed')) +@mock.patch('node_cli.fair.common.compose_node_env') def test_cleanup_continues_after_fair_op_error( mock_compose_env, mock_cleanup_fair_op, @@ -221,7 +222,7 @@ def test_cleanup_fails_when_user_invalid( """Test that cleanup fails when user validation fails""" import pytest - from node_cli.fair import cleanup + from node_cli.fair.common import cleanup with pytest.raises(SystemExit): cleanup() @@ -235,10 +236,10 @@ def test_cleanup_fails_when_not_inited(ensure_meta_removed): @mock.patch('node_cli.utils.decorators.is_user_valid', return_value=True) -@mock.patch('node_cli.fair.fair_node.cleanup_docker_configuration') -@mock.patch('node_cli.fair.fair_node.cleanup_fair_op') -@mock.patch('node_cli.fair.fair_node.compose_node_env') -@mock.patch('node_cli.fair.fair_node.logger') +@mock.patch('node_cli.fair.common.cleanup_docker_configuration') +@mock.patch('node_cli.fair.common.cleanup_fair_op') +@mock.patch('node_cli.fair.common.compose_node_env') +@mock.patch('node_cli.fair.common.logger') def test_cleanup_logs_success_message( mock_logger, mock_compose_env, @@ -260,8 +261,8 @@ def test_cleanup_logs_success_message( @mock.patch('node_cli.utils.decorators.is_user_valid', return_value=True) -@mock.patch('node_cli.fair.fair_node.post_request') -@mock.patch('node_cli.fair.fair_node.is_node_inited', return_value=True) +@mock.patch('node_cli.fair.active.post_request') +@mock.patch('node_cli.fair.active.is_node_inited', return_value=True) def test_exit_success( mock_is_inited, mock_post_request, @@ -270,7 +271,7 @@ def test_exit_success( resource_alloc, meta_file_v3, ): - from node_cli.fair import exit 
+ from node_cli.fair.active import exit mock_post_request.return_value = ('ok', {}) @@ -280,9 +281,9 @@ def test_exit_success( @mock.patch('node_cli.utils.decorators.is_user_valid', return_value=True) -@mock.patch('node_cli.fair.fair_node.error_exit') -@mock.patch('node_cli.fair.fair_node.post_request') -@mock.patch('node_cli.fair.fair_node.is_node_inited', return_value=True) +@mock.patch('node_cli.fair.active.error_exit') +@mock.patch('node_cli.fair.active.post_request') +@mock.patch('node_cli.fair.active.is_node_inited', return_value=True) def test_exit_error( mock_is_inited, mock_post_request, @@ -292,7 +293,7 @@ def test_exit_error( resource_alloc, meta_file_v3, ): - from node_cli.fair import exit + from node_cli.fair.active import exit error_msg = 'Exit failed' mock_post_request.return_value = ('error', error_msg) @@ -304,7 +305,7 @@ def test_exit_error( @mock.patch('node_cli.utils.decorators.is_user_valid', return_value=True) -@mock.patch('node_cli.fair.fair_node.is_node_inited', return_value=False) +@mock.patch('node_cli.fair.active.is_node_inited', return_value=False) def test_exit_not_inited( mock_is_inited, mock_is_user_valid, @@ -313,7 +314,7 @@ def test_exit_not_inited( meta_file_v3, capsys, ): - from node_cli.fair import exit + from node_cli.fair.active import exit exit() From 64e8db7d289776a05f95a9950e3b014e5d74c763 Mon Sep 17 00:00:00 2001 From: Dmytro Date: Mon, 11 Aug 2025 13:25:02 +0100 Subject: [PATCH 185/332] Replace get_node_mode with upsert_node_mode, update fair and skale node modules --- node_cli/cli/fair_node.py | 1 + node_cli/cli/node.py | 6 +++++- node_cli/cli/passive_fair_node.py | 1 + node_cli/core/node.py | 9 +++++---- node_cli/core/node_options.py | 19 +++++++++++++++++-- node_cli/fair/boot.py | 4 ++-- node_cli/fair/common.py | 14 +++++++++----- node_cli/operations/fair.py | 2 ++ tests/core/core_node_test.py | 4 ++-- 9 files changed, 44 insertions(+), 16 deletions(-) diff --git a/node_cli/cli/fair_node.py b/node_cli/cli/fair_node.py 
index 6013b44e..07533802 100644 --- a/node_cli/cli/fair_node.py +++ b/node_cli/cli/fair_node.py @@ -92,6 +92,7 @@ def register(ip: str) -> None: @streamed_cmd def update_node(env_filepath: str, pull_config_for_schain, force_skaled_start: bool): update_fair( + node_mode=NodeMode.ACTIVE, env_filepath=env_filepath, pull_config_for_schain=pull_config_for_schain, force_skaled_start=force_skaled_start, diff --git a/node_cli/cli/node.py b/node_cli/cli/node.py index 61d71af7..20e2d701 100644 --- a/node_cli/cli/node.py +++ b/node_cli/cli/node.py @@ -38,8 +38,10 @@ ) from node_cli.configs import DEFAULT_NODE_BASE_PORT from node_cli.configs.user import ALLOWED_ENV_TYPES +from node_cli.core.node_options import upsert_node_mode from node_cli.utils.decorators import check_inited from node_cli.utils.helper import abort_if_false, streamed_cmd, IP_TYPE +from node_cli.utils.node_type import NodeMode from node_cli.utils.texts import safe_load_texts from node_cli.utils.meta import CliMetaManager from node_cli.utils.print_formatters import print_meta_info @@ -102,6 +104,7 @@ def init_node(env_file): @streamed_cmd def update_node(env_file, pull_config_for_schain, unsafe_ok): update( + node_mode=NodeMode.ACTIVE, env_filepath=env_file, pull_config_for_schain=pull_config_for_schain, node_type=TYPE, @@ -228,7 +231,8 @@ def _set_domain_name(domain): help='Network to check', ) def check(network): - run_checks(node_type=TYPE, network=network) + node_mode = upsert_node_mode() + run_checks(node_type=TYPE, node_mode=node_mode, network=network) @node.command(help='Reconfigure nftables rules') diff --git a/node_cli/cli/passive_fair_node.py b/node_cli/cli/passive_fair_node.py index 6c63bff7..f1b79085 100644 --- a/node_cli/cli/passive_fair_node.py +++ b/node_cli/cli/passive_fair_node.py @@ -67,6 +67,7 @@ def init_passive_node(env_filepath: str): @streamed_cmd def update_node(env_filepath: str, pull_config_for_schain, force_skaled_start: bool): update_fair( + node_mode=NodeMode.PASSIVE, 
env_filepath=env_filepath, pull_config_for_schain=pull_config_for_schain, force_skaled_start=force_skaled_start, diff --git a/node_cli/core/node.py b/node_cli/core/node.py index 7577f166..79ac63c4 100644 --- a/node_cli/core/node.py +++ b/node_cli/core/node.py @@ -50,7 +50,7 @@ from node_cli.core.node_options import ( active_fair, active_skale, - get_node_mode, + upsert_node_mode, passive_skale, passive_fair, ) @@ -293,9 +293,10 @@ def update( env_filepath: str, pull_config_for_schain: Optional[str], node_type: NodeType, + node_mode: NodeMode, unsafe_ok: bool = False, ) -> None: - node_mode = get_node_mode() + node_mode = upsert_node_mode(node_mode=node_mode) if not unsafe_ok and not is_update_safe(node_type=node_type, node_mode=node_mode): error_msg = 'Cannot update safely' @@ -420,7 +421,7 @@ def set_maintenance_mode_off(): @check_inited @check_user def turn_off(node_type: NodeType, maintenance_on: bool = False, unsafe_ok: bool = False) -> None: - node_mode = get_node_mode() + node_mode = upsert_node_mode() if not unsafe_ok and not is_update_safe(node_type=node_type, node_mode=node_mode): error_msg = 'Cannot turn off safely' error_exit(error_msg, exit_code=CLIExitCodes.UNSAFE_UPDATE) @@ -435,7 +436,7 @@ def turn_off(node_type: NodeType, maintenance_on: bool = False, unsafe_ok: bool @check_inited @check_user def turn_on(maintenance_off, sync_schains, env_file, node_type: NodeType) -> None: - node_mode = get_node_mode() + node_mode = upsert_node_mode() env = compose_node_env( env_file, inited_node=True, diff --git a/node_cli/core/node_options.py b/node_cli/core/node_options.py index 889978fe..49b49a4e 100644 --- a/node_cli/core/node_options.py +++ b/node_cli/core/node_options.py @@ -89,9 +89,24 @@ def mark_passive_node() -> None: logger.info('Node marked as passive.') -def get_node_mode() -> NodeMode: +class NodeModeMismatchError(Exception): + pass + + +def upsert_node_mode(node_mode: NodeMode | None = None) -> NodeMode: node_options = NodeOptions() - return 
node_options.node_mode + try: + options_mode = node_options.node_mode + if options_mode != node_mode: + raise NodeModeMismatchError( + f'Cannot change node mode from {options_mode} to {node_mode}' + ) + return options_mode + except ValueError: + if node_mode is None: + raise NodeModeMismatchError('Node mode is not set') + node_options.node_mode = node_mode + return node_mode def active_skale(node_type: NodeType, node_mode: NodeMode) -> bool: diff --git a/node_cli/fair/boot.py b/node_cli/fair/boot.py index b8da120c..9acf9b63 100644 --- a/node_cli/fair/boot.py +++ b/node_cli/fair/boot.py @@ -23,7 +23,7 @@ from node_cli.configs import TM_INIT_TIMEOUT from node_cli.core.node import compose_node_env, is_base_containers_alive -from node_cli.core.node_options import get_node_mode +from node_cli.core.node_options import upsert_node_mode from node_cli.operations import init_fair_boot_op, update_fair_boot_op from node_cli.utils.decorators import check_inited, check_not_inited, check_user from node_cli.utils.exit_codes import CLIExitCodes @@ -57,7 +57,7 @@ def init(env_filepath: str) -> None: @check_user def update(env_filepath: str, pull_config_for_schain: str) -> None: logger.info('Fair boot node update started') - node_mode = get_node_mode() + node_mode = upsert_node_mode(node_mode=NodeMode.ACTIVE) env = compose_node_env( env_filepath, inited_node=True, diff --git a/node_cli/fair/common.py b/node_cli/fair/common.py index 9e0163f6..8af1f017 100644 --- a/node_cli/fair/common.py +++ b/node_cli/fair/common.py @@ -24,7 +24,7 @@ from node_cli.configs.user import SKALE_DIR_ENV_FILEPATH from node_cli.core.docker_config import cleanup_docker_configuration from node_cli.core.node import compose_node_env, is_base_containers_alive -from node_cli.core.node_options import get_node_mode +from node_cli.core.node_options import upsert_node_mode from node_cli.operations import ( FairUpdateType, cleanup_fair_op, @@ -61,7 +61,7 @@ def init(node_mode: NodeMode, env_filepath: str) -> None: 
@check_user def cleanup() -> None: - node_mode = get_node_mode() + node_mode = upsert_node_mode() env = compose_node_env( SKALE_DIR_ENV_FILEPATH, save=False, node_type=NodeType.FAIR, node_mode=node_mode ) @@ -73,7 +73,10 @@ def cleanup() -> None: @check_inited @check_user def update( - env_filepath: str, pull_config_for_schain: str | None = None, force_skaled_start: bool = False + node_mode: NodeMode, + env_filepath: str, + pull_config_for_schain: str | None = None, + force_skaled_start: bool = False, ) -> None: logger.info( 'Updating fair node: %s, pull_config_for_schain: %s, force_skaled_start: %s', @@ -81,7 +84,8 @@ def update( pull_config_for_schain, force_skaled_start, ) - node_mode = get_node_mode() + node_mode = upsert_node_mode(node_mode=node_mode) + env = compose_node_env( env_filepath, inited_node=True, @@ -106,7 +110,7 @@ def update( def repair_chain(snapshot_from: str = 'any') -> None: - node_mode = get_node_mode() + node_mode = upsert_node_mode() env = compose_node_env( SKALE_DIR_ENV_FILEPATH, save=False, node_type=NodeType.FAIR, node_mode=node_mode ) diff --git a/node_cli/operations/fair.py b/node_cli/operations/fair.py index a35854a3..20f70374 100644 --- a/node_cli/operations/fair.py +++ b/node_cli/operations/fair.py @@ -38,6 +38,7 @@ from node_cli.core.nginx import generate_nginx_config from node_cli.core.schains import cleanup_no_lvm_datadir from node_cli.core.static_config import get_fair_chain_name +from node_cli.core.node_options import upsert_node_mode from node_cli.fair.record.chain_record import ( get_fair_chain_record, migrate_chain_record, @@ -93,6 +94,7 @@ def init(node_mode: NodeMode, env_filepath: str, env: dict) -> bool: prepare_host(env_filepath, env_type=env['ENV_TYPE']) link_env_file() + upsert_node_mode(node_mode=node_mode) prepare_block_device(env['DISK_MOUNTPOINT'], force=env['ENFORCE_BTRFS'] == 'True') diff --git a/tests/core/core_node_test.py b/tests/core/core_node_test.py index b35f7b64..6535562e 100644 --- 
a/tests/core/core_node_test.py +++ b/tests/core/core_node_test.py @@ -164,8 +164,8 @@ def test_is_base_containers_alive_empty(node_type, node_mode, is_boot): @pytest.mark.parametrize( ( - 'node_type, node_mode, test_user_conf, is_boot, inited_node, sync_schains, expected_mnt_dir,' - 'expect_flask_key, expect_backup_run' + 'node_type, node_mode, test_user_conf, is_boot, inited_node, sync_schains,' + 'expected_mnt_dir, expect_flask_key, expect_backup_run' ), [ ( From 8426a49d44221efb7a7630a16344d5bd5a8d55dc Mon Sep 17 00:00:00 2001 From: Dmytro Date: Mon, 11 Aug 2025 13:49:35 +0100 Subject: [PATCH 186/332] Bump version, update node tests --- node_cli/cli/__init__.py | 2 +- node_cli/operations/base.py | 4 ++-- tests/core/core_node_test.py | 1 + tests/fair/fair_node_test.py | 5 +++-- 4 files changed, 7 insertions(+), 5 deletions(-) diff --git a/node_cli/cli/__init__.py b/node_cli/cli/__init__.py index b0105764..e2eb22f4 100644 --- a/node_cli/cli/__init__.py +++ b/node_cli/cli/__init__.py @@ -1,4 +1,4 @@ -__version__ = '3.0.0' +__version__ = '3.1.0' if __name__ == '__main__': print(__version__) diff --git a/node_cli/operations/base.py b/node_cli/operations/base.py index 5168fab1..b9d2cc09 100644 --- a/node_cli/operations/base.py +++ b/node_cli/operations/base.py @@ -43,9 +43,9 @@ from node_cli.core.nginx import generate_nginx_config from node_cli.core.node_options import ( NodeOptions, - get_node_mode, mark_active_node, mark_passive_node, + upsert_node_mode, ) from node_cli.core.resources import init_shared_space_volume, update_resource_allocation from node_cli.core.schains import ( @@ -382,7 +382,7 @@ def turn_on(env: dict, node_type: NodeType, node_mode: NodeMode) -> None: def restore(env, backup_path, node_type: NodeType, config_only=False): - node_mode = get_node_mode() + node_mode = upsert_node_mode(node_mode=NodeMode.ACTIVE) unpack_backup_archive(backup_path) failed_checks = run_host_checks( env['DISK_MOUNTPOINT'], diff --git a/tests/core/core_node_test.py 
b/tests/core/core_node_test.py index 6535562e..d84ddc37 100644 --- a/tests/core/core_node_test.py +++ b/tests/core/core_node_test.py @@ -374,6 +374,7 @@ def test_update_node(regular_user_conf, mocked_g_config, resource_file, inited_n 'node_cli.utils.helper.requests.get', return_value=safe_update_api_response() ): # noqa result = update( + node_mode=NodeMode.ACTIVE, regular_user_conf.as_posix(), pull_config_for_schain=None, node_type=NodeType.SKALE, diff --git a/tests/fair/fair_node_test.py b/tests/fair/fair_node_test.py index 897a642c..685bbbf1 100644 --- a/tests/fair/fair_node_test.py +++ b/tests/fair/fair_node_test.py @@ -9,7 +9,7 @@ from node_cli.fair.common import cleanup from node_cli.fair.active import migrate_from_boot, restore from node_cli.operations.fair import FairUpdateType -from node_cli.utils.node_type import NodeType +from node_cli.utils.node_type import NodeMode, NodeType @mock.patch('node_cli.fair.active.time.sleep') @@ -34,7 +34,8 @@ def test_restore_fair( mock_compose_env.assert_called_once_with(valid_env_file, node_type=NodeType.FAIR) mock_save_env.assert_called_once_with(valid_env_file) expected_env = {**mock_env, 'SKALE_DIR': SKALE_DIR} - mock_restore_op.assert_called_once_with(expected_env, backup_path, config_only=False) + mock_restore_op.assert_called_once_with( + node_mode=NodeMode.ACTIVE, expected_env, backup_path, config_only=False) mock_sleep.assert_called_once() From 7e96e12dc906b714eb489d2d9f2e692f44093ed8 Mon Sep 17 00:00:00 2001 From: Dmytro Date: Mon, 11 Aug 2025 16:24:16 +0100 Subject: [PATCH 187/332] Add active_node_option fixture, update tests --- tests/conftest.py | 22 ++++++++++++++++++++++ tests/core/core_node_test.py | 2 +- tests/fair/fair_node_test.py | 7 +++++-- 3 files changed, 28 insertions(+), 3 deletions(-) diff --git a/tests/conftest.py b/tests/conftest.py index ba860af9..97e7c674 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -43,8 +43,10 @@ from node_cli.configs.node_options import NODE_OPTIONS_FILEPATH 
from node_cli.configs.resource_allocation import RESOURCE_ALLOCATION_FILEPATH from node_cli.configs.ssl import SSL_FOLDER_PATH +from node_cli.core.node_options import NodeOptions from node_cli.utils.docker_utils import docker_client from node_cli.utils.global_config import generate_g_config_file +from node_cli.utils.node_type import NodeMode from tests.helper import TEST_META_V1, TEST_META_V2, TEST_META_V3, TEST_SCHAINS_MNT_DIR_SINGLE_CHAIN @@ -144,6 +146,26 @@ def ssl_folder(): shutil.rmtree(SSL_FOLDER_PATH) +@pytest.fixture +def active_node_option(): + node_options = NodeOptions() + node_options.node_mode = NodeMode.ACTIVE + try: + yield + finally: + shutil.rmtree(NODE_OPTIONS_FILEPATH) + + +@pytest.fixture +def passive_node_option(): + node_options = NodeOptions() + node_options.node_mode = NodeMode.PASSIVE + try: + yield + finally: + shutil.rmtree(NODE_OPTIONS_FILEPATH) + + @pytest.fixture def dutils(): return docker_client() diff --git a/tests/core/core_node_test.py b/tests/core/core_node_test.py index d84ddc37..70671418 100644 --- a/tests/core/core_node_test.py +++ b/tests/core/core_node_test.py @@ -374,10 +374,10 @@ def test_update_node(regular_user_conf, mocked_g_config, resource_file, inited_n 'node_cli.utils.helper.requests.get', return_value=safe_update_api_response() ): # noqa result = update( - node_mode=NodeMode.ACTIVE, regular_user_conf.as_posix(), pull_config_for_schain=None, node_type=NodeType.SKALE, + node_mode=NodeMode.ACTIVE, ) assert result is None diff --git a/tests/fair/fair_node_test.py b/tests/fair/fair_node_test.py index 685bbbf1..861267ca 100644 --- a/tests/fair/fair_node_test.py +++ b/tests/fair/fair_node_test.py @@ -35,7 +35,8 @@ def test_restore_fair( mock_save_env.assert_called_once_with(valid_env_file) expected_env = {**mock_env, 'SKALE_DIR': SKALE_DIR} mock_restore_op.assert_called_once_with( - node_mode=NodeMode.ACTIVE, expected_env, backup_path, config_only=False) + NodeMode.ACTIVE, expected_env, backup_path, config_only=False + ) 
mock_sleep.assert_called_once() @@ -168,6 +169,7 @@ def test_cleanup_calls_operations_in_correct_order( inited_node, resource_alloc, meta_file_v3, + active_node_option, ): from node_cli.fair.common import cleanup @@ -229,7 +231,7 @@ def test_cleanup_fails_when_user_invalid( cleanup() -def test_cleanup_fails_when_not_inited(ensure_meta_removed): +def test_cleanup_fails_when_not_inited(ensure_meta_removed, active_node_option): import pytest with pytest.raises(SystemExit): @@ -250,6 +252,7 @@ def test_cleanup_logs_success_message( inited_node, resource_alloc, meta_file_v3, + active_node_option, ): mock_env = {'ENV_TYPE': 'devnet'} mock_compose_env.return_value = mock_env From 6a1bd8b1f551f96dbf313bdd6cd538813ee398d7 Mon Sep 17 00:00:00 2001 From: Dmytro Date: Mon, 11 Aug 2025 18:36:48 +0100 Subject: [PATCH 188/332] Fix fair node commands --- node_cli/fair/active.py | 4 ++-- node_cli/fair/common.py | 6 +++--- node_cli/operations/fair.py | 4 ++-- 3 files changed, 7 insertions(+), 7 deletions(-) diff --git a/node_cli/fair/active.py b/node_cli/fair/active.py index 5ab1acfe..963686fe 100644 --- a/node_cli/fair/active.py +++ b/node_cli/fair/active.py @@ -74,9 +74,9 @@ def migrate_from_boot( node_mode=NodeMode.ACTIVE, ) migrate_ok = update_fair_op( + env_filepath, + env, node_mode=NodeMode.ACTIVE, - env_filepath=env_filepath, - env=env, update_type=FairUpdateType.FROM_BOOT, force_skaled_start=False, ) diff --git a/node_cli/fair/common.py b/node_cli/fair/common.py index 8af1f017..52524421 100644 --- a/node_cli/fair/common.py +++ b/node_cli/fair/common.py @@ -52,7 +52,7 @@ def init(node_mode: NodeMode, env_filepath: str) -> None: save_env_params(env_filepath) env['SKALE_DIR'] = SKALE_DIR - init_ok = init_fair_op(node_mode, env_filepath, env) + init_ok = init_fair_op(env_filepath, env, node_mode=node_mode) if not init_ok: error_exit('Init operation failed', exit_code=CLIExitCodes.OPERATION_EXECUTION_ERROR) time.sleep(RESTORE_SLEEP_TIMEOUT) @@ -95,9 +95,9 @@ def update( 
pull_config_for_schain=pull_config_for_schain, ) update_ok = update_fair_op( + env_filepath, + env, node_mode=node_mode, - env_filepath=env_filepath, - env=env, update_type=FairUpdateType.REGULAR, force_skaled_start=force_skaled_start, ) diff --git a/node_cli/operations/fair.py b/node_cli/operations/fair.py index 20f70374..d3a04e20 100644 --- a/node_cli/operations/fair.py +++ b/node_cli/operations/fair.py @@ -79,7 +79,7 @@ class FairUpdateType(Enum): @checked_host -def init(node_mode: NodeMode, env_filepath: str, env: dict) -> bool: +def init(env_filepath: str, env: dict, node_mode: NodeMode) -> bool: sync_skale_node() ensure_btrfs_kernel_module_autoloaded() cleanup_volume_artifacts(env['DISK_MOUNTPOINT']) @@ -156,9 +156,9 @@ def update_fair_boot(env_filepath: str, env: dict) -> bool: @checked_host def update( - node_mode: NodeMode, env_filepath: str, env: dict, + node_mode: NodeMode, update_type: FairUpdateType, force_skaled_start: bool, ) -> bool: From 1bb430eb22bc12fac23681796b5462a14562033e Mon Sep 17 00:00:00 2001 From: Dmytro Date: Mon, 11 Aug 2025 21:42:06 +0100 Subject: [PATCH 189/332] Remove resource allocation cli, add passive node setup command --- README.md | 32 --------- node_cli/cli/passive_node.py | 14 ++-- node_cli/cli/resources_allocation.py | 64 ----------------- node_cli/configs/user.py | 4 +- node_cli/core/node_config.py | 35 ---------- node_cli/core/node_options.py | 4 +- node_cli/core/resources.py | 7 +- node_cli/fair/passive.py | 21 ++++++ node_cli/main.py | 2 - tests/cli/resources_allocation_test.py | 96 -------------------------- tests/conftest.py | 9 +++ text.yml | 5 +- 12 files changed, 52 insertions(+), 241 deletions(-) delete mode 100644 node_cli/cli/resources_allocation.py delete mode 100644 node_cli/core/node_config.py delete mode 100644 tests/cli/resources_allocation_test.py diff --git a/README.md b/README.md index 3b8b1b88..7484ad2b 100644 --- a/README.md +++ b/README.md @@ -21,7 +21,6 @@ SKALE Node CLI, part of the SKALE suite of 
validator tools, is the command line 5. [Health commands (Standard)](#health-commands-standard) 6. [SSL commands (Standard)](#ssl-commands-standard) 7. [Logs commands (Standard)](#logs-commands-standard) - 8. [Resources allocation commands (Standard)](#resources-allocation-commands-standard) 3. [Passive Node Usage (`skale` - Passive Build)](#passive-node-usage-skale---passive-build) 1. [Top level commands (Passive)](#top-level-commands-passive) 2. [Passive node commands](#passive-node-commands) @@ -519,37 +518,6 @@ Options: * `--container`, `-c` - Dump logs only from specified container. -### Resources allocation commands (Standard) - -> Prefix: `skale resources-allocation` - -Manage the resources allocation file for the standard node. - -#### Show allocation file - -Show resources allocation file: - -```shell -skale resources-allocation show -``` - -#### Generate/update allocation file - -Generate/update allocation file: - -```shell -skale resources-allocation generate [ENV_FILE] [--yes] [-f/--force] -``` - -Arguments: - -* `ENV_FILE` - path to .env file (required parameters are listed in the `skale node init` command). - -Options: - -* `--yes` - generate without additional confirmation. -* `-f/--force` - rewrite allocation file if it exists. 
- *** ## Passive Node Usage (`skale` - Passive Build) diff --git a/node_cli/cli/passive_node.py b/node_cli/cli/passive_node.py index c24fe0b5..4e9583f1 100644 --- a/node_cli/cli/passive_node.py +++ b/node_cli/cli/passive_node.py @@ -22,12 +22,8 @@ import click from node_cli.core.node import init_passive, update_passive, cleanup_passive -from node_cli.utils.helper import ( - abort_if_false, - error_exit, - streamed_cmd, - URL_TYPE, -) +from node_cli.fair.passive import setup_fair_passive +from node_cli.utils.helper import abort_if_false, error_exit, streamed_cmd, URL_TYPE, IP_TYPE from node_cli.utils.texts import safe_load_texts @@ -88,3 +84,9 @@ def _update_passive(env_file, unsafe_ok): @streamed_cmd def _cleanup_passive() -> None: cleanup_passive() + + +@passive_node.command('setup', help=TEXTS['setup']['help']) +@click.option('--id', required=True, type=int, help=TEXTS['setup']['id']) +def _setup(id: int) -> None: + setup_fair_passive(node_id=id) diff --git a/node_cli/cli/resources_allocation.py b/node_cli/cli/resources_allocation.py deleted file mode 100644 index a6b2e185..00000000 --- a/node_cli/cli/resources_allocation.py +++ /dev/null @@ -1,64 +0,0 @@ -# -*- coding: utf-8 -*- -# -# This file is part of node-cli -# -# Copyright (C) 2019 SKALE Labs -# -# This program is free software: you can redistribute it and/or modify -# it under the terms of the GNU Affero General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU Affero General Public License for more details. -# -# You should have received a copy of the GNU Affero General Public License -# along with this program. If not, see . 
- -import json -import click - -from node_cli.core.resources import ( - get_resource_allocation_info, - generate_resource_allocation_config, -) -from node_cli.utils.helper import abort_if_false -from node_cli.utils.texts import safe_load_texts -from node_cli.utils.node_type import NodeType - -TEXTS = safe_load_texts() - - -@click.group() -def resources_allocation_cli(): - pass - - -@resources_allocation_cli.group(help='Resources allocation commands') -def resources_allocation(): - pass - - -@resources_allocation.command('show', help='Show resources allocation file') -def show(): - resource_allocation_info = get_resource_allocation_info() - if resource_allocation_info: - print(json.dumps(resource_allocation_info, indent=4)) - else: - print('No resources allocation file on this machine') - - -@resources_allocation.command('generate', help='Generate/update resources allocation file') -@click.argument('env_file') -@click.option( - '--yes', - is_flag=True, - callback=abort_if_false, - expose_value=False, - prompt='Are you sure you want to generate/update resource allocation file?', -) -@click.option('--force', '-f', is_flag=True, help='Rewrite if already exists') -def generate(env_file, force): - generate_resource_allocation_config(node_type=NodeType.SKALE, env_file=env_file, force=force) diff --git a/node_cli/configs/user.py b/node_cli/configs/user.py index b094150d..2fd8145e 100644 --- a/node_cli/configs/user.py +++ b/node_cli/configs/user.py @@ -168,7 +168,9 @@ def get_validated_user_config( def validate_user_config(user_config: BaseUserConfig) -> None: validate_env_type(env_type=user_config.env_type) - if not isinstance(user_config, FairUserConfig): + if not isinstance(user_config, FairUserConfig) and not isinstance( + user_config, PassiveFairUserConfig + ): validate_alias_or_address( user_config.manager_contracts, ContractType.MANAGER, user_config.endpoint ) diff --git a/node_cli/core/node_config.py b/node_cli/core/node_config.py deleted file mode 100644 index 
c7050918..00000000 --- a/node_cli/core/node_config.py +++ /dev/null @@ -1,35 +0,0 @@ -# -*- coding: utf-8 -*- -# -# This file is part of node-cli -# -# Copyright (C) 2021 SKALE Labs -# -# This program is free software: you can redistribute it and/or modify -# it under the terms of the GNU Affero General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU Affero General Public License for more details. -# -# You should have received a copy of the GNU Affero General Public License -# along with this program. If not, see . - - -class NodeConfig: - def __init__(self, config_filepath, env_filepath=None): - pass - - def load_env(self): - pass - - def validate_env(self): - pass - - def load_config(self): - pass - - def validate_config(self): - pass diff --git a/node_cli/core/node_options.py b/node_cli/core/node_options.py index 49b49a4e..fc766f3a 100644 --- a/node_cli/core/node_options.py +++ b/node_cli/core/node_options.py @@ -71,7 +71,7 @@ def node_mode(self) -> NodeMode: @node_mode.setter def node_mode(self, node_mode: NodeMode) -> None: - return self._set('node_mode', node_mode.name) + return self._set('node_mode', node_mode.value) def all(self) -> dict: return read_json(self.filepath) @@ -97,7 +97,7 @@ def upsert_node_mode(node_mode: NodeMode | None = None) -> NodeMode: node_options = NodeOptions() try: options_mode = node_options.node_mode - if options_mode != node_mode: + if node_mode is not None and options_mode != node_mode: raise NodeModeMismatchError( f'Cannot change node mode from {options_mode} to {node_mode}' ) diff --git a/node_cli/core/resources.py b/node_cli/core/resources.py index 216aa657..e62a0f6f 100644 --- a/node_cli/core/resources.py +++ 
b/node_cli/core/resources.py @@ -28,7 +28,7 @@ from node_cli.utils.docker_utils import ensure_volume from node_cli.utils.schain_types import SchainTypes from node_cli.utils.helper import write_json, read_json, run_cmd, safe_load_yml -from node_cli.utils.node_type import NodeType +from node_cli.utils.node_type import NodeType, NodeMode from node_cli.configs import ALLOCATION_FILEPATH, STATIC_PARAMS_FILEPATH, SNAPSHOTS_SHARED_VOLUME from node_cli.configs.resource_allocation import ( RESOURCE_ALLOCATION_FILEPATH, @@ -95,6 +95,7 @@ def compose_resource_allocation_config(env_type: str, params_by_env_type: Dict = def generate_resource_allocation_config( env_file, node_type: NodeType, + node_mode: NodeMode, force=False, ) -> None: if not force and os.path.isfile(RESOURCE_ALLOCATION_FILEPATH): @@ -102,7 +103,9 @@ def generate_resource_allocation_config( logger.debug(msg) print(msg) return - user_config = get_validated_user_config(node_type=node_type, env_filepath=env_file) + user_config = get_validated_user_config( + node_type=node_type, node_mode=node_mode, env_filepath=env_file + ) logger.info('Generating resource allocation file ...') try: update_resource_allocation(user_config.env_type) diff --git a/node_cli/fair/passive.py b/node_cli/fair/passive.py index be047233..dcd175d9 100644 --- a/node_cli/fair/passive.py +++ b/node_cli/fair/passive.py @@ -19,7 +19,28 @@ import logging +from node_cli.core.host import is_node_inited from node_cli.utils.texts import safe_load_texts +from node_cli.utils.helper import error_exit, post_request +from node_cli.utils.exit_codes import CLIExitCodes logger = logging.getLogger(__name__) TEXTS = safe_load_texts() +BLUEPRINT_NAME = 'fair-node-passive' + + +def setup_fair_passive(node_id: int) -> None: + if not is_node_inited(): + print(TEXTS['fair']['node']['not_inited']) + return + + json_data = {'id': node_id} + status, payload = post_request(blueprint=BLUEPRINT_NAME, method='setup', json=json_data) + if status == 'ok': + msg = 
TEXTS['fair']['node']['setup_complete'] + logger.info(msg) + print(msg) + else: + error_msg = payload + logger.error(f'Setup error {error_msg}') + error_exit(error_msg, exit_code=CLIExitCodes.BAD_API_RESPONSE) diff --git a/node_cli/main.py b/node_cli/main.py index a2d3aba9..8e734d17 100644 --- a/node_cli/main.py +++ b/node_cli/main.py @@ -36,7 +36,6 @@ from node_cli.cli.schains import schains_cli from node_cli.cli.wallet import wallet_cli from node_cli.cli.ssl import ssl_cli -from node_cli.cli.resources_allocation import resources_allocation_cli from node_cli.cli.passive_node import passive_node_cli from node_cli.cli.fair_boot import fair_boot_cli from node_cli.cli.fair_node import fair_node_cli @@ -102,7 +101,6 @@ def get_sources_list() -> List[click.MultiCommand]: health_cli, schains_cli, logs_cli, - resources_allocation_cli, node_cli, passive_node_cli, wallet_cli, diff --git a/tests/cli/resources_allocation_test.py b/tests/cli/resources_allocation_test.py deleted file mode 100644 index 03b2e73b..00000000 --- a/tests/cli/resources_allocation_test.py +++ /dev/null @@ -1,96 +0,0 @@ -# -*- coding: utf-8 -*- -# -# This file is part of node-cli -# -# Copyright (C) 2019 SKALE Labs -# -# This program is free software: you can redistribute it and/or modify -# it under the terms of the GNU Affero General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU Affero General Public License for more details. -# -# You should have received a copy of the GNU Affero General Public License -# along with this program. If not, see . 
- -import json -import os - -import mock -import pytest -import requests - -from node_cli.cli.resources_allocation import generate, show -from node_cli.configs.resource_allocation import NODE_DATA_PATH, RESOURCE_ALLOCATION_FILEPATH -from node_cli.utils.helper import safe_mkdir, write_json -from node_cli.utils.node_type import NodeType -from tests.helper import response_mock, run_command_mock -from tests.resources_test import BIG_DISK_SIZE - -TEST_CONFIG = {'test': 1} - - -@pytest.fixture -def resource_alloc_config(): - write_json(RESOURCE_ALLOCATION_FILEPATH, TEST_CONFIG) - yield RESOURCE_ALLOCATION_FILEPATH - os.remove(RESOURCE_ALLOCATION_FILEPATH) - - -def test_show(resource_alloc_config): - resp_mock = response_mock(requests.codes.created) - write_json(RESOURCE_ALLOCATION_FILEPATH, TEST_CONFIG) - result = run_command_mock('node_cli.utils.helper.post_request', resp_mock, show) - assert result.output == json.dumps(TEST_CONFIG, indent=4) + '\n' - assert result.exit_code == 0 - - -def test_generate(regular_user_conf): - safe_mkdir(NODE_DATA_PATH) - resp_mock = response_mock(requests.codes.created) - with ( - mock.patch('node_cli.core.resources.get_disk_size', return_value=BIG_DISK_SIZE), - mock.patch('node_cli.configs.user.validate_alias_or_address'), - ): - result = run_command_mock( - 'node_cli.utils.helper.post_request', - resp_mock, - generate, - [regular_user_conf.as_posix(), '--yes'], - ) - assert result.output == ( - f'Resource allocation file generated: {RESOURCE_ALLOCATION_FILEPATH}\n' - ) - assert result.exit_code == 0 - - -def test_generate_already_exists(regular_user_conf, resource_alloc_config): - resp_mock = response_mock(requests.codes.created) - with ( - mock.patch('node_cli.core.resources.get_disk_size', return_value=BIG_DISK_SIZE), - mock.patch('node_cli.cli.node.TYPE', NodeType.SKALE), - mock.patch('node_cli.configs.user.validate_alias_or_address'), - ): - result = run_command_mock( - 'node_cli.utils.helper.post_request', - resp_mock, - generate, 
- [regular_user_conf.as_posix(), '--yes'], - ) - assert result.output == 'Resource allocation file already exists\n' - assert result.exit_code == 0 - - result = run_command_mock( - 'node_cli.utils.helper.post_request', - resp_mock, - generate, - [regular_user_conf.as_posix(), '--yes', '--force'], - ) - assert result.output == ( - f'Resource allocation file generated: {RESOURCE_ALLOCATION_FILEPATH}\n' - ) - assert result.exit_code == 0 diff --git a/tests/conftest.py b/tests/conftest.py index 97e7c674..2e10a9a4 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -36,6 +36,7 @@ META_FILEPATH, NGINX_CONFIG_FILEPATH, NGINX_CONTAINER_NAME, + NODE_DATA_PATH, REDIS_URI, REMOVED_CONTAINERS_FOLDER_PATH, SCHAIN_NODE_DATA_PATH, @@ -148,6 +149,10 @@ def ssl_folder(): @pytest.fixture def active_node_option(): + if os.path.isdir(NODE_DATA_PATH): + shutil.rmtree(NODE_DATA_PATH) + path = pathlib.Path(NODE_DATA_PATH) + path.mkdir(parents=True, exist_ok=True) node_options = NodeOptions() node_options.node_mode = NodeMode.ACTIVE try: @@ -158,6 +163,10 @@ def active_node_option(): @pytest.fixture def passive_node_option(): + if os.path.isdir(NODE_DATA_PATH): + shutil.rmtree(NODE_DATA_PATH) + path = pathlib.Path(NODE_DATA_PATH) + path.mkdir(parents=True, exist_ok=True) node_options = NodeOptions() node_options.node_mode = NodeMode.PASSIVE try: diff --git a/text.yml b/text.yml index 85ab8328..bde70b7f 100644 --- a/text.yml +++ b/text.yml @@ -67,6 +67,9 @@ passive_node: archive: Enable historic state and disable block rotation snapshot_from: IP of the node to take snapshot from snapshot: Start passive node from snapshot + setup: + help: Setup passive Fair node + id: ID of the node in Fair manager lvmpy: help: Lvmpy commands @@ -87,9 +90,9 @@ fair: not_inited: Node should be initialized to proceed with operation registered: Node is registered in Fair manager. + setup_complete: Passive node setup complete. 
register: help: Register node in Fair manager - name: Name of the node in Fair manager ip: IP address of the node in Fair manager ip_changed: Node IP changed in Fair manager change-ip: From 9dc3dd21087872a956ba87301cbfa3903b88ac1a Mon Sep 17 00:00:00 2001 From: Dmytro Date: Tue, 12 Aug 2025 11:36:33 +0100 Subject: [PATCH 190/332] Fix ruff checks --- node_cli/cli/passive_node.py | 2 +- node_cli/utils/docker_utils.py | 4 +++- 2 files changed, 4 insertions(+), 2 deletions(-) diff --git a/node_cli/cli/passive_node.py b/node_cli/cli/passive_node.py index 4e9583f1..c53521cf 100644 --- a/node_cli/cli/passive_node.py +++ b/node_cli/cli/passive_node.py @@ -23,7 +23,7 @@ from node_cli.core.node import init_passive, update_passive, cleanup_passive from node_cli.fair.passive import setup_fair_passive -from node_cli.utils.helper import abort_if_false, error_exit, streamed_cmd, URL_TYPE, IP_TYPE +from node_cli.utils.helper import abort_if_false, error_exit, streamed_cmd, URL_TYPE from node_cli.utils.texts import safe_load_texts diff --git a/node_cli/utils/docker_utils.py b/node_cli/utils/docker_utils.py index 83a8dc6f..71400b67 100644 --- a/node_cli/utils/docker_utils.py +++ b/node_cli/utils/docker_utils.py @@ -84,8 +84,10 @@ } BASE_PASSIVE_FAIR_COMPOSE_SERVICES = { - 'fair-passive-admin': 'fair_passive_admin', + 'fair-admin': 'fair_admin', + 'fair-api': 'fair_api', 'nginx': 'skale_nginx', + **REDIS_SERVICE_DICT, } MONITORING_COMPOSE_SERVICES = { From 45a92545275d62078e6894fe65733ef750dfef1c Mon Sep 17 00:00:00 2001 From: Dmytro Date: Tue, 12 Aug 2025 23:53:12 +0100 Subject: [PATCH 191/332] Fix setup cmd for skale node, add watchdog and filebeat to the passive node --- node_cli/cli/passive_fair_node.py | 7 +++++++ node_cli/cli/passive_node.py | 7 ------- node_cli/configs/routes.py | 1 + node_cli/utils/docker_utils.py | 2 ++ tests/.skale/node_data/.gitkeep | 0 tests/routes_test.py | 1 + text.yml | 7 ++++--- 7 files changed, 15 insertions(+), 10 deletions(-) delete mode 100644 
tests/.skale/node_data/.gitkeep diff --git a/node_cli/cli/passive_fair_node.py b/node_cli/cli/passive_fair_node.py index f1b79085..046ff41f 100644 --- a/node_cli/cli/passive_fair_node.py +++ b/node_cli/cli/passive_fair_node.py @@ -22,6 +22,7 @@ from node_cli.fair.common import init as init_fair from node_cli.fair.common import update as update_fair from node_cli.fair.common import cleanup as cleanup_fair +from node_cli.fair.passive import setup_fair_passive from node_cli.utils.helper import abort_if_false, streamed_cmd from node_cli.utils.node_type import NodeMode from node_cli.utils.texts import safe_load_texts @@ -85,3 +86,9 @@ def update_node(env_filepath: str, pull_config_for_schain, force_skaled_start: b @streamed_cmd def cleanup_node(): cleanup_fair() + + +@passive_node.command('setup', help=TEXTS['fair']['node']['setup']['help']) +@click.option('--id', required=True, type=int, help=TEXTS['fair']['node']['setup']['id']) +def _setup(id: int) -> None: + setup_fair_passive(node_id=id) diff --git a/node_cli/cli/passive_node.py b/node_cli/cli/passive_node.py index c53521cf..6be74b28 100644 --- a/node_cli/cli/passive_node.py +++ b/node_cli/cli/passive_node.py @@ -22,7 +22,6 @@ import click from node_cli.core.node import init_passive, update_passive, cleanup_passive -from node_cli.fair.passive import setup_fair_passive from node_cli.utils.helper import abort_if_false, error_exit, streamed_cmd, URL_TYPE from node_cli.utils.texts import safe_load_texts @@ -84,9 +83,3 @@ def _update_passive(env_file, unsafe_ok): @streamed_cmd def _cleanup_passive() -> None: cleanup_passive() - - -@passive_node.command('setup', help=TEXTS['setup']['help']) -@click.option('--id', required=True, type=int, help=TEXTS['setup']['id']) -def _setup(id: int) -> None: - setup_fair_passive(node_id=id) diff --git a/node_cli/configs/routes.py b/node_cli/configs/routes.py index a0f15ef1..7d7d9e9c 100644 --- a/node_cli/configs/routes.py +++ b/node_cli/configs/routes.py @@ -42,6 +42,7 @@ 'wallet': 
['info', 'send-eth'], 'fair-node': ['info', 'register', 'set-domain-name', 'change-ip', 'exit'], 'fair-chain': ['record', 'checks'], + 'fair-node-passive': ['setup'], } } diff --git a/node_cli/utils/docker_utils.py b/node_cli/utils/docker_utils.py index 71400b67..06427a20 100644 --- a/node_cli/utils/docker_utils.py +++ b/node_cli/utils/docker_utils.py @@ -87,6 +87,8 @@ 'fair-admin': 'fair_admin', 'fair-api': 'fair_api', 'nginx': 'skale_nginx', + 'watchdog': 'skale_watchdog', + 'filebeat': 'skale_filebeat', **REDIS_SERVICE_DICT, } diff --git a/tests/.skale/node_data/.gitkeep b/tests/.skale/node_data/.gitkeep deleted file mode 100644 index e69de29b..00000000 diff --git a/tests/routes_test.py b/tests/routes_test.py index 7a1216a8..19845fba 100644 --- a/tests/routes_test.py +++ b/tests/routes_test.py @@ -38,6 +38,7 @@ '/api/v1/fair-node/exit', '/api/v1/fair-chain/record', '/api/v1/fair-chain/checks', + '/api/v1/fair-node-passive/setup', ] diff --git a/text.yml b/text.yml index bde70b7f..4291e475 100644 --- a/text.yml +++ b/text.yml @@ -67,9 +67,6 @@ passive_node: archive: Enable historic state and disable block rotation snapshot_from: IP of the node to take snapshot from snapshot: Start passive node from snapshot - setup: - help: Setup passive Fair node - id: ID of the node in Fair manager lvmpy: help: Lvmpy commands @@ -89,6 +86,10 @@ fair: repair_requested: Repair mode is requested not_inited: Node should be initialized to proceed with operation + setup: + help: Setup passive Fair node + id: ID of the node in Fair manager + registered: Node is registered in Fair manager. setup_complete: Passive node setup complete. 
register: From 34762f143001499122b25f13f0ffb0293d913815 Mon Sep 17 00:00:00 2001 From: Dmytro Date: Thu, 14 Aug 2025 19:47:30 +0100 Subject: [PATCH 192/332] Add passive node options FAIR, add id to init command --- node_cli/cli/fair_node.py | 2 +- node_cli/cli/passive_fair_node.py | 33 ++++++++++++++++++++++++++++--- node_cli/configs/__init__.py | 2 +- node_cli/core/node_options.py | 12 +++++++++++ node_cli/fair/common.py | 27 +++++++++++++++++++++---- node_cli/operations/base.py | 10 ++-------- node_cli/operations/fair.py | 29 +++++++++++++++++++++------ node_cli/utils/docker_utils.py | 6 ++---- text.yml | 2 +- 9 files changed, 95 insertions(+), 28 deletions(-) diff --git a/node_cli/cli/fair_node.py b/node_cli/cli/fair_node.py index 07533802..4a289a28 100644 --- a/node_cli/cli/fair_node.py +++ b/node_cli/cli/fair_node.py @@ -136,7 +136,7 @@ def migrate_node(env_filepath: str) -> None: @node.command('repair', help='Toggle fair chain repair mode') @click.option( - '--snapshot-from', + '--snapshot', type=URL_OR_ANY_TYPE, default='any', hidden=True, diff --git a/node_cli/cli/passive_fair_node.py b/node_cli/cli/passive_fair_node.py index 046ff41f..dc32c394 100644 --- a/node_cli/cli/passive_fair_node.py +++ b/node_cli/cli/passive_fair_node.py @@ -23,7 +23,12 @@ from node_cli.fair.common import update as update_fair from node_cli.fair.common import cleanup as cleanup_fair from node_cli.fair.passive import setup_fair_passive -from node_cli.utils.helper import abort_if_false, streamed_cmd +from node_cli.utils.helper import ( + URL_OR_ANY_TYPE, + abort_if_false, + error_exit, + streamed_cmd, +) from node_cli.utils.node_type import NodeMode from node_cli.utils.texts import safe_load_texts @@ -42,9 +47,31 @@ def passive_node(): @passive_node.command('init', help='Initialize a passive Fair node') @click.argument('env_filepath') +@click.option('--id', required=True, type=int, help=TEXTS['fair']['node']['setup']['id']) +@click.option('--indexer', 
help=TEXTS['passive_node']['init']['indexer'], is_flag=True) +@click.option('--archive', help=TEXTS['passive_node']['init']['archive'], is_flag=True) +@click.option( + '--snapshot', + type=URL_OR_ANY_TYPE, + default=None, + help=TEXTS['passive_node']['init']['snapshot_from'], +) @streamed_cmd -def init_passive_node(env_filepath: str): - init_fair(node_mode=NodeMode.PASSIVE, env_filepath=env_filepath) +def init_passive_node( + env_filepath: str, id: int, indexer: bool, archive: bool, snapshot: str | None +): + if indexer and archive: + error_exit('Cannot use both --indexer and --archive options') + if (indexer or archive) and snapshot == 'any': + error_exit('Cannot use any for indexer/archive node') + init_fair( + node_mode=NodeMode.PASSIVE, + env_filepath=env_filepath, + node_id=id, + indexer=indexer, + archive=archive, + snapshot=snapshot, + ) @passive_node.command('update', help='Update Fair node') diff --git a/node_cli/configs/__init__.py b/node_cli/configs/__init__.py index 8fbecb04..5a09dd83 100644 --- a/node_cli/configs/__init__.py +++ b/node_cli/configs/__init__.py @@ -57,7 +57,6 @@ COMPOSE_PATH = os.path.join(CONTAINER_CONFIG_PATH, 'docker-compose.yml') PASSIVE_COMPOSE_PATH = os.path.join(CONTAINER_CONFIG_PATH, 'docker-compose-passive.yml') FAIR_COMPOSE_PATH = os.path.join(CONTAINER_CONFIG_PATH, 'docker-compose-fair.yml') -PASSIVE_FAIR_COMPOSE_PATH = os.path.join(CONTAINER_CONFIG_PATH, 'docker-compose-fair-passive.yml') STATIC_PARAMS_FILEPATH = os.path.join(CONTAINER_CONFIG_PATH, 'static_params.yaml') FAIR_STATIC_PARAMS_FILEPATH = os.path.join(CONTAINER_CONFIG_PATH, 'fair_static_params.yaml') @@ -141,6 +140,7 @@ def _get_env(): TM_INIT_TIMEOUT = 20 RESTORE_SLEEP_TIMEOUT = 20 +INIT_TIMEOUT = 20 META_FILEPATH = os.path.join(NODE_DATA_PATH, 'meta.json') diff --git a/node_cli/core/node_options.py b/node_cli/core/node_options.py index fc766f3a..764e3b5f 100644 --- a/node_cli/core/node_options.py +++ b/node_cli/core/node_options.py @@ -89,6 +89,18 @@ def 
mark_passive_node() -> None: logger.info('Node marked as passive.') +def set_passive_node_options( + archive: bool, + indexer: bool, +) -> None: + node_options = NodeOptions() + node_options.node_mode = NodeMode.PASSIVE + node_options.archive = archive or indexer + node_options.catchup = archive or indexer + node_options.historic_state = archive + logger.info('Node options set for passive mode.') + + class NodeModeMismatchError(Exception): pass diff --git a/node_cli/fair/common.py b/node_cli/fair/common.py index 52524421..0a67f3e0 100644 --- a/node_cli/fair/common.py +++ b/node_cli/fair/common.py @@ -20,11 +20,12 @@ import time import logging -from node_cli.configs import RESTORE_SLEEP_TIMEOUT, SKALE_DIR +from node_cli.configs import INIT_TIMEOUT, SKALE_DIR from node_cli.configs.user import SKALE_DIR_ENV_FILEPATH from node_cli.core.docker_config import cleanup_docker_configuration from node_cli.core.node import compose_node_env, is_base_containers_alive from node_cli.core.node_options import upsert_node_mode +from node_cli.fair.passive import setup_fair_passive from node_cli.operations import ( FairUpdateType, cleanup_fair_op, @@ -45,17 +46,35 @@ @check_not_inited -def init(node_mode: NodeMode, env_filepath: str) -> None: +def init( + node_mode: NodeMode, + env_filepath: str, + node_id: int | None = None, + indexer: bool = False, + archive: bool = False, + snapshot: str | None = None, +) -> None: env = compose_node_env(env_filepath, node_type=NodeType.FAIR, node_mode=node_mode) if env is None: return save_env_params(env_filepath) env['SKALE_DIR'] = SKALE_DIR - init_ok = init_fair_op(env_filepath, env, node_mode=node_mode) + init_ok = init_fair_op( + env_filepath, + env, + node_mode=node_mode, + indexer=indexer, + archive=archive, + snapshot=snapshot, + ) if not init_ok: error_exit('Init operation failed', exit_code=CLIExitCodes.OPERATION_EXECUTION_ERROR) - time.sleep(RESTORE_SLEEP_TIMEOUT) + time.sleep(INIT_TIMEOUT) + + if node_mode == NodeMode.PASSIVE and node_id 
is not None: + setup_fair_passive(node_id) + print('Fair node is initialized') diff --git a/node_cli/operations/base.py b/node_cli/operations/base.py index b9d2cc09..c60222a3 100644 --- a/node_cli/operations/base.py +++ b/node_cli/operations/base.py @@ -42,9 +42,8 @@ from node_cli.core.nftables import configure_nftables from node_cli.core.nginx import generate_nginx_config from node_cli.core.node_options import ( - NodeOptions, mark_active_node, - mark_passive_node, + set_passive_node_options, upsert_node_mode, ) from node_cli.core.resources import init_shared_space_volume, update_resource_allocation @@ -289,12 +288,7 @@ def init_passive( env_type=env['ENV_TYPE'], ) - node_options = NodeOptions() - node_options.archive = archive or indexer - node_options.catchup = archive or indexer - node_options.historic_state = archive - - mark_passive_node() + set_passive_node_options(archive=archive, indexer=indexer) ensure_filestorage_mapping() link_env_file() diff --git a/node_cli/operations/fair.py b/node_cli/operations/fair.py index d3a04e20..89fef47c 100644 --- a/node_cli/operations/fair.py +++ b/node_cli/operations/fair.py @@ -38,7 +38,7 @@ from node_cli.core.nginx import generate_nginx_config from node_cli.core.schains import cleanup_no_lvm_datadir from node_cli.core.static_config import get_fair_chain_name -from node_cli.core.node_options import upsert_node_mode +from node_cli.core.node_options import set_passive_node_options, upsert_node_mode from node_cli.fair.record.chain_record import ( get_fair_chain_record, migrate_chain_record, @@ -53,6 +53,7 @@ ) from node_cli.operations.volume import cleanup_volume_artifacts, prepare_block_device from node_cli.utils.docker_utils import ( + BASE_PASSIVE_FAIR_COMPOSE_SERVICES, REDIS_SERVICE_DICT, REDIS_START_TIMEOUT, compose_rm, @@ -79,7 +80,14 @@ class FairUpdateType(Enum): @checked_host -def init(env_filepath: str, env: dict, node_mode: NodeMode) -> bool: +def init( + env_filepath: str, + env: dict, + node_mode: NodeMode, + 
indexer: bool, + archive: bool, + snapshot: str | None, +) -> bool: sync_skale_node() ensure_btrfs_kernel_module_autoloaded() cleanup_volume_artifacts(env['DISK_MOUNTPOINT']) @@ -94,8 +102,18 @@ def init(env_filepath: str, env: dict, node_mode: NodeMode) -> bool: prepare_host(env_filepath, env_type=env['ENV_TYPE']) link_env_file() - upsert_node_mode(node_mode=node_mode) + update_images(env=env, node_type=NodeType.FAIR, node_mode=node_mode) + compose_up( + env=env, node_type=NodeType.FAIR, node_mode=node_mode, services=list(REDIS_SERVICE_DICT) + ) + + upsert_node_mode(node_mode=node_mode) + if node_mode == NodeMode.PASSIVE: + set_passive_node_options(archive=archive, indexer=indexer) + if snapshot: + time.sleep(REDIS_START_TIMEOUT) + trigger_skaled_snapshot_mode(env=env, snapshot_from=snapshot) prepare_block_device(env['DISK_MOUNTPOINT'], force=env['ENFORCE_BTRFS'] == 'True') meta_manager = FairCliMetaManager() @@ -105,9 +123,9 @@ def init(env_filepath: str, env: dict, node_mode: NodeMode) -> bool: distro.id(), distro.version(), ) - update_images(env=env, node_type=NodeType.FAIR, node_mode=node_mode) + compose_up(env=env, node_type=NodeType.FAIR, node_mode=node_mode) - wait_for_container(REDIS_SERVICE_DICT['redis']) + wait_for_container(BASE_PASSIVE_FAIR_COMPOSE_SERVICES['fair-api']) time.sleep(REDIS_START_TIMEOUT) return True @@ -273,7 +291,6 @@ def trigger_skaled_snapshot_mode(env: dict, snapshot_from: str = 'any') -> None: if not snapshot_from: snapshot_from = 'any' record.set_snapshot_from(snapshot_from) - print(TEXTS['fair']['node']['repair']['repair_requested']) def repair(node_mode: NodeMode, env: dict, snapshot_from: str = 'any') -> None: diff --git a/node_cli/utils/docker_utils.py b/node_cli/utils/docker_utils.py index 06427a20..3aa46e56 100644 --- a/node_cli/utils/docker_utils.py +++ b/node_cli/utils/docker_utils.py @@ -33,7 +33,6 @@ COMPOSE_PATH, FAIR_COMPOSE_PATH, NGINX_CONTAINER_NAME, - PASSIVE_FAIR_COMPOSE_PATH, REMOVED_CONTAINERS_FOLDER_PATH, 
SGX_CERTIFICATES_DIR_NAME, PASSIVE_COMPOSE_PATH, @@ -298,10 +297,8 @@ def compose_build(env: dict, node_type: NodeType, node_mode: NodeMode): def get_compose_path(node_type: NodeType, node_mode: NodeMode) -> str: if passive_skale(node_type, node_mode): return PASSIVE_COMPOSE_PATH - elif active_fair(node_type, node_mode): + elif active_fair(node_type, node_mode) or passive_fair(node_type, node_mode): return FAIR_COMPOSE_PATH - elif passive_fair(node_type, node_mode): - return PASSIVE_FAIR_COMPOSE_PATH return COMPOSE_PATH @@ -333,6 +330,7 @@ def compose_up( is_fair_boot: bool = False, services: list[str] | None = None, ): + env['PASSIVE_NODE'] = str(node_mode == NodeMode.PASSIVE) if passive_skale(node_type, node_mode) or passive_fair(node_type, node_mode): logger.info('Running containers for passive node') run_cmd(cmd=get_up_compose_cmd(node_type=node_type, node_mode=node_mode), env=env) diff --git a/text.yml b/text.yml index 4291e475..c0cbe23a 100644 --- a/text.yml +++ b/text.yml @@ -82,7 +82,7 @@ fair: repair: help: Repair Fair chain node warning: Are you sure you want to repair Fair chain node? 
In rare cases may cause data loss and require additional maintenance - snapshot_from: IP of the node to take snapshot from + snapshot_from: IP of the node to take snapshot from (put "any" to use any available node) repair_requested: Repair mode is requested not_inited: Node should be initialized to proceed with operation From 46291315d9968f42b7c1733606446bc06eaf5fa3 Mon Sep 17 00:00:00 2001 From: Dmytro Date: Thu, 14 Aug 2025 19:51:38 +0100 Subject: [PATCH 193/332] Fix ruff check --- node_cli/operations/fair.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/node_cli/operations/fair.py b/node_cli/operations/fair.py index 89fef47c..40720a91 100644 --- a/node_cli/operations/fair.py +++ b/node_cli/operations/fair.py @@ -67,7 +67,7 @@ ) from node_cli.utils.helper import cleanup_dir_content, rm_dir, str_to_bool from node_cli.utils.meta import FairCliMetaManager -from node_cli.utils.print_formatters import TEXTS, print_failed_requirements_checks +from node_cli.utils.print_formatters import print_failed_requirements_checks from node_cli.utils.node_type import NodeMode, NodeType logger = logging.getLogger(__name__) From d623dc35a5100826ec64cd9a402051012d02d988 Mon Sep 17 00:00:00 2001 From: Dmytro Date: Fri, 15 Aug 2025 01:12:19 +0100 Subject: [PATCH 194/332] Fix fair node tests --- tests/conftest.py | 23 +++++++++++++++++++---- tests/fair/fair_node_test.py | 13 ++++++++----- 2 files changed, 27 insertions(+), 9 deletions(-) diff --git a/tests/conftest.py b/tests/conftest.py index 2e10a9a4..a877c32b 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -121,7 +121,10 @@ def resource_alloc(): with open(RESOURCE_ALLOCATION_FILEPATH, 'w') as alloc_file: json.dump({}, alloc_file) yield RESOURCE_ALLOCATION_FILEPATH - os.remove(RESOURCE_ALLOCATION_FILEPATH) + try: + os.remove(RESOURCE_ALLOCATION_FILEPATH) + except FileNotFoundError: + pass @pytest.fixture @@ -132,7 +135,10 @@ def inited_node(): try: yield finally: - os.remove(NGINX_CONFIG_FILEPATH) + try: + 
os.remove(NGINX_CONFIG_FILEPATH) + except FileNotFoundError: + pass @pytest.fixture @@ -158,7 +164,13 @@ def active_node_option(): try: yield finally: - shutil.rmtree(NODE_OPTIONS_FILEPATH) + try: + if os.path.isdir(NODE_OPTIONS_FILEPATH): + shutil.rmtree(NODE_OPTIONS_FILEPATH) + elif os.path.isfile(NODE_OPTIONS_FILEPATH): + os.remove(NODE_OPTIONS_FILEPATH) + except FileNotFoundError: + pass @pytest.fixture @@ -226,7 +238,10 @@ def meta_file_v3(): try: yield META_FILEPATH finally: - os.remove(META_FILEPATH) + try: + os.remove(META_FILEPATH) + except FileNotFoundError: + pass @pytest.fixture diff --git a/tests/fair/fair_node_test.py b/tests/fair/fair_node_test.py index 861267ca..9dc14983 100644 --- a/tests/fair/fair_node_test.py +++ b/tests/fair/fair_node_test.py @@ -151,9 +151,12 @@ def test_cleanup_success( cleanup() mock_compose_env.assert_called_once_with( - SKALE_DIR_ENV_FILEPATH, save=False, node_type=NodeType.FAIR + SKALE_DIR_ENV_FILEPATH, + save=False, + node_type=NodeType.FAIR, + node_mode=NodeMode.ACTIVE, ) - mock_cleanup_fair_op.assert_called_once_with(mock_env) + mock_cleanup_fair_op.assert_called_once_with(node_mode=NodeMode.ACTIVE, env=mock_env) mock_cleanup_docker_config.assert_called_once() @@ -184,8 +187,8 @@ def test_cleanup_calls_operations_in_correct_order( cleanup() expected_calls = [ - mock.call.compose_env(mock.ANY, save=False, node_type=mock.ANY), - mock.call.cleanup_fair_op(mock_env), + mock.call.compose_env(mock.ANY, save=False, node_type=mock.ANY, node_mode=NodeMode.ACTIVE), + mock.call.cleanup_fair_op(node_mode=NodeMode.ACTIVE, env=mock_env), mock.call.cleanup_docker_config(), ] manager.assert_has_calls(expected_calls, any_order=False) @@ -211,7 +214,7 @@ def test_cleanup_continues_after_fair_op_error( cleanup() mock_compose_env.assert_called_once() - mock_cleanup_fair_op.assert_called_once_with(mock_env) + mock_cleanup_fair_op.assert_called_once_with(node_mode=NodeMode.ACTIVE, env=mock_env) mock_cleanup_docker_config.assert_not_called() 
From f5befb7cb798dc0e63e9c87bae166104b02b4ee1 Mon Sep 17 00:00:00 2001 From: Dmytro Date: Mon, 18 Aug 2025 11:23:02 +0100 Subject: [PATCH 195/332] Fix disk mounting in fair node init --- README.md | 56 ++++++++++++++++++++++++++++++++++++ node_cli/operations/fair.py | 3 +- tests/cli/node_test.py | 7 ++--- tests/fair/fair_node_test.py | 13 +++++++-- 4 files changed, 72 insertions(+), 7 deletions(-) diff --git a/README.md b/README.md index 7484ad2b..558bc61a 100644 --- a/README.md +++ b/README.md @@ -32,6 +32,7 @@ SKALE Node CLI, part of the SKALE suite of validator tools, is the command line 5. [Fair Wallet commands](#fair-wallet-commands) 6. [Fair Logs commands](#fair-logs-commands) 7. [Fair SSL commands](#fair-ssl-commands) + 8. [Passive Fair Node commands](#passive-fair-node-commands) 5. [Exit codes](#exit-codes) 6. [Development](#development) @@ -1076,6 +1077,61 @@ Options: * `--no-client` - Skip client connection for openssl check. * `--no-wss` - Skip WSS server starting for skaled check. +### Passive Fair Node commands + +> Prefix: `fair passive-node` (passive Fair build) + +Commands for operating a passive Fair node (sync/indexer/archive). + +#### Passive Fair Node Initialization + +Initialize a passive Fair node. + +```shell +fair passive-node init --id [--indexer | --archive] [--snapshot ] +``` + +Arguments: + +* `ENV_FILEPATH` - Path to the environment file with configuration. + +Required environment variables in `ENV_FILEPATH`: + +* `FAIR_CONTRACTS` - Fair Manager contracts alias or address. +* `NODE_VERSION` - Stream of `skale-node` configs. +* `BOOT_ENDPOINT` - RPC endpoint of Fair network. +* `DISK_MOUNTPOINT` - Mount point for storing chain data. +* `ENV_TYPE` - Environment type (e.g., `mainnet`, `devnet`). + +Options: + +* `--id` - Numerical node identifier (required). +* `--indexer` - Run in indexer mode (no block rotation). +* `--archive` - Run in archive mode (historical state kept; disables block rotation). Mutually exclusive with `--indexer`. 
+* `--snapshot ` - Start from provided snapshot URL or from any available source (not allowed together with `--indexer` or `--archive`). + +By default runs a regular sync node. + +#### Passive Fair Node Update + +Update software / configs for passive Fair node. + +```shell +fair passive-node update [--yes] +``` + +#### Passive Fair Node Cleanup + +Remove all passive Fair node data and containers. + +```shell +fair passive-node cleanup [--yes] +``` + +Options: + +* `--yes` - Proceed without confirmation. + *** ## Exit codes diff --git a/node_cli/operations/fair.py b/node_cli/operations/fair.py index 40720a91..772791f3 100644 --- a/node_cli/operations/fair.py +++ b/node_cli/operations/fair.py @@ -103,6 +103,8 @@ def init( prepare_host(env_filepath, env_type=env['ENV_TYPE']) link_env_file() + prepare_block_device(env['DISK_MOUNTPOINT'], force=env['ENFORCE_BTRFS'] == 'True') + update_images(env=env, node_type=NodeType.FAIR, node_mode=node_mode) compose_up( env=env, node_type=NodeType.FAIR, node_mode=node_mode, services=list(REDIS_SERVICE_DICT) @@ -114,7 +116,6 @@ def init( if snapshot: time.sleep(REDIS_START_TIMEOUT) trigger_skaled_snapshot_mode(env=env, snapshot_from=snapshot) - prepare_block_device(env['DISK_MOUNTPOINT'], force=env['ENFORCE_BTRFS'] == 'True') meta_manager = FairCliMetaManager() meta_manager.update_meta( diff --git a/tests/cli/node_test.py b/tests/cli/node_test.py index 13d3d0c5..3361e083 100644 --- a/tests/cli/node_test.py +++ b/tests/cli/node_test.py @@ -325,7 +325,6 @@ def test_backup(): 'node_type,node_mode,test_user_conf', [ (NodeType.SKALE, NodeMode.ACTIVE, 'regular_user_conf'), - (NodeType.SKALE, NodeMode.PASSIVE, 'passive_user_conf'), (NodeType.FAIR, NodeMode.ACTIVE, 'fair_user_conf'), ], ) @@ -350,7 +349,6 @@ def test_restore(request, node_type, node_mode, test_user_conf, mocked_g_config, patch('node_cli.configs.user.validate_alias_or_address'), ): user_conf_path = request.getfixturevalue(test_user_conf).as_posix() - result = 
run_command(restore_node, [backup_path, user_conf_path]) assert result.exit_code == 0 assert 'Node is restored from backup\n' in result.output # noqa @@ -386,7 +384,7 @@ def test_maintenance_off(mocked_g_config): ) -def test_turn_off_maintenance_on(mocked_g_config, regular_user_conf): +def test_turn_off_maintenance_on(mocked_g_config, regular_user_conf, active_node_option): resp_mock = response_mock(requests.codes.ok, {'status': 'ok', 'payload': None}) with ( mock.patch('subprocess.run', new=subprocess_run_mock), @@ -402,6 +400,7 @@ def test_turn_off_maintenance_on(mocked_g_config, regular_user_conf): _turn_off, ['--maintenance-on', '--yes'], ) + assert ( result.output == 'Setting maintenance mode on...\nNode is successfully set in maintenance mode\n' @@ -418,7 +417,7 @@ def test_turn_off_maintenance_on(mocked_g_config, regular_user_conf): assert result.exit_code == CLIExitCodes.UNSAFE_UPDATE -def test_turn_on_maintenance_off(mocked_g_config, regular_user_conf): +def test_turn_on_maintenance_off(mocked_g_config, regular_user_conf, active_node_option): resp_mock = response_mock(requests.codes.ok, {'status': 'ok', 'payload': None}) with ( mock.patch('subprocess.run', new=subprocess_run_mock), diff --git a/tests/fair/fair_node_test.py b/tests/fair/fair_node_test.py index 9dc14983..7b621499 100644 --- a/tests/fair/fair_node_test.py +++ b/tests/fair/fair_node_test.py @@ -23,6 +23,7 @@ def test_restore_fair( mock_sleep, valid_env_file, ensure_meta_removed, + active_node_option, ): mock_env = {'ENV_TYPE': 'devnet'} mock_compose_env.return_value = mock_env @@ -31,11 +32,16 @@ def test_restore_fair( restore(backup_path, valid_env_file) - mock_compose_env.assert_called_once_with(valid_env_file, node_type=NodeType.FAIR) + mock_compose_env.assert_called_once_with( + valid_env_file, node_type=NodeType.FAIR, node_mode=NodeMode.ACTIVE + ) mock_save_env.assert_called_once_with(valid_env_file) expected_env = {**mock_env, 'SKALE_DIR': SKALE_DIR} 
mock_restore_op.assert_called_once_with( - NodeMode.ACTIVE, expected_env, backup_path, config_only=False + node_mode=NodeMode.ACTIVE, + env=expected_env, + backup_path=backup_path, + config_only=False, ) mock_sleep.assert_called_once() @@ -60,6 +66,7 @@ def test_init_fair_boot( mock_compose_env.assert_called_once_with( valid_env_file, node_type=NodeType.FAIR, + node_mode=NodeMode.ACTIVE, is_fair_boot=True, ) mock_init_op.assert_called_once_with(valid_env_file, mock_env) @@ -96,6 +103,7 @@ def test_update_fair_boot( sync_schains=False, pull_config_for_schain=pull_config_for_schain, node_type=NodeType.FAIR, + node_mode=NodeMode.ACTIVE, is_fair_boot=True, ) mock_update_op.assert_called_once_with(valid_env_file, mock_env) @@ -126,6 +134,7 @@ def test_migrate_from_boot( inited_node=True, sync_schains=False, node_type=NodeType.FAIR, + node_mode=NodeMode.ACTIVE, ) mock_migrate_op.assert_called_once_with( valid_env_file, mock_env, update_type=FairUpdateType.FROM_BOOT, force_skaled_start=False From 48d587c928e7dd8dbba465752ad3cdbae5eb04e9 Mon Sep 17 00:00:00 2001 From: Dmytro Date: Mon, 18 Aug 2025 11:48:09 +0100 Subject: [PATCH 196/332] Update fair node tests --- tests/fair/fair_node_test.py | 14 +++++++++++--- 1 file changed, 11 insertions(+), 3 deletions(-) diff --git a/tests/fair/fair_node_test.py b/tests/fair/fair_node_test.py index 7b621499..be487b8f 100644 --- a/tests/fair/fair_node_test.py +++ b/tests/fair/fair_node_test.py @@ -71,7 +71,9 @@ def test_init_fair_boot( ) mock_init_op.assert_called_once_with(valid_env_file, mock_env) mock_sleep.assert_called_once() - mock_is_alive.assert_called_once_with(node_type=NodeType.FAIR, is_fair_boot=True) + mock_is_alive.assert_called_once_with( + node_type=NodeType.FAIR, node_mode=NodeMode.ACTIVE, is_fair_boot=True + ) @mock.patch('node_cli.utils.decorators.is_user_valid', return_value=True) @@ -108,7 +110,9 @@ def test_update_fair_boot( ) mock_update_op.assert_called_once_with(valid_env_file, mock_env) 
mock_sleep.assert_called_once() - mock_is_alive.assert_called_once_with(node_type=NodeType.FAIR, is_fair_boot=True) + mock_is_alive.assert_called_once_with( + node_type=NodeType.FAIR, node_mode=NodeMode.ACTIVE, is_fair_boot=True + ) @mock.patch('node_cli.fair.active.update_fair_op') @@ -137,7 +141,11 @@ def test_migrate_from_boot( node_mode=NodeMode.ACTIVE, ) mock_migrate_op.assert_called_once_with( - valid_env_file, mock_env, update_type=FairUpdateType.FROM_BOOT, force_skaled_start=False + valid_env_file, + mock_env, + node_mode=NodeMode.ACTIVE, + update_type=FairUpdateType.FROM_BOOT, + force_skaled_start=False, ) From 20aa725fee8a15f26843bca95e1fcdc3b0e10442 Mon Sep 17 00:00:00 2001 From: Dmytro Date: Mon, 18 Aug 2025 12:09:19 +0100 Subject: [PATCH 197/332] Update fair node tests --- tests/fair/fair_node_test.py | 1 + 1 file changed, 1 insertion(+) diff --git a/tests/fair/fair_node_test.py b/tests/fair/fair_node_test.py index be487b8f..0fb907d8 100644 --- a/tests/fair/fair_node_test.py +++ b/tests/fair/fair_node_test.py @@ -223,6 +223,7 @@ def test_cleanup_continues_after_fair_op_error( inited_node, resource_alloc, meta_file_v3, + active_node_option, ): mock_env = {'ENV_TYPE': 'devnet'} mock_compose_env.return_value = mock_env From d26de21f6cc545363ebff2269153a23af5de5aba Mon Sep 17 00:00:00 2001 From: Dmytro Date: Tue, 19 Aug 2025 11:43:13 +0100 Subject: [PATCH 198/332] Skip host requirements for fair passive nodes, update tests --- node_cli/cli/fair_node.py | 4 ++-- node_cli/cli/passive_fair_node.py | 2 +- node_cli/core/checks.py | 16 +++++++++++----- node_cli/fair/common.py | 4 ++-- tests/core/core_checks_test.py | 14 +++++++------- 5 files changed, 23 insertions(+), 17 deletions(-) diff --git a/node_cli/cli/fair_node.py b/node_cli/cli/fair_node.py index 4a289a28..7022f51d 100644 --- a/node_cli/cli/fair_node.py +++ b/node_cli/cli/fair_node.py @@ -22,7 +22,7 @@ from node_cli.core.node import backup from node_cli.fair.active import change_ip as change_ip_fair 
-from node_cli.fair.common import cleanup as fair_cleanup +from node_cli.fair.common import cleanup as cleanup_fair from node_cli.fair.active import exit as exit_fair from node_cli.fair.active import ( get_node_info, @@ -164,7 +164,7 @@ def repair(snapshot_from: str = 'any') -> None: ) @streamed_cmd def cleanup_node(): - fair_cleanup() + cleanup_fair(node_mode=NodeMode.ACTIVE) @node.command('change-ip', help=TEXTS['fair']['node']['change-ip']['help']) diff --git a/node_cli/cli/passive_fair_node.py b/node_cli/cli/passive_fair_node.py index dc32c394..487ded44 100644 --- a/node_cli/cli/passive_fair_node.py +++ b/node_cli/cli/passive_fair_node.py @@ -112,7 +112,7 @@ def update_node(env_filepath: str, pull_config_for_schain, force_skaled_start: b ) @streamed_cmd def cleanup_node(): - cleanup_fair() + cleanup_fair(node_mode=NodeMode.PASSIVE) @passive_node.command('setup', help=TEXTS['fair']['node']['setup']['help']) diff --git a/node_cli/core/checks.py b/node_cli/core/checks.py index 929cc60b..d72c4153 100644 --- a/node_cli/core/checks.py +++ b/node_cli/core/checks.py @@ -56,10 +56,12 @@ REPORTS_PATH, ) from node_cli.core.host import is_ufw_ipv6_chain_exists, is_ufw_ipv6_option_enabled +from node_cli.core.node_options import upsert_node_mode from node_cli.core.resources import get_disk_size from node_cli.core.static_config import get_static_params from node_cli.utils.docker_utils import NodeType from node_cli.utils.helper import run_cmd, safe_mkdir +from node_cli.utils.node_type import NodeMode logger = logging.getLogger(__name__) @@ -272,7 +274,7 @@ class PackageChecker(BaseChecker): def __init__(self, requirements: Dict) -> None: super().__init__(requirements=requirements) - def _check_apt_package(self, package_name: str, version: str = None) -> CheckResult: + def _check_apt_package(self, package_name: str, version: str | None = None) -> CheckResult: # TODO: check versions dpkg_cmd_result = run_cmd(['dpkg', '-s', package_name], check_code=False) output = 
dpkg_cmd_result.stdout.decode('utf-8').strip() @@ -457,12 +459,14 @@ def get_checks(checkers: List[BaseChecker], check_type: CheckType = CheckType.AL ) -def get_all_checkers(disk: str, requirements: Dict) -> List[BaseChecker]: - return [ - MachineChecker(requirements['server'], disk), +def get_all_checkers(disk: str, requirements: Dict, node_mode: NodeMode) -> List[BaseChecker]: + checkers = [ PackageChecker(requirements['package']), DockerChecker(requirements['docker']), ] + if node_mode == NodeMode.ACTIVE: + checkers.append(MachineChecker(requirements['server'], disk)) + return checkers def run_checks( @@ -474,7 +478,9 @@ def run_checks( ) -> ResultList: logger.info('Executing checks. Type: %s', check_type) requirements = get_static_params(node_type, env_type, config_path) - checkers = get_all_checkers(disk, requirements) + node_mode = upsert_node_mode() + + checkers = get_all_checkers(disk, requirements, node_mode) checks = get_checks(checkers, check_type) results = [check() for check in checks] diff --git a/node_cli/fair/common.py b/node_cli/fair/common.py index 0a67f3e0..ce24ad45 100644 --- a/node_cli/fair/common.py +++ b/node_cli/fair/common.py @@ -79,8 +79,8 @@ def init( @check_user -def cleanup() -> None: - node_mode = upsert_node_mode() +def cleanup(node_mode: NodeMode) -> None: + node_mode = upsert_node_mode(node_mode=node_mode) env = compose_node_env( SKALE_DIR_ENV_FILEPATH, save=False, node_type=NodeType.FAIR, node_mode=node_mode ) diff --git a/tests/core/core_checks_test.py b/tests/core/core_checks_test.py index 5206efe4..819116d7 100644 --- a/tests/core/core_checks_test.py +++ b/tests/core/core_checks_test.py @@ -21,7 +21,7 @@ save_report, ) -from node_cli.utils.node_type import NodeType +from node_cli.utils.node_type import NodeMode, NodeType @pytest.fixture @@ -318,18 +318,18 @@ def run_cmd_mock(*args, **kwargs): assert r.status == 'ok' -def test_get_all_checkers(requirements_data): +def test_get_all_checkers(requirements_data, active_node_option): 
disk = 'test-disk' - checkers = get_all_checkers(disk, requirements_data) + checkers = get_all_checkers(disk, requirements_data, node_mode=NodeMode.ACTIVE) assert len(checkers) == 3 assert isinstance(checkers[0], MachineChecker) assert isinstance(checkers[1], PackageChecker) assert isinstance(checkers[2], DockerChecker) -def test_get_checks(requirements_data): +def test_get_checks(requirements_data, active_node_option): disk = 'test-disk' - checkers = get_all_checkers(disk, requirements_data) + checkers = get_all_checkers(disk, requirements_data, node_mode=NodeMode.ACTIVE) checks = get_checks(checkers) assert len(checks) == 16 checks = get_checks(checkers, check_type=CheckType.PREINSTALL) @@ -338,9 +338,9 @@ def test_get_checks(requirements_data): assert len(checks) == 2 -def test_get_checks_fair(fair_requirements_data): +def test_get_checks_fair(fair_requirements_data, active_node_option): disk = 'test-disk' - fair_checkers = get_all_checkers(disk, fair_requirements_data) + fair_checkers = get_all_checkers(disk, fair_requirements_data, node_mode=NodeMode.ACTIVE) fair_all_checks = get_checks(fair_checkers, CheckType.ALL) fair_all_names = {f.func.__name__ for f in fair_all_checks} From bf871da6691751e549634606aeff4b8d71ec4381 Mon Sep 17 00:00:00 2001 From: Dmytro Date: Tue, 19 Aug 2025 13:33:22 +0100 Subject: [PATCH 199/332] Fix cleanup and checkers tests --- tests/core/core_checks_test.py | 6 +++--- tests/fair/fair_node_test.py | 13 +++++++------ 2 files changed, 10 insertions(+), 9 deletions(-) diff --git a/tests/core/core_checks_test.py b/tests/core/core_checks_test.py index 819116d7..f06efb9e 100644 --- a/tests/core/core_checks_test.py +++ b/tests/core/core_checks_test.py @@ -322,9 +322,9 @@ def test_get_all_checkers(requirements_data, active_node_option): disk = 'test-disk' checkers = get_all_checkers(disk, requirements_data, node_mode=NodeMode.ACTIVE) assert len(checkers) == 3 - assert isinstance(checkers[0], MachineChecker) - assert isinstance(checkers[1], 
PackageChecker) - assert isinstance(checkers[2], DockerChecker) + assert isinstance(checkers[0], PackageChecker) + assert isinstance(checkers[1], DockerChecker) + assert isinstance(checkers[2], MachineChecker) def test_get_checks(requirements_data, active_node_option): diff --git a/tests/fair/fair_node_test.py b/tests/fair/fair_node_test.py index 0fb907d8..e42766ff 100644 --- a/tests/fair/fair_node_test.py +++ b/tests/fair/fair_node_test.py @@ -161,11 +161,12 @@ def test_cleanup_success( inited_node, resource_alloc, meta_file_v3, + active_node_option, ): mock_env = {'ENV_TYPE': 'devnet'} mock_compose_env.return_value = mock_env - cleanup() + cleanup(node_mode=NodeMode.ACTIVE) mock_compose_env.assert_called_once_with( SKALE_DIR_ENV_FILEPATH, @@ -201,7 +202,7 @@ def test_cleanup_calls_operations_in_correct_order( manager.attach_mock(mock_cleanup_fair_op, 'cleanup_fair_op') manager.attach_mock(mock_cleanup_docker_config, 'cleanup_docker_config') - cleanup() + cleanup(node_mode=NodeMode.ACTIVE) expected_calls = [ mock.call.compose_env(mock.ANY, save=False, node_type=mock.ANY, node_mode=NodeMode.ACTIVE), @@ -229,7 +230,7 @@ def test_cleanup_continues_after_fair_op_error( mock_compose_env.return_value = mock_env with pytest.raises(Exception, match='Cleanup failed'): - cleanup() + cleanup(node_mode=NodeMode.ACTIVE) mock_compose_env.assert_called_once() mock_cleanup_fair_op.assert_called_once_with(node_mode=NodeMode.ACTIVE, env=mock_env) @@ -249,14 +250,14 @@ def test_cleanup_fails_when_user_invalid( from node_cli.fair.common import cleanup with pytest.raises(SystemExit): - cleanup() + cleanup(node_mode=NodeMode.ACTIVE) def test_cleanup_fails_when_not_inited(ensure_meta_removed, active_node_option): import pytest with pytest.raises(SystemExit): - cleanup() + cleanup(node_mode=NodeMode.ACTIVE) @mock.patch('node_cli.utils.decorators.is_user_valid', return_value=True) @@ -278,7 +279,7 @@ def test_cleanup_logs_success_message( mock_env = {'ENV_TYPE': 'devnet'} 
mock_compose_env.return_value = mock_env - cleanup() + cleanup(node_mode=NodeMode.ACTIVE) mock_logger.info.assert_called_once_with( 'Fair node was cleaned up, all containers and data removed' From 569fe0cbd258cb9f431d90a472f4036058dd4090 Mon Sep 17 00:00:00 2001 From: Dmytro Date: Tue, 19 Aug 2025 13:34:51 +0100 Subject: [PATCH 200/332] Add test for passive node checker --- tests/conftest.py | 8 +++++++- tests/core/core_checks_test.py | 8 ++++++++ 2 files changed, 15 insertions(+), 1 deletion(-) diff --git a/tests/conftest.py b/tests/conftest.py index a877c32b..047e35a0 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -184,7 +184,13 @@ def passive_node_option(): try: yield finally: - shutil.rmtree(NODE_OPTIONS_FILEPATH) + try: + if os.path.isdir(NODE_OPTIONS_FILEPATH): + shutil.rmtree(NODE_OPTIONS_FILEPATH) + elif os.path.isfile(NODE_OPTIONS_FILEPATH): + os.remove(NODE_OPTIONS_FILEPATH) + except FileNotFoundError: + pass @pytest.fixture diff --git a/tests/core/core_checks_test.py b/tests/core/core_checks_test.py index f06efb9e..aa986cc7 100644 --- a/tests/core/core_checks_test.py +++ b/tests/core/core_checks_test.py @@ -327,6 +327,14 @@ def test_get_all_checkers(requirements_data, active_node_option): assert isinstance(checkers[2], MachineChecker) +def test_get_all_checkers_passive(requirements_data, passive_node_option): + disk = 'test-disk' + checkers = get_all_checkers(disk, requirements_data, node_mode=NodeMode.PASSIVE) + assert len(checkers) == 2 + assert isinstance(checkers[0], PackageChecker) + assert isinstance(checkers[1], DockerChecker) + + def test_get_checks(requirements_data, active_node_option): disk = 'test-disk' checkers = get_all_checkers(disk, requirements_data, node_mode=NodeMode.ACTIVE) From e957437a067c45cbf14daed5dd34e9da260843ec Mon Sep 17 00:00:00 2001 From: Dmytro Date: Tue, 19 Aug 2025 16:28:29 +0100 Subject: [PATCH 201/332] Add node mode to checked host --- node_cli/core/checks.py | 2 +- node_cli/core/node.py | 2 +- 
node_cli/operations/base.py | 12 ++++++++---- node_cli/operations/fair.py | 6 ++++-- 4 files changed, 14 insertions(+), 8 deletions(-) diff --git a/node_cli/core/checks.py b/node_cli/core/checks.py index d72c4153..2a25319e 100644 --- a/node_cli/core/checks.py +++ b/node_cli/core/checks.py @@ -472,13 +472,13 @@ def get_all_checkers(disk: str, requirements: Dict, node_mode: NodeMode) -> List def run_checks( disk: str, node_type: NodeType, + node_mode: NodeMode, env_type: str = 'mainnet', config_path: str = CONTAINER_CONFIG_PATH, check_type: CheckType = CheckType.ALL, ) -> ResultList: logger.info('Executing checks. Type: %s', check_type) requirements = get_static_params(node_type, env_type, config_path) - node_mode = upsert_node_mode() checkers = get_all_checkers(disk, requirements, node_mode) checks = get_checks(checkers, check_type) diff --git a/node_cli/core/node.py b/node_cli/core/node.py index 79ac63c4..1437c86a 100644 --- a/node_cli/core/node.py +++ b/node_cli/core/node.py @@ -541,7 +541,7 @@ def run_checks( if disk is None: env_config = get_validated_user_config(node_type=node_type, node_mode=node_mode) disk = env_config.disk_mountpoint - failed_checks = run_host_checks(disk, node_type, network, container_config_path) + failed_checks = run_host_checks(disk, node_type, node_mode, network, container_config_path) if not failed_checks: print('Requirements checking successfully finished!') else: diff --git a/node_cli/operations/base.py b/node_cli/operations/base.py index c60222a3..01017d59 100644 --- a/node_cli/operations/base.py +++ b/node_cli/operations/base.py @@ -79,11 +79,12 @@ def checked_host(func): @functools.wraps(func) - def wrapper(env_filepath: str, env: Dict, *args, **kwargs): + def wrapper(env_filepath: str, env: Dict, node_mode: NodeMode, *args, **kwargs): download_skale_node(env.get('NODE_VERSION'), env.get('CONTAINER_CONFIGS_DIR')) failed_checks = run_host_checks( env['DISK_MOUNTPOINT'], TYPE, + node_mode, env['ENV_TYPE'], CONTAINER_CONFIG_TMP_PATH, 
check_type=CheckType.PREINSTALL, @@ -99,6 +100,7 @@ def wrapper(env_filepath: str, env: Dict, *args, **kwargs): failed_checks = run_host_checks( env['DISK_MOUNTPOINT'], TYPE, + node_mode, env['ENV_TYPE'], CONTAINER_CONFIG_PATH, check_type=CheckType.POSTINSTALL, @@ -155,8 +157,8 @@ def update(env_filepath: str, env: Dict, node_type: NodeType, node_mode: NodeMod @checked_host -def update_fair_boot(env_filepath: str, env: Dict) -> bool: - compose_rm(node_type=NodeType.FAIR, node_mode=NodeMode.ACTIVE, env=env) +def update_fair_boot(env_filepath: str, env: Dict, node_mode: NodeMode = NodeMode.ACTIVE) -> bool: + compose_rm(node_type=NodeType.FAIR, node_mode=node_mode, env=env) remove_dynamic_containers() cleanup_volume_artifacts(env['DISK_MOUNTPOINT']) @@ -233,7 +235,7 @@ def init(env_filepath: str, env: dict, node_type: NodeType, node_mode: NodeMode) @checked_host -def init_fair_boot(env_filepath: str, env: dict) -> None: +def init_fair_boot(env_filepath: str, env: dict, node_mode: NodeMode = NodeMode.ACTIVE) -> None: sync_skale_node() cleanup_volume_artifacts(env['DISK_MOUNTPOINT']) @@ -381,6 +383,7 @@ def restore(env, backup_path, node_type: NodeType, config_only=False): failed_checks = run_host_checks( env['DISK_MOUNTPOINT'], TYPE, + node_mode, env['ENV_TYPE'], CONTAINER_CONFIG_PATH, check_type=CheckType.PREINSTALL, @@ -415,6 +418,7 @@ def restore(env, backup_path, node_type: NodeType, config_only=False): failed_checks = run_host_checks( env['DISK_MOUNTPOINT'], TYPE, + node_mode, env['ENV_TYPE'], CONTAINER_CONFIG_PATH, check_type=CheckType.POSTINSTALL, diff --git a/node_cli/operations/fair.py b/node_cli/operations/fair.py index 772791f3..37274d79 100644 --- a/node_cli/operations/fair.py +++ b/node_cli/operations/fair.py @@ -132,8 +132,8 @@ def init( @checked_host -def update_fair_boot(env_filepath: str, env: dict) -> bool: - compose_rm(node_type=NodeType.FAIR, node_mode=NodeMode.ACTIVE, env=env) +def update_fair_boot(env_filepath: str, env: dict, node_mode: NodeMode 
= NodeMode.ACTIVE) -> bool: + compose_rm(node_type=NodeType.FAIR, node_mode=node_mode, env=env) remove_dynamic_containers() cleanup_volume_artifacts(env['DISK_MOUNTPOINT']) @@ -236,6 +236,7 @@ def restore(node_mode: NodeMode, env, backup_path, config_only=False): failed_checks = run_host_checks( env['DISK_MOUNTPOINT'], TYPE, + node_mode, env['ENV_TYPE'], CONTAINER_CONFIG_PATH, check_type=CheckType.PREINSTALL, @@ -268,6 +269,7 @@ def restore(node_mode: NodeMode, env, backup_path, config_only=False): failed_checks = run_host_checks( env['DISK_MOUNTPOINT'], TYPE, + node_mode, env['ENV_TYPE'], CONTAINER_CONFIG_PATH, check_type=CheckType.POSTINSTALL, From 70e56e4ec8555949ba7c00a3f72a351e5d5abe9b Mon Sep 17 00:00:00 2001 From: Dmytro Date: Tue, 19 Aug 2025 16:32:06 +0100 Subject: [PATCH 202/332] Fix ruff check --- node_cli/core/checks.py | 1 - 1 file changed, 1 deletion(-) diff --git a/node_cli/core/checks.py b/node_cli/core/checks.py index 2a25319e..9c312476 100644 --- a/node_cli/core/checks.py +++ b/node_cli/core/checks.py @@ -56,7 +56,6 @@ REPORTS_PATH, ) from node_cli.core.host import is_ufw_ipv6_chain_exists, is_ufw_ipv6_option_enabled -from node_cli.core.node_options import upsert_node_mode from node_cli.core.resources import get_disk_size from node_cli.core.static_config import get_static_params from node_cli.utils.docker_utils import NodeType From 3291fa8271c5c4fc3efb013f01a1bf3ca8c9abdb Mon Sep 17 00:00:00 2001 From: Dmytro Date: Tue, 19 Aug 2025 17:55:49 +0100 Subject: [PATCH 203/332] Fix fair boot op --- node_cli/fair/boot.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/node_cli/fair/boot.py b/node_cli/fair/boot.py index 9acf9b63..114bb59a 100644 --- a/node_cli/fair/boot.py +++ b/node_cli/fair/boot.py @@ -45,7 +45,7 @@ def init(env_filepath: str) -> None: is_fair_boot=True, ) - init_fair_boot_op(env_filepath, env) + init_fair_boot_op(env_filepath, env, node_mode) logger.info('Waiting for fair containers initialization') 
time.sleep(TM_INIT_TIMEOUT) if not is_base_containers_alive(node_type=node_type, node_mode=node_mode, is_fair_boot=True): From 432b5091093e54fc2abdf58b30f5d8a3c40b420b Mon Sep 17 00:00:00 2001 From: Dmytro Date: Tue, 19 Aug 2025 19:18:02 +0100 Subject: [PATCH 204/332] Fix fair boot commands --- node_cli/fair/boot.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/node_cli/fair/boot.py b/node_cli/fair/boot.py index 114bb59a..a6415f75 100644 --- a/node_cli/fair/boot.py +++ b/node_cli/fair/boot.py @@ -67,7 +67,7 @@ def update(env_filepath: str, pull_config_for_schain: str) -> None: node_mode=node_mode, is_fair_boot=True, ) - migrate_ok = update_fair_boot_op(env_filepath, env) + migrate_ok = update_fair_boot_op(env_filepath, env, node_mode=NodeMode.ACTIVE) if migrate_ok: logger.info('Waiting for containers initialization') time.sleep(TM_INIT_TIMEOUT) From 8425e4022c38b8fe82280574e7470122845cdac5 Mon Sep 17 00:00:00 2001 From: Dmytro Date: Tue, 19 Aug 2025 19:41:31 +0100 Subject: [PATCH 205/332] Fix fair boot tests --- tests/fair/fair_node_test.py | 12 ++++++++++-- 1 file changed, 10 insertions(+), 2 deletions(-) diff --git a/tests/fair/fair_node_test.py b/tests/fair/fair_node_test.py index e42766ff..c3789ff1 100644 --- a/tests/fair/fair_node_test.py +++ b/tests/fair/fair_node_test.py @@ -69,7 +69,11 @@ def test_init_fair_boot( node_mode=NodeMode.ACTIVE, is_fair_boot=True, ) - mock_init_op.assert_called_once_with(valid_env_file, mock_env) + mock_init_op.assert_called_once_with( + valid_env_file, + mock_env, + NodeMode.ACTIVE, + ) mock_sleep.assert_called_once() mock_is_alive.assert_called_once_with( node_type=NodeType.FAIR, node_mode=NodeMode.ACTIVE, is_fair_boot=True @@ -108,7 +112,11 @@ def test_update_fair_boot( node_mode=NodeMode.ACTIVE, is_fair_boot=True, ) - mock_update_op.assert_called_once_with(valid_env_file, mock_env) + mock_update_op.assert_called_once_with( + valid_env_file, + mock_env, + node_mode=NodeMode.ACTIVE, + ) 
mock_sleep.assert_called_once() mock_is_alive.assert_called_once_with( node_type=NodeType.FAIR, node_mode=NodeMode.ACTIVE, is_fair_boot=True From 6f24d6acc39bbd51262cdf1f4a914092b629061c Mon Sep 17 00:00:00 2001 From: Dmytro Date: Tue, 19 Aug 2025 20:17:35 +0100 Subject: [PATCH 206/332] Fix checked host wrapper --- node_cli/operations/base.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/node_cli/operations/base.py b/node_cli/operations/base.py index 01017d59..a856ef6f 100644 --- a/node_cli/operations/base.py +++ b/node_cli/operations/base.py @@ -93,7 +93,7 @@ def wrapper(env_filepath: str, env: Dict, node_mode: NodeMode, *args, **kwargs): print_failed_requirements_checks(failed_checks) return False - result = func(env_filepath, env, *args, **kwargs) + result = func(env_filepath, env, node_mode, *args, **kwargs) if not result: return result From 03a9eac54c753a9df6562cc3cf4f3dd1519d30cc Mon Sep 17 00:00:00 2001 From: badrogger Date: Wed, 20 Aug 2025 19:45:30 +0100 Subject: [PATCH 207/332] Add 311 port to ServicePort --- node_cli/core/nftables.py | 11 +++++++---- 1 file changed, 7 insertions(+), 4 deletions(-) diff --git a/node_cli/core/nftables.py b/node_cli/core/nftables.py index 50afff82..042ff2e1 100644 --- a/node_cli/core/nftables.py +++ b/node_cli/core/nftables.py @@ -22,9 +22,9 @@ import os import shutil import sys +from dataclasses import dataclass from pathlib import Path from typing import Optional -from dataclasses import dataclass from node_cli.configs import ( ENV, @@ -44,7 +44,8 @@ class ServicePort: DNS: int = 53 CADVISOR: int = 9100 EXPORTER: int = 8080 - WATCHDOG: int = 3009 + WATCHDOG_HTTP: int = 3009 + WATCHDOG_HTTPS: int = 311 HTTPS: int = 443 HTTP: int = 80 @@ -561,7 +562,8 @@ def setup_firewall(self, enable_monitoring: bool = False) -> None: ServicePort.DNS, ServicePort.HTTPS, ServicePort.HTTP, - ServicePort.WATCHDOG, + ServicePort.WATCHDOG_HTTP, + ServicePort.WATCHDOG_HTTPS, ] if enable_monitoring: 
tcp_ports.extend([ServicePort.EXPORTER, ServicePort.CADVISOR]) @@ -601,7 +603,8 @@ def cleanup_legacy_rules(self, ssh: bool = False, dns: bool = False) -> None: self.remove_drop_rule('udp') tcp_ports = [ ServicePort.HTTPS, - ServicePort.WATCHDOG, + ServicePort.WATCHDOG_HTTP, + ServicePort.WATCHDOG_HTTPS, ServicePort.EXPORTER, ServicePort.CADVISOR, ServicePort.DNS, # tcp is redundant, making sure it's removed From c65ec3616159118531ee5eb3ae22767d5f622b54 Mon Sep 17 00:00:00 2001 From: Dmytro Date: Thu, 21 Aug 2025 17:50:47 +0100 Subject: [PATCH 208/332] Rename disk mountpoint to block device --- .gitignore | 2 +- README.md | 16 ++++++++-------- node_cli/cli/__init__.py | 2 +- node_cli/configs/user.py | 2 +- node_cli/core/node.py | 2 +- node_cli/operations/base.py | 24 ++++++++++++------------ node_cli/operations/docker_lvmpy.py | 4 ++-- node_cli/operations/fair.py | 12 ++++++------ node_cli/operations/volume.py | 2 +- tests/conftest.py | 10 +++++----- 10 files changed, 38 insertions(+), 38 deletions(-) diff --git a/.gitignore b/.gitignore index c1b8fee2..87a0d8f8 100644 --- a/.gitignore +++ b/.gitignore @@ -113,7 +113,7 @@ node_cli/cli/info.py meta.json -disk_mountpoint.txt +block_device.txt sgx_server_url.txt resource_allocation.json conf.json diff --git a/README.md b/README.md index 558bc61a..0c811957 100644 --- a/README.md +++ b/README.md @@ -157,7 +157,7 @@ Arguments: Required environment variables in `ENV_FILE`: * `SGX_SERVER_URL` - SGX server URL. -* `DISK_MOUNTPOINT` - Mount point for storing sChains data. +* `BLOCK_DEVICE` - Absolute path to a dedicated raw block device (e.g. /dev/sdc) * `DOCKER_LVMPY_STREAM` - Stream of `docker-lvmpy` to use. * `NODE_VERSION` - Stream of `skale-node` to use. * `ENDPOINT` - RPC endpoint of the network where SKALE Manager is deployed. @@ -566,7 +566,7 @@ Arguments: Required environment variables in `ENV_FILE`: -* `DISK_MOUNTPOINT` - Mount point for storing sChain data. 
+* `BLOCK_DEVICE` - Absolute path to a dedicated raw block device (e.g. /dev/sdc). * `DOCKER_LVMPY_STREAM` - Stream of `docker-lvmpy`. * `NODE_VERSION` - Stream of `skale-node`. * `ENDPOINT` - RPC endpoint of the network where SKALE Manager is deployed. @@ -679,7 +679,7 @@ Arguments: Required environment variables in `ENV_FILE`: * `SGX_SERVER_URL` - SGX server URL. -* `DISK_MOUNTPOINT` - Mount point for storing data (BTRFS recommended). +* `BLOCK_DEVICE` - Absolute path to a dedicated raw block device (e.g. /dev/sdc). * `NODE_VERSION` - Stream of `skale-node` configs. * `ENDPOINT` - RPC endpoint of the network where Fair Manager is deployed. * `MANAGER_CONTRACTS` - SKALE Manager alias or address. @@ -733,7 +733,7 @@ Arguments: Required environment variables in `ENV_FILE`: * `SGX_SERVER_URL` - SGX server URL. -* `DISK_MOUNTPOINT` - Mount point for storing data (BTRFS recommended). +* `BLOCK_DEVICE` - Absolute path to a dedicated raw block device (e.g. /dev/sdc). * `NODE_VERSION` - Stream of `skale-node` configs. * `ENDPOINT` - RPC endpoint of the network where Fair Manager is deployed. * `MANAGER_CONTRACTS` - SKALE Manager alias or address. @@ -786,7 +786,7 @@ Required environment variables in `ENV_FILEPATH`: * `NODE_VERSION` - Stream of `skale-node` configs (e.g., `fair-main`). * `BOOT_ENDPOINT` - RPC endpoint of the Fair network (e.g., `https://rpc.fair.cloud/`). * `SGX_SERVER_URL` - SGX server URL (e.g., `https://127.0.0.1:1026/`). -* `DISK_MOUNTPOINT` - Mount point for storing data (e.g., `/dev/sdc`). +* `BLOCK_DEVICE` - Absolute path to a dedicated raw block device (e.g., `/dev/sdc`). * `ENV_TYPE` - Environment type (e.g., `mainnet`). Optional variables: @@ -824,7 +824,7 @@ Required environment variables in `ENV_FILEPATH`: * `NODE_VERSION` - Stream of `skale-node` configs (e.g., `fair-main`). * `BOOT_ENDPOINT` - RPC endpoint of the Fair network (e.g., `https://rpc.fair.cloud/`). * `SGX_SERVER_URL` - SGX server URL (e.g., `https://127.0.0.1:1026/`). 
-* `DISK_MOUNTPOINT` - Mount point for storing data (e.g., `/dev/sdc`). +* `BLOCK_DEVICE` - Absolute path to a dedicated raw block device (e.g., `/dev/sdc`). * `ENV_TYPE` - Environment type (e.g., `mainnet`). Optional variables: @@ -855,7 +855,7 @@ Required environment variables in `ENV_FILEPATH`: * `NODE_VERSION` - Stream of `skale-node` configs (e.g., `fair-main`). * `BOOT_ENDPOINT` - RPC endpoint of the Fair network (e.g., `https://rpc.fair.cloud/`). * `SGX_SERVER_URL` - SGX server URL (e.g., `https://127.0.0.1:1026/`). -* `DISK_MOUNTPOINT` - Mount point for storing data (e.g., `/dev/sdc`). +* `BLOCK_DEVICE` - Absolute path to a dedicated raw block device (e.g., `/dev/sdc`). * `ENV_TYPE` - Environment type (e.g., `mainnet`). Optional variables: @@ -1100,7 +1100,7 @@ Required environment variables in `ENV_FILEPATH`: * `FAIR_CONTRACTS` - Fair Manager contracts alias or address. * `NODE_VERSION` - Stream of `skale-node` configs. * `BOOT_ENDPOINT` - RPC endpoint of Fair network. -* `DISK_MOUNTPOINT` - Mount point for storing chain data. +* `BLOCK_DEVICE` - Absolute path to a dedicated raw block device (e.g., `/dev/sdc`). * `ENV_TYPE` - Environment type (e.g., `mainnet`, `devnet`). 
Options: diff --git a/node_cli/cli/__init__.py b/node_cli/cli/__init__.py index e2eb22f4..46419e54 100644 --- a/node_cli/cli/__init__.py +++ b/node_cli/cli/__init__.py @@ -1,4 +1,4 @@ -__version__ = '3.1.0' +__version__ = '3.2.0' if __name__ == '__main__': print(__version__) diff --git a/node_cli/configs/user.py b/node_cli/configs/user.py index 2fd8145e..9243a5fb 100644 --- a/node_cli/configs/user.py +++ b/node_cli/configs/user.py @@ -53,7 +53,7 @@ class BaseUserConfig(ABC): node_version: str env_type: str filebeat_host: str - disk_mountpoint: str + block_device: str container_configs_dir: str = '' skip_docker_config: str = '' diff --git a/node_cli/core/node.py b/node_cli/core/node.py index 1437c86a..3253f6c5 100644 --- a/node_cli/core/node.py +++ b/node_cli/core/node.py @@ -540,7 +540,7 @@ def run_checks( if disk is None: env_config = get_validated_user_config(node_type=node_type, node_mode=node_mode) - disk = env_config.disk_mountpoint + disk = env_config.block_device failed_checks = run_host_checks(disk, node_type, node_mode, network, container_config_path) if not failed_checks: print('Requirements checking successfully finished!') diff --git a/node_cli/operations/base.py b/node_cli/operations/base.py index a856ef6f..f99fbc6b 100644 --- a/node_cli/operations/base.py +++ b/node_cli/operations/base.py @@ -82,7 +82,7 @@ def checked_host(func): def wrapper(env_filepath: str, env: Dict, node_mode: NodeMode, *args, **kwargs): download_skale_node(env.get('NODE_VERSION'), env.get('CONTAINER_CONFIGS_DIR')) failed_checks = run_host_checks( - env['DISK_MOUNTPOINT'], + env['BLOCK_DEVICE'], TYPE, node_mode, env['ENV_TYPE'], @@ -98,7 +98,7 @@ def wrapper(env_filepath: str, env: Dict, node_mode: NodeMode, *args, **kwargs): return result failed_checks = run_host_checks( - env['DISK_MOUNTPOINT'], + env['BLOCK_DEVICE'], TYPE, node_mode, env['ENV_TYPE'], @@ -160,7 +160,7 @@ def update(env_filepath: str, env: Dict, node_type: NodeType, node_mode: NodeMod def 
update_fair_boot(env_filepath: str, env: Dict, node_mode: NodeMode = NodeMode.ACTIVE) -> bool: compose_rm(node_type=NodeType.FAIR, node_mode=node_mode, env=env) remove_dynamic_containers() - cleanup_volume_artifacts(env['DISK_MOUNTPOINT']) + cleanup_volume_artifacts(env['BLOCK_DEVICE']) sync_skale_node() ensure_btrfs_kernel_module_autoloaded() @@ -172,7 +172,7 @@ def update_fair_boot(env_filepath: str, env: Dict, node_mode: NodeMode = NodeMod configure_nftables(enable_monitoring=enable_monitoring) generate_nginx_config() - prepare_block_device(env['DISK_MOUNTPOINT'], force=env['ENFORCE_BTRFS'] == 'True') + prepare_block_device(env['BLOCK_DEVICE'], force=env['ENFORCE_BTRFS'] == 'True') prepare_host(env_filepath, env['ENV_TYPE']) @@ -237,7 +237,7 @@ def init(env_filepath: str, env: dict, node_type: NodeType, node_mode: NodeMode) @checked_host def init_fair_boot(env_filepath: str, env: dict, node_mode: NodeMode = NodeMode.ACTIVE) -> None: sync_skale_node() - cleanup_volume_artifacts(env['DISK_MOUNTPOINT']) + cleanup_volume_artifacts(env['BLOCK_DEVICE']) ensure_btrfs_kernel_module_autoloaded() if env.get('SKIP_DOCKER_CONFIG') != 'True': @@ -253,7 +253,7 @@ def init_fair_boot(env_filepath: str, env: dict, node_mode: NodeMode = NodeMode. 
configure_filebeat() configure_flask() generate_nginx_config() - prepare_block_device(env['DISK_MOUNTPOINT'], force=env['ENFORCE_BTRFS'] == 'True') + prepare_block_device(env['BLOCK_DEVICE'], force=env['ENFORCE_BTRFS'] == 'True') meta_manager = FairCliMetaManager() meta_manager.update_meta( @@ -275,7 +275,7 @@ def init_passive( snapshot: bool, snapshot_from: Optional[str], ) -> None: - cleanup_volume_artifacts(env['DISK_MOUNTPOINT']) + cleanup_volume_artifacts(env['BLOCK_DEVICE']) download_skale_node(env.get('NODE_VERSION'), env.get('CONTAINER_CONFIGS_DIR')) sync_skale_node() @@ -296,7 +296,7 @@ def init_passive( link_env_file() generate_nginx_config() - prepare_block_device(env['DISK_MOUNTPOINT'], force=env['ENFORCE_BTRFS'] == 'True') + prepare_block_device(env['BLOCK_DEVICE'], force=env['ENFORCE_BTRFS'] == 'True') meta_manager = CliMetaManager() meta_manager.update_meta( @@ -320,7 +320,7 @@ def init_passive( def update_passive(env_filepath: str, env: Dict) -> bool: compose_rm(env=env, node_type=NodeType.SKALE, node_mode=NodeMode.PASSIVE) remove_dynamic_containers() - cleanup_volume_artifacts(env['DISK_MOUNTPOINT']) + cleanup_volume_artifacts(env['BLOCK_DEVICE']) download_skale_node(env['NODE_VERSION'], env.get('CONTAINER_CONFIGS_DIR')) sync_skale_node() @@ -332,7 +332,7 @@ def update_passive(env_filepath: str, env: Dict) -> bool: ensure_filestorage_mapping() - prepare_block_device(env['DISK_MOUNTPOINT'], force=env['ENFORCE_BTRFS'] == 'True') + prepare_block_device(env['BLOCK_DEVICE'], force=env['ENFORCE_BTRFS'] == 'True') generate_nginx_config() prepare_host(env_filepath, env['ENV_TYPE'], allocation=True) @@ -381,7 +381,7 @@ def restore(env, backup_path, node_type: NodeType, config_only=False): node_mode = upsert_node_mode(node_mode=NodeMode.ACTIVE) unpack_backup_archive(backup_path) failed_checks = run_host_checks( - env['DISK_MOUNTPOINT'], + env['BLOCK_DEVICE'], TYPE, node_mode, env['ENV_TYPE'], @@ -416,7 +416,7 @@ def restore(env, backup_path, node_type: 
NodeType, config_only=False): compose_up(env=env, node_type=node_type, node_mode=node_mode) failed_checks = run_host_checks( - env['DISK_MOUNTPOINT'], + env['BLOCK_DEVICE'], TYPE, node_mode, env['ENV_TYPE'], diff --git a/node_cli/operations/docker_lvmpy.py b/node_cli/operations/docker_lvmpy.py index 6e28d58c..0b01c249 100644 --- a/node_cli/operations/docker_lvmpy.py +++ b/node_cli/operations/docker_lvmpy.py @@ -41,7 +41,7 @@ def update_docker_lvmpy_env(env): - env['PHYSICAL_VOLUME'] = env['DISK_MOUNTPOINT'] + env['PHYSICAL_VOLUME'] = env['BLOCK_DEVICE'] env['VOLUME_GROUP'] = 'schains' env['FILESTORAGE_MAPPING'] = FILESTORAGE_MAPPING env['MNT_DIR'] = SCHAINS_MNT_DIR_REGULAR @@ -64,7 +64,7 @@ def lvmpy_install(env): ensure_filestorage_mapping() logging.info('Configuring and starting lvmpy') setup_lvmpy( - block_device=env['DISK_MOUNTPOINT'], volume_group=VOLUME_GROUP, exec_start=LVMPY_RUN_CMD + block_device=env['BLOCK_DEVICE'], volume_group=VOLUME_GROUP, exec_start=LVMPY_RUN_CMD ) init_healing_cron() logger.info('docker-lvmpy is configured and started') diff --git a/node_cli/operations/fair.py b/node_cli/operations/fair.py index 37274d79..50f87c1d 100644 --- a/node_cli/operations/fair.py +++ b/node_cli/operations/fair.py @@ -90,7 +90,7 @@ def init( ) -> bool: sync_skale_node() ensure_btrfs_kernel_module_autoloaded() - cleanup_volume_artifacts(env['DISK_MOUNTPOINT']) + cleanup_volume_artifacts(env['BLOCK_DEVICE']) if env.get('SKIP_DOCKER_CONFIG') != 'True': configure_docker() @@ -103,7 +103,7 @@ def init( prepare_host(env_filepath, env_type=env['ENV_TYPE']) link_env_file() - prepare_block_device(env['DISK_MOUNTPOINT'], force=env['ENFORCE_BTRFS'] == 'True') + prepare_block_device(env['BLOCK_DEVICE'], force=env['ENFORCE_BTRFS'] == 'True') update_images(env=env, node_type=NodeType.FAIR, node_mode=node_mode) compose_up( @@ -135,7 +135,7 @@ def init( def update_fair_boot(env_filepath: str, env: dict, node_mode: NodeMode = NodeMode.ACTIVE) -> bool: 
compose_rm(node_type=NodeType.FAIR, node_mode=node_mode, env=env) remove_dynamic_containers() - cleanup_volume_artifacts(env['DISK_MOUNTPOINT']) + cleanup_volume_artifacts(env['BLOCK_DEVICE']) sync_skale_node() ensure_btrfs_kernel_module_autoloaded() @@ -147,7 +147,7 @@ def update_fair_boot(env_filepath: str, env: dict, node_mode: NodeMode = NodeMod configure_nftables(enable_monitoring=enable_monitoring) generate_nginx_config() - prepare_block_device(env['DISK_MOUNTPOINT'], force=env['ENFORCE_BTRFS'] == 'True') + prepare_block_device(env['BLOCK_DEVICE'], force=env['ENFORCE_BTRFS'] == 'True') prepare_host(env_filepath, env['ENV_TYPE']) @@ -234,7 +234,7 @@ def update( def restore(node_mode: NodeMode, env, backup_path, config_only=False): unpack_backup_archive(backup_path) failed_checks = run_host_checks( - env['DISK_MOUNTPOINT'], + env['BLOCK_DEVICE'], TYPE, node_mode, env['ENV_TYPE'], @@ -267,7 +267,7 @@ def restore(node_mode: NodeMode, env, backup_path, config_only=False): compose_up(env=env, node_type=NodeType.FAIR, node_mode=node_mode) failed_checks = run_host_checks( - env['DISK_MOUNTPOINT'], + env['BLOCK_DEVICE'], TYPE, node_mode, env['ENV_TYPE'], diff --git a/node_cli/operations/volume.py b/node_cli/operations/volume.py index 1595a442..49c1898f 100644 --- a/node_cli/operations/volume.py +++ b/node_cli/operations/volume.py @@ -42,7 +42,7 @@ class FilesystemExistsError(Exception): def update_docker_lvmpy_env(env): - env['PHYSICAL_VOLUME'] = env['DISK_MOUNTPOINT'] + env['PHYSICAL_VOLUME'] = env['BLOCK_DEVICE'] env['VOLUME_GROUP'] = 'schains' env['FILESTORAGE_MAPPING'] = FILESTORAGE_MAPPING env['SCHAINS_MNT_DIR'] = SCHAINS_MNT_DIR_REGULAR diff --git a/tests/conftest.py b/tests/conftest.py index 047e35a0..c6733957 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -297,7 +297,7 @@ def valid_env_params(): 'NODE_VERSION': 'master', 'FILEBEAT_HOST': '127.0.0.1:3010', 'SGX_SERVER_URL': 'http://127.0.0.1', - 'DISK_MOUNTPOINT': '/dev/sss', + 'BLOCK_DEVICE': 
'/dev/sss', 'DOCKER_LVMPY_STREAM': 'master', 'ENV_TYPE': 'devnet', 'SCHAIN_NAME': 'test', @@ -362,7 +362,7 @@ def regular_user_conf(tmp_path): NODE_VERSION='main' FILEBEAT_HOST=127.0.0.1:3010 SGX_SERVER_URL=http://127.0.0.1 - DISK_MOUNTPOINT=/dev/sss + BLOCK_DEVICE=/dev/sss DOCKER_LVMPY_STREAM='master' ENV_TYPE='devnet' MANAGER_CONTRACTS='test-manager' @@ -384,7 +384,7 @@ def fair_user_conf(tmp_path): NODE_VERSION='main' FILEBEAT_HOST=127.0.0.1:3010 SGX_SERVER_URL=http://127.0.0.1 - DISK_MOUNTPOINT=/dev/sss + BLOCK_DEVICE=/dev/sss ENV_TYPE='devnet' ENFORCE_BTRFS=False FAIR_CONTRACTS='test-fair' @@ -405,7 +405,7 @@ def fair_boot_user_conf(tmp_path): NODE_VERSION='main' FILEBEAT_HOST=127.0.0.1:3010 SGX_SERVER_URL=http://127.0.0.1 - DISK_MOUNTPOINT=/dev/sss + BLOCK_DEVICE=/dev/sss ENV_TYPE='devnet' MANAGER_CONTRACTS='test-manager' IMA_CONTRACTS='test-ima' @@ -425,7 +425,7 @@ def passive_user_conf(tmp_path): ENDPOINT=http://localhost:8545 NODE_VERSION='main' FILEBEAT_HOST=127.0.0.1:3010 - DISK_MOUNTPOINT=/dev/sss + BLOCK_DEVICE=/dev/sss ENV_TYPE='devnet' SCHAIN_NAME='test-schain' ENFORCE_BTRFS=False From 16f8e8e50c59b7062c82ab4333c308626b69a868 Mon Sep 17 00:00:00 2001 From: Dmytro Date: Thu, 21 Aug 2025 18:03:26 +0100 Subject: [PATCH 209/332] rename docker lvmpy version, update readme --- README.md | 36 ++++++++++----------------- node_cli/configs/user.py | 2 +- node_cli/operations/base.py | 10 ++++---- node_cli/operations/docker_lvmpy.py | 2 +- node_cli/operations/volume.py | 2 +- node_cli/utils/meta.py | 12 ++++----- node_cli/utils/print_formatters.py | 2 +- tests/cli/node_test.py | 2 +- tests/conftest.py | 4 +-- tests/helper.py | 4 +-- tests/tools_meta_test.py | 38 ++++++++++++++--------------- 11 files changed, 52 insertions(+), 62 deletions(-) diff --git a/README.md b/README.md index 0c811957..808e1b02 100644 --- a/README.md +++ b/README.md @@ -44,9 +44,9 @@ SKALE Node CLI, part of the SKALE suite of validator tools, is the command line Ensure that the following 
packages are installed: **docker**, **docker-compose** (1.27.4+) -### Standard Node Binary +### SKALE Node Binary -This binary (`skale-VERSION-OS`) is used for managing standard SKALE validator nodes. +This binary (`skale-VERSION-OS`) is used for managing SKALE validator nodes. ```shell # Replace {version} with the desired release version (e.g., 3.0.0) @@ -54,19 +54,9 @@ CLI_VERSION={version} && \ sudo -E bash -c "curl -L https://github.com/skalenetwork/node-cli/releases/download/$CLI_VERSION/skale-$CLI_VERSION-`uname -s`-`uname -m` > /usr/local/bin/skale" ``` -### Passive Node Binary - -This binary (`skale-VERSION-OS-passive`) is used for managing dedicated Passive nodes. **Ensure you download the correct `-passive` suffixed binary for Passive node operations.** - -```shell -# Replace {version} with the desired release version (e.g., 3.0.0) -CLI_VERSION={version} && \ -sudo -E bash -c "curl -L https://github.com/skalenetwork/node-cli/releases/download/$CLI_VERSION/skale-$CLI_VERSION-`uname -s`-`uname -m`-passive > /usr/local/bin/skale" -``` - ### Fair Node Binary -This binary (`skale-VERSION-OS-fair`) is used specifically for managing nodes on the Fair network. +This binary (`skale-VERSION-OS-fair`) is used for managing nodes on the Fair network. ```shell # Replace {version} with the desired release version (e.g., 3.0.0) @@ -158,8 +148,8 @@ Required environment variables in `ENV_FILE`: * `SGX_SERVER_URL` - SGX server URL. * `BLOCK_DEVICE` - Absolute path to a dedicated raw block device (e.g. /dev/sdc) -* `DOCKER_LVMPY_STREAM` - Stream of `docker-lvmpy` to use. -* `NODE_VERSION` - Stream of `skale-node` to use. +* `DOCKER_LVMPY_VERSION` - Version of `docker-lvmpy`. +* `NODE_VERSION` - Version of `skale-node`. * `ENDPOINT` - RPC endpoint of the network where SKALE Manager is deployed. * `MANAGER_CONTRACTS` - SKALE Manager `message_proxy_mainnet` contract alias or address. * `IMA_CONTRACTS` - IMA `skale_manager` contract alias or address. 
@@ -567,8 +557,8 @@ Arguments: Required environment variables in `ENV_FILE`: * `BLOCK_DEVICE` - Absolute path to a dedicated raw block device (e.g. /dev/sdc). -* `DOCKER_LVMPY_STREAM` - Stream of `docker-lvmpy`. -* `NODE_VERSION` - Stream of `skale-node`. +* `DOCKER_LVMPY_VERSION` - Version of `docker-lvmpy`. +* `NODE_VERSION` - Version of `skale-node`. * `ENDPOINT` - RPC endpoint of the network where SKALE Manager is deployed. * `MANAGER_CONTRACTS` - SKALE Manager alias or address. * `IMA_CONTRACTS` - IMA alias or address. @@ -680,7 +670,7 @@ Required environment variables in `ENV_FILE`: * `SGX_SERVER_URL` - SGX server URL. * `BLOCK_DEVICE` - Absolute path to a dedicated raw block device (e.g. /dev/sdc). -* `NODE_VERSION` - Stream of `skale-node` configs. +* `NODE_VERSION` - Version of `skale-node`. * `ENDPOINT` - RPC endpoint of the network where Fair Manager is deployed. * `MANAGER_CONTRACTS` - SKALE Manager alias or address. * `IMA_CONTRACTS` - IMA alias or address (*Note: Required by boot service, may not be used by Fair itself*). @@ -734,7 +724,7 @@ Required environment variables in `ENV_FILE`: * `SGX_SERVER_URL` - SGX server URL. * `BLOCK_DEVICE` - Absolute path to a dedicated raw block device (e.g. /dev/sdc). -* `NODE_VERSION` - Stream of `skale-node` configs. +* `NODE_VERSION` - Version of `skale-node`. * `ENDPOINT` - RPC endpoint of the network where Fair Manager is deployed. * `MANAGER_CONTRACTS` - SKALE Manager alias or address. * `IMA_CONTRACTS` - IMA alias or address (*Note: Required by boot service, may not be used by Fair itself*). @@ -783,7 +773,7 @@ Arguments: Required environment variables in `ENV_FILEPATH`: * `FAIR_CONTRACTS` - Fair contracts alias or address (e.g., `mainnet`). -* `NODE_VERSION` - Stream of `skale-node` configs (e.g., `fair-main`). +* `NODE_VERSION` - Version of `skale-node`. * `BOOT_ENDPOINT` - RPC endpoint of the Fair network (e.g., `https://rpc.fair.cloud/`). 
* `SGX_SERVER_URL` - SGX server URL (e.g., `https://127.0.0.1:1026/`). * `BLOCK_DEVICE` - Absolute path to a dedicated raw block device (e.g., `/dev/sdc`). @@ -821,7 +811,7 @@ Arguments: Required environment variables in `ENV_FILEPATH`: * `FAIR_CONTRACTS` - Fair contracts alias or address (e.g., `mainnet`). -* `NODE_VERSION` - Stream of `skale-node` configs (e.g., `fair-main`). +* `NODE_VERSION` - Version of `skale-node`. * `BOOT_ENDPOINT` - RPC endpoint of the Fair network (e.g., `https://rpc.fair.cloud/`). * `SGX_SERVER_URL` - SGX server URL (e.g., `https://127.0.0.1:1026/`). * `BLOCK_DEVICE` - Absolute path to a dedicated raw block device (e.g., `/dev/sdc`). @@ -852,7 +842,7 @@ Arguments: Required environment variables in `ENV_FILEPATH`: * `FAIR_CONTRACTS` - Fair contracts alias or address (e.g., `mainnet`). -* `NODE_VERSION` - Stream of `skale-node` configs (e.g., `fair-main`). +* `NODE_VERSION` - Version of `skale-node`. * `BOOT_ENDPOINT` - RPC endpoint of the Fair network (e.g., `https://rpc.fair.cloud/`). * `SGX_SERVER_URL` - SGX server URL (e.g., `https://127.0.0.1:1026/`). * `BLOCK_DEVICE` - Absolute path to a dedicated raw block device (e.g., `/dev/sdc`). @@ -1098,7 +1088,7 @@ Arguments: Required environment variables in `ENV_FILEPATH`: * `FAIR_CONTRACTS` - Fair Manager contracts alias or address. -* `NODE_VERSION` - Stream of `skale-node` configs. +* `NODE_VERSION` - Version of `skale-node`. * `BOOT_ENDPOINT` - RPC endpoint of Fair network. * `BLOCK_DEVICE` - Absolute path to a dedicated raw block device (e.g., `/dev/sdc`). * `ENV_TYPE` - Environment type (e.g., `mainnet`, `devnet`). 
diff --git a/node_cli/configs/user.py b/node_cli/configs/user.py index 9243a5fb..3aa7b831 100644 --- a/node_cli/configs/user.py +++ b/node_cli/configs/user.py @@ -117,7 +117,7 @@ class SkaleUserConfig(BaseUserConfig): endpoint: str manager_contracts: str ima_contracts: str - docker_lvmpy_stream: str + docker_lvmpy_version: str sgx_server_url: str monitoring_containers: str = '' telegraf: str = '' diff --git a/node_cli/operations/base.py b/node_cli/operations/base.py index f99fbc6b..2380d91c 100644 --- a/node_cli/operations/base.py +++ b/node_cli/operations/base.py @@ -147,7 +147,7 @@ def update(env_filepath: str, env: Dict, node_type: NodeType, node_mode: NodeMod meta_manager.update_meta( VERSION, env['NODE_VERSION'], - env['DOCKER_LVMPY_STREAM'], + env['DOCKER_LVMPY_VERSION'], distro.id(), distro.version(), ) @@ -224,7 +224,7 @@ def init(env_filepath: str, env: dict, node_type: NodeType, node_mode: NodeMode) meta_manager.update_meta( VERSION, env['NODE_VERSION'], - env['DOCKER_LVMPY_STREAM'], + env['DOCKER_LVMPY_VERSION'], distro.id(), distro.version(), ) @@ -341,7 +341,7 @@ def update_passive(env_filepath: str, env: Dict) -> bool: meta_manager.update_meta( VERSION, env['NODE_VERSION'], - env['DOCKER_LVMPY_STREAM'], + env['DOCKER_LVMPY_VERSION'], distro.id(), distro.version(), ) @@ -363,7 +363,7 @@ def turn_on(env: dict, node_type: NodeType, node_mode: NodeMode) -> None: meta_manager.update_meta( VERSION, env['NODE_VERSION'], - env['DOCKER_LVMPY_STREAM'], + env['DOCKER_LVMPY_VERSION'], distro.id(), distro.version(), ) @@ -408,7 +408,7 @@ def restore(env, backup_path, node_type: NodeType, config_only=False): meta_manager.update_meta( VERSION, env['NODE_VERSION'], - env['DOCKER_LVMPY_STREAM'], + env['DOCKER_LVMPY_VERSION'], distro.id(), distro.version(), ) diff --git a/node_cli/operations/docker_lvmpy.py b/node_cli/operations/docker_lvmpy.py index 0b01c249..fb70e480 100644 --- a/node_cli/operations/docker_lvmpy.py +++ b/node_cli/operations/docker_lvmpy.py @@ -57,7 
+57,7 @@ def ensure_filestorage_mapping(mapping_dir=FILESTORAGE_MAPPING): def sync_docker_lvmpy_repo(env): if os.path.isdir(DOCKER_LVMPY_PATH): shutil.rmtree(DOCKER_LVMPY_PATH) - sync_repo(DOCKER_LVMPY_REPO_URL, DOCKER_LVMPY_PATH, env['DOCKER_LVMPY_STREAM']) + sync_repo(DOCKER_LVMPY_REPO_URL, DOCKER_LVMPY_PATH, env['DOCKER_LVMPY_VERSION']) def lvmpy_install(env): diff --git a/node_cli/operations/volume.py b/node_cli/operations/volume.py index 49c1898f..e944547a 100644 --- a/node_cli/operations/volume.py +++ b/node_cli/operations/volume.py @@ -58,7 +58,7 @@ def ensure_filestorage_mapping(mapping_dir=FILESTORAGE_MAPPING): def sync_docker_lvmpy_repo(env): if os.path.isdir(DOCKER_LVMPY_PATH): shutil.rmtree(DOCKER_LVMPY_PATH) - sync_repo(DOCKER_LVMPY_REPO_URL, DOCKER_LVMPY_PATH, env['DOCKER_LVMPY_STREAM']) + sync_repo(DOCKER_LVMPY_REPO_URL, DOCKER_LVMPY_PATH, env['DOCKER_LVMPY_VERSION']) def docker_lvmpy_update(env): diff --git a/node_cli/utils/meta.py b/node_cli/utils/meta.py index 0dafd54d..bb8ad8af 100644 --- a/node_cli/utils/meta.py +++ b/node_cli/utils/meta.py @@ -7,7 +7,7 @@ DEFAULT_VERSION = '1.0.0' DEFAULT_CONFIG_STREAM = '1.1.0' -DEFAULT_DOCKER_LVMPY_STREAM = '1.0.0' +DEFAULT_DOCKER_LVMPY_VERSION = '1.0.0' DEFAULT_OS_ID = 'ubuntu' DEFAULT_OS_VERSION = '18.04' @@ -26,13 +26,13 @@ def asdict(self) -> dict: @dataclass class CliMeta(CliMetaBase): - docker_lvmpy_stream: str = DEFAULT_DOCKER_LVMPY_STREAM + docker_lvmpy_version: str = DEFAULT_DOCKER_LVMPY_VERSION def asdict(self) -> dict: return { 'version': self.version, 'config_stream': self.config_stream, - 'docker_lvmpy_stream': self.docker_lvmpy_stream, + 'docker_lvmpy_version': self.docker_lvmpy_version, 'os_id': self.os_id, 'os_version': self.os_version, } @@ -96,7 +96,7 @@ def get_meta_info(self, raw: bool = False) -> CliMeta | dict | None: def compose_default_meta(self) -> CliMeta: return CliMeta( version=DEFAULT_VERSION, - docker_lvmpy_stream=DEFAULT_DOCKER_LVMPY_STREAM, + 
docker_lvmpy_version=DEFAULT_DOCKER_LVMPY_VERSION, config_stream=DEFAULT_CONFIG_STREAM, os_id=DEFAULT_OS_ID, os_version=DEFAULT_OS_VERSION, @@ -106,7 +106,7 @@ def update_meta( self, version: str, config_stream: str, - docker_lvmpy_stream: str | None, + docker_lvmpy_version: str | None, os_id: str, os_version: str, ) -> None: @@ -116,7 +116,7 @@ def update_meta( config_stream, os_id, os_version, - docker_lvmpy_stream, + docker_lvmpy_version, ) self.save_meta(meta) diff --git a/node_cli/utils/print_formatters.py b/node_cli/utils/print_formatters.py index 821e51f6..7edec113 100644 --- a/node_cli/utils/print_formatters.py +++ b/node_cli/utils/print_formatters.py @@ -319,7 +319,7 @@ def print_meta_info(meta_info: CliMeta) -> None: {LONG_LINE} Version: {meta_info.version} Config Stream: {meta_info.config_stream} - Lvmpy stream: {meta_info.docker_lvmpy_stream} + Lvmpy stream: {meta_info.docker_lvmpy_version} {LONG_LINE} """) ) diff --git a/tests/cli/node_test.py b/tests/cli/node_test.py index 3361e083..bfd5263b 100644 --- a/tests/cli/node_test.py +++ b/tests/cli/node_test.py @@ -471,5 +471,5 @@ def test_node_version(meta_file_v2): assert result.exit_code == 0 assert ( result.output - == "{'version': '0.1.1', 'config_stream': 'develop', 'docker_lvmpy_stream': '1.1.2'}\n" + == "{'version': '0.1.1', 'config_stream': 'develop', 'docker_lvmpy_version': '1.1.2'}\n" ) diff --git a/tests/conftest.py b/tests/conftest.py index c6733957..5434e697 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -298,7 +298,7 @@ def valid_env_params(): 'FILEBEAT_HOST': '127.0.0.1:3010', 'SGX_SERVER_URL': 'http://127.0.0.1', 'BLOCK_DEVICE': '/dev/sss', - 'DOCKER_LVMPY_STREAM': 'master', + 'DOCKER_LVMPY_VERSION': 'master', 'ENV_TYPE': 'devnet', 'SCHAIN_NAME': 'test', 'ENFORCE_BTRFS': 'False', @@ -363,7 +363,7 @@ def regular_user_conf(tmp_path): FILEBEAT_HOST=127.0.0.1:3010 SGX_SERVER_URL=http://127.0.0.1 BLOCK_DEVICE=/dev/sss - DOCKER_LVMPY_STREAM='master' + DOCKER_LVMPY_VERSION='master' 
ENV_TYPE='devnet' MANAGER_CONTRACTS='test-manager' IMA_CONTRACTS='test-ima' diff --git a/tests/helper.py b/tests/helper.py index 7bf18a98..6af0af14 100644 --- a/tests/helper.py +++ b/tests/helper.py @@ -34,12 +34,12 @@ TEST_META_V1 = {'version': '0.1.1', 'config_stream': 'develop'} -TEST_META_V2 = {'version': '0.1.1', 'config_stream': 'develop', 'docker_lvmpy_stream': '1.1.2'} +TEST_META_V2 = {'version': '0.1.1', 'config_stream': 'develop', 'docker_lvmpy_version': '1.1.2'} TEST_META_V3 = { 'version': '0.1.1', 'config_stream': 'develop', - 'docker_lvmpy_stream': '1.1.2', + 'docker_lvmpy_version': '1.1.2', 'os_id': 'ubuntu', 'os_version': '18.04', } diff --git a/tests/tools_meta_test.py b/tests/tools_meta_test.py index a2a6a2fc..c82e435c 100644 --- a/tests/tools_meta_test.py +++ b/tests/tools_meta_test.py @@ -17,21 +17,21 @@ def test_get_meta_info_v1(meta_file_v1): meta = CliMetaManager().get_meta_info() assert meta.version == TEST_META_V1['version'] assert meta.config_stream == TEST_META_V1['config_stream'] - assert meta.docker_lvmpy_stream == '1.0.0' + assert meta.docker_lvmpy_version == '1.0.0' def test_get_meta_info_v2(meta_file_v2): meta = CliMetaManager().get_meta_info() assert meta.version == TEST_META_V2['version'] assert meta.config_stream == TEST_META_V2['config_stream'] - assert meta.docker_lvmpy_stream == TEST_META_V2['docker_lvmpy_stream'] + assert meta.docker_lvmpy_version == TEST_META_V2['docker_lvmpy_version'] def test_get_meta_info_v3(meta_file_v3): meta = CliMetaManager().get_meta_info() assert meta.version == TEST_META_V3['version'] assert meta.config_stream == TEST_META_V3['config_stream'] - assert meta.docker_lvmpy_stream == TEST_META_V3['docker_lvmpy_stream'] + assert meta.docker_lvmpy_version == TEST_META_V3['docker_lvmpy_version'] assert meta.os_id == TEST_META_V3['os_id'] assert meta.os_version == TEST_META_V3['os_version'] @@ -45,7 +45,7 @@ def test_compose_default_meta(): meta = CliMetaManager().compose_default_meta() assert meta.version == 
'1.0.0' assert meta.config_stream == '1.1.0' - assert meta.docker_lvmpy_stream == '1.0.0' + assert meta.docker_lvmpy_version == '1.0.0' assert meta.os_id == 'ubuntu' assert meta.os_version == '18.04' @@ -58,7 +58,7 @@ def test_save_meta(meta_file_v2): assert saved_json == { 'version': '1.1.2', 'config_stream': '2.2.2', - 'docker_lvmpy_stream': '1.0.0', + 'docker_lvmpy_version': '1.0.0', 'os_id': 'ubuntu', 'os_version': '18.04', } @@ -69,14 +69,14 @@ def test_update_meta_from_v2_to_v3(meta_file_v2): CliMetaManager().update_meta( version='3.3.3', config_stream='1.1.1', - docker_lvmpy_stream='1.2.2', + docker_lvmpy_version='1.2.2', os_id='debian', os_version='11', ) meta = CliMetaManager().get_meta_info() assert meta.version == '3.3.3' assert meta.config_stream == '1.1.1' - assert meta.docker_lvmpy_stream == '1.2.2' + assert meta.docker_lvmpy_version == '1.2.2' assert meta.os_id == 'debian' assert meta.os_version == '11' assert meta != old_meta @@ -86,14 +86,14 @@ def test_update_meta_from_v1(meta_file_v1): CliMetaManager().update_meta( version='4.4.4', config_stream='beta', - docker_lvmpy_stream='1.3.3', + docker_lvmpy_version='1.3.3', os_id='debian', os_version='11', ) meta = CliMetaManager().get_meta_info() assert meta.version == '4.4.4' assert meta.config_stream == 'beta' - assert meta.docker_lvmpy_stream == '1.3.3' + assert meta.docker_lvmpy_version == '1.3.3' assert meta.os_id == 'debian' assert meta.os_version == '11' @@ -102,14 +102,14 @@ def test_update_meta_from_v3(meta_file_v3): CliMetaManager().update_meta( version='5.5.5', config_stream='stable', - docker_lvmpy_stream='1.2.3', + docker_lvmpy_version='1.2.3', os_id='ubuntu', os_version='20.04', ) meta = CliMetaManager().get_meta_info() assert meta.version == '5.5.5' assert meta.config_stream == 'stable' - assert meta.docker_lvmpy_stream == '1.2.3' + assert meta.docker_lvmpy_version == '1.2.3' assert meta.os_id == 'ubuntu' assert meta.os_version == '20.04' @@ -156,7 +156,7 @@ def 
test_fair_compose_default_meta(): assert meta.config_stream == '1.1.0' assert meta.os_id == 'ubuntu' assert meta.os_version == '18.04' - assert not hasattr(meta, 'docker_lvmpy_stream') + assert not hasattr(meta, 'docker_lvmpy_version') def test_fair_save_meta(meta_file_v2): @@ -172,7 +172,7 @@ def test_fair_save_meta(meta_file_v2): 'os_id': 'debian', 'os_version': '11', } - assert 'docker_lvmpy_stream' not in saved_json + assert 'docker_lvmpy_version' not in saved_json def test_fair_update_meta_from_v2_to_v3(meta_file_v2): @@ -237,7 +237,7 @@ def test_fair_get_meta_info_raw(meta_file_v3): assert raw_meta['config_stream'] == TEST_META_V3['config_stream'] assert raw_meta['os_id'] == TEST_META_V3['os_id'] assert raw_meta['os_version'] == TEST_META_V3['os_version'] - assert 'docker_lvmpy_stream' not in raw_meta + assert 'docker_lvmpy_version' not in raw_meta def test_fair_get_meta_info_raw_empty(): @@ -257,7 +257,7 @@ def test_fair_asdict(): 'os_version': '35', } assert meta_dict == expected - assert 'docker_lvmpy_stream' not in meta_dict + assert 'docker_lvmpy_version' not in meta_dict def test_fair_meta_compatibility_with_cli_meta_file(meta_file_v3): @@ -266,21 +266,21 @@ def test_fair_meta_compatibility_with_cli_meta_file(meta_file_v3): assert meta.config_stream == TEST_META_V3['config_stream'] assert meta.os_id == TEST_META_V3['os_id'] assert meta.os_version == TEST_META_V3['os_version'] - # Should not have docker_lvmpy_stream even though it's in the file - assert not hasattr(meta, 'docker_lvmpy_stream') + # Should not have docker_lvmpy_version even though it's in the file + assert not hasattr(meta, 'docker_lvmpy_version') def test_fair_save_meta_overwrites_cli_meta(meta_file_v3): with open(META_FILEPATH) as f: original_data = json.load(f) - assert 'docker_lvmpy_stream' in original_data + assert 'docker_lvmpy_version' in original_data fair_meta = FairCliMeta(version='2.0.0', config_stream='fair-new') FairCliMetaManager().save_meta(fair_meta) with 
open(META_FILEPATH) as f: saved_data = json.load(f) - assert 'docker_lvmpy_stream' not in saved_data + assert 'docker_lvmpy_version' not in saved_data assert saved_data['version'] == '2.0.0' assert saved_data['config_stream'] == 'fair-new' From 21426c260282df69c0c178a704671b5f71cf38b7 Mon Sep 17 00:00:00 2001 From: Dmytro Date: Tue, 26 Aug 2025 17:07:15 +0100 Subject: [PATCH 210/332] Add staking commands, update README --- README.md | 98 ++++++++++++++++++++++++++++++++- node_cli/cli/staking.py | 88 ++++++++++++++++++++++++++++++ node_cli/configs/routes.py | 10 ++++ node_cli/fair/staking.py | 109 +++++++++++++++++++++++++++++++++++++ node_cli/main.py | 2 + tests/routes_test.py | 8 +++ 6 files changed, 314 insertions(+), 1 deletion(-) create mode 100644 node_cli/cli/staking.py create mode 100644 node_cli/fair/staking.py diff --git a/README.md b/README.md index 808e1b02..e06bd98e 100644 --- a/README.md +++ b/README.md @@ -32,7 +32,8 @@ SKALE Node CLI, part of the SKALE suite of validator tools, is the command line 5. [Fair Wallet commands](#fair-wallet-commands) 6. [Fair Logs commands](#fair-logs-commands) 7. [Fair SSL commands](#fair-ssl-commands) - 8. [Passive Fair Node commands](#passive-fair-node-commands) + 8. [Fair Staking commands](#fair-staking-commands) + 9. [Passive Fair Node commands](#passive-fair-node-commands) 5. [Exit codes](#exit-codes) 6. [Development](#development) @@ -1067,6 +1068,101 @@ Options: * `--no-client` - Skip client connection for openssl check. * `--no-wss` - Skip WSS server starting for skaled check. +### Fair Staking commands + +> Prefix: `fair staking` + +Commands for interacting with the Fair staking functionality. + +#### Add allowed receiver + +Allow an address to receive staking fees. + +```shell +fair staking add-allowed-receiver +``` + +Arguments: + +* `RECEIVER_ADDRESS` - Address to add to the allowed receivers list. + +#### Remove allowed receiver + +Remove an address from the allowed receivers list. 
+ +```shell +fair staking remove-allowed-receiver +``` + +Arguments: + +* `RECEIVER_ADDRESS` - Address to remove from the allowed receivers list. + +#### Send all fees + +Send all accumulated fees to the specified address. + +```shell +fair staking send-all-fees +``` + +Arguments: + +* `TO_ADDRESS` - Destination address to receive all fees. + +#### Claim all fees + +Claim all accumulated fees to the node wallet. + +```shell +fair staking claim-all-fees +``` + +#### Set fee rate + +Set the fee rate (uint16 value) used by the staking logic. + +```shell +fair staking set-fee-rate +``` + +Arguments: + +* `FEE_RATE` - Fee rate value as integer (uint16). + +#### Claim fees + +Claim a specific amount of fees to the node wallet. + +```shell +fair staking claim-fees +``` + +Arguments: + +* `AMOUNT` - Amount of fees to claim (FAIR). + +#### Send fees + +Send a specific amount of fees to an address. + +```shell +fair staking send-fees +``` + +Arguments: + +* `TO_ADDRESS` - Destination address for the fee transfer. +* `AMOUNT` - Amount of fees to send (FAIR). + +#### Get earned fee amount + +Get the currently earned fee amount. + +```shell +fair staking get-earned-fee-amount +``` + ### Passive Fair Node commands > Prefix: `fair passive-node` (passive Fair build) diff --git a/node_cli/cli/staking.py b/node_cli/cli/staking.py new file mode 100644 index 00000000..5286fa38 --- /dev/null +++ b/node_cli/cli/staking.py @@ -0,0 +1,88 @@ +# -*- coding: utf-8 -*- +# +# This file is part of node-cli +# +# Copyright (C) 2025-Present SKALE Labs +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the +# GNU Affero General Public License for more details. +# +# You should have received a copy of the GNU Affero General Public License +# along with this program. If not, see . + +import click + +from node_cli.fair.staking import ( + add_allowed_receiver, + remove_allowed_receiver, + send_all_fees, + claim_all_fees, + set_fee_rate, + claim_fees, + send_fees, + get_earned_fee_amount, +) + + +@click.group() +def staking_cli(): + pass + + +@staking_cli.group(help='Staking commands') +def staking(): + pass + + +@staking.command('add-allowed-receiver', help='Add allowed receiver') +@click.argument('receiver') +def _add_allowed_receiver(receiver: str) -> None: + add_allowed_receiver(receiver) + + +@staking.command('remove-allowed-receiver', help='Remove allowed receiver') +@click.argument('receiver') +def _remove_allowed_receiver(receiver: str) -> None: + remove_allowed_receiver(receiver) + + +@staking.command('send-all-fees', help='Send all fees to address') +@click.argument('to') +def _send_all_fees(to: str) -> None: + send_all_fees(to) + + +@staking.command('claim-all-fees', help='Claim all fees') +def _claim_all_fees() -> None: + claim_all_fees() + + +@staking.command('set-fee-rate', help='Set fee rate (uint16)') +@click.argument('fee_rate', type=int) +def _set_fee_rate(fee_rate: int) -> None: + set_fee_rate(fee_rate) + + +@staking.command('claim-fees', help='Claim fees amount (FAIR)') +@click.argument('amount', type=float) +def _claim_fees(amount: float) -> None: + claim_fees(amount) + + +@staking.command('send-fees', help='Send fees to address') +@click.argument('to') +@click.argument('amount', type=float) +def _send_fees(to: str, amount: float) -> None: + send_fees(to, amount) + + +@staking.command('get-earned-fee-amount', help='Get earned fee amount') +def _get_earned_fee_amount() -> None: + get_earned_fee_amount() diff --git a/node_cli/configs/routes.py b/node_cli/configs/routes.py index 7d7d9e9c..6f0829b3 100644 --- a/node_cli/configs/routes.py +++ 
b/node_cli/configs/routes.py @@ -43,6 +43,16 @@ 'fair-node': ['info', 'register', 'set-domain-name', 'change-ip', 'exit'], 'fair-chain': ['record', 'checks'], 'fair-node-passive': ['setup'], + 'fair-staking': [ + 'add-allowed-receiver', + 'remove-allowed-receiver', + 'send-all-fees', + 'claim-all-fees', + 'set-fee-rate', + 'claim-fees', + 'send-fees', + 'get-earned-fee-amount', + ], } } diff --git a/node_cli/fair/staking.py b/node_cli/fair/staking.py new file mode 100644 index 00000000..771d2b99 --- /dev/null +++ b/node_cli/fair/staking.py @@ -0,0 +1,109 @@ +# -*- coding: utf-8 -*- +# +# This file is part of node-cli +# +# Copyright (C) 2025-Present SKALE Labs +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Affero General Public License for more details. +# +# You should have received a copy of the GNU Affero General Public License +# along with this program. If not, see . 
+ +from typing import Any, Optional + +from node_cli.core.host import is_node_inited +from node_cli.utils.exit_codes import CLIExitCodes +from node_cli.utils.helper import error_exit, post_request + +BLUEPRINT_NAME = 'fair-staking' + + +def _handle_response(status: str, payload: Any, success: Optional[str] = None) -> None: + if status == 'ok': + print(success if success is not None else 'OK') + else: + error_exit(payload, exit_code=CLIExitCodes.BAD_API_RESPONSE) + + +def add_allowed_receiver(receiver: str) -> None: + if not is_node_inited(): + print('Node is not initialized') + return + status, payload = post_request( + blueprint=BLUEPRINT_NAME, method='add-allowed-receiver', json={'receiver': receiver} + ) + _handle_response(status, payload, success=f'Allowed receiver added: {receiver}') + + +def remove_allowed_receiver(receiver: str) -> None: + if not is_node_inited(): + print('Node is not initialized') + return + status, payload = post_request( + blueprint=BLUEPRINT_NAME, method='remove-allowed-receiver', json={'receiver': receiver} + ) + _handle_response(status, payload, success=f'Allowed receiver removed: {receiver}') + + +def send_all_fees(to: str) -> None: + if not is_node_inited(): + print('Node is not initialized') + return + status, payload = post_request( + blueprint=BLUEPRINT_NAME, method='send-all-fees', json={'to': to} + ) + _handle_response(status, payload, success=f'All fees sent to {to}') + + +def claim_all_fees() -> None: + if not is_node_inited(): + print('Node is not initialized') + return + status, payload = post_request(blueprint=BLUEPRINT_NAME, method='claim-all-fees') + _handle_response(status, payload, success='All fees claimed') + + +def set_fee_rate(fee_rate: int) -> None: + if not is_node_inited(): + print('Node is not initialized') + return + status, payload = post_request( + blueprint=BLUEPRINT_NAME, method='set-fee-rate', json={'feeRate': fee_rate} + ) + _handle_response(status, payload, success=f'Fee rate set to {fee_rate}') + + +def 
claim_fees(amount: float) -> None: + if not is_node_inited(): + print('Node is not initialized') + return + status, payload = post_request( + blueprint=BLUEPRINT_NAME, method='claim-fees', json={'amount': amount} + ) + _handle_response(status, payload, success=f'Fees claimed: {amount}') + + +def send_fees(to: str, amount: float) -> None: + if not is_node_inited(): + print('Node is not initialized') + return + status, payload = post_request( + blueprint=BLUEPRINT_NAME, method='send-fees', json={'to': to, 'amount': amount} + ) + _handle_response(status, payload, success=f'Fees sent: {amount} to {to}') + + +def get_earned_fee_amount() -> None: + if not is_node_inited(): + print('Node is not initialized') + return + status, payload = post_request(blueprint=BLUEPRINT_NAME, method='get-earned-fee-amount') + _handle_response(status, payload, success=f'Earned fee amount: {payload}') diff --git a/node_cli/main.py b/node_cli/main.py index 8e734d17..a18b4d67 100644 --- a/node_cli/main.py +++ b/node_cli/main.py @@ -41,6 +41,7 @@ from node_cli.cli.fair_node import fair_node_cli from node_cli.cli.passive_fair_node import passive_fair_node_cli from node_cli.cli.chain import chain_cli +from node_cli.cli.staking import staking_cli from node_cli.core.host import init_logs_dir from node_cli.utils.node_type import NodeType from node_cli.configs import LONG_LINE @@ -92,6 +93,7 @@ def get_sources_list() -> List[click.MultiCommand]: fair_node_cli, passive_fair_node_cli, chain_cli, + staking_cli, wallet_cli, ssl_cli, ] diff --git a/tests/routes_test.py b/tests/routes_test.py index 19845fba..958a4683 100644 --- a/tests/routes_test.py +++ b/tests/routes_test.py @@ -39,6 +39,14 @@ '/api/v1/fair-chain/record', '/api/v1/fair-chain/checks', '/api/v1/fair-node-passive/setup', + '/api/v1/staking/add-allowed-receiver', + '/api/v1/staking/remove-allowed-receiver', + '/api/v1/staking/send-all-fees', + '/api/v1/staking/claim-all-fees', + '/api/v1/staking/set-fee-rate', + '/api/v1/staking/claim-fees', 
+ '/api/v1/staking/send-fees', + '/api/v1/staking/get-earned-fee-amount', ] From e5a4995ed790b863ea5714812f49e13963ef2724 Mon Sep 17 00:00:00 2001 From: Dmytro Date: Tue, 26 Aug 2025 19:08:08 +0100 Subject: [PATCH 211/332] Fix routes test --- node_cli/utils/helper.py | 15 ++++++++++++++- tests/routes_test.py | 16 ++++++++-------- 2 files changed, 22 insertions(+), 9 deletions(-) diff --git a/node_cli/utils/helper.py b/node_cli/utils/helper.py index 2eb778fb..b0bff97e 100644 --- a/node_cli/utils/helper.py +++ b/node_cli/utils/helper.py @@ -197,13 +197,26 @@ def abort_if_false(ctx, param, value): def post_request(blueprint, method, json=None, files=None): + print('in post request') route = get_route(blueprint, method) url = construct_url(route) + logger.info('url: ') + logger.info(url) try: + logger.info('json: ') + print(json) response = requests.post(url, json=json, files=files) + + logger.info('--------') + logger.info(response) + logger.info(response.status_code) + logger.info(response.reason) + logger.info(response.raw) + logger.info('=========') + data = response.json() except Exception as err: - logger.error('Request failed', exc_info=err) + logger.exception('Request failed', exc_info=err) data = DEFAULT_ERROR_DATA status = data['status'] payload = data['payload'] diff --git a/tests/routes_test.py b/tests/routes_test.py index 958a4683..bfb44d4c 100644 --- a/tests/routes_test.py +++ b/tests/routes_test.py @@ -39,14 +39,14 @@ '/api/v1/fair-chain/record', '/api/v1/fair-chain/checks', '/api/v1/fair-node-passive/setup', - '/api/v1/staking/add-allowed-receiver', - '/api/v1/staking/remove-allowed-receiver', - '/api/v1/staking/send-all-fees', - '/api/v1/staking/claim-all-fees', - '/api/v1/staking/set-fee-rate', - '/api/v1/staking/claim-fees', - '/api/v1/staking/send-fees', - '/api/v1/staking/get-earned-fee-amount', + '/api/v1/fair-staking/add-allowed-receiver', + '/api/v1/fair-staking/remove-allowed-receiver', + '/api/v1/fair-staking/send-all-fees', + 
'/api/v1/fair-staking/claim-all-fees', + '/api/v1/fair-staking/set-fee-rate', + '/api/v1/fair-staking/claim-fees', + '/api/v1/fair-staking/send-fees', + '/api/v1/fair-staking/get-earned-fee-amount', ] From b3e3e5e81bd042eb79b65c6bdb9cad31e417cf66 Mon Sep 17 00:00:00 2001 From: Dmytro Date: Tue, 26 Aug 2025 20:05:44 +0100 Subject: [PATCH 212/332] Add confirmation to all staking commands --- node_cli/cli/staking.py | 52 +++++++++++++++++++++++++++++++++++++++- node_cli/fair/staking.py | 7 +++++- node_cli/utils/helper.py | 15 +----------- 3 files changed, 58 insertions(+), 16 deletions(-) diff --git a/node_cli/cli/staking.py b/node_cli/cli/staking.py index 5286fa38..c23ad75a 100644 --- a/node_cli/cli/staking.py +++ b/node_cli/cli/staking.py @@ -29,6 +29,7 @@ send_fees, get_earned_fee_amount, ) +from node_cli.utils.helper import abort_if_false @click.group() @@ -43,35 +44,77 @@ def staking(): @staking.command('add-allowed-receiver', help='Add allowed receiver') @click.argument('receiver') +@click.option( + '--yes', + is_flag=True, + callback=abort_if_false, + expose_value=False, + prompt='Are you sure you want to add allowed receiver?', +) def _add_allowed_receiver(receiver: str) -> None: add_allowed_receiver(receiver) @staking.command('remove-allowed-receiver', help='Remove allowed receiver') @click.argument('receiver') +@click.option( + '--yes', + is_flag=True, + callback=abort_if_false, + expose_value=False, + prompt='Are you sure you want to remove allowed receiver?', +) def _remove_allowed_receiver(receiver: str) -> None: remove_allowed_receiver(receiver) @staking.command('send-all-fees', help='Send all fees to address') @click.argument('to') +@click.option( + '--yes', + is_flag=True, + callback=abort_if_false, + expose_value=False, + prompt='Are you sure you want to send all fees?', +) def _send_all_fees(to: str) -> None: send_all_fees(to) @staking.command('claim-all-fees', help='Claim all fees') +@click.option( + '--yes', + is_flag=True, + 
callback=abort_if_false, + expose_value=False, + prompt='Are you sure you want to claim all fees?', +) def _claim_all_fees() -> None: claim_all_fees() -@staking.command('set-fee-rate', help='Set fee rate (uint16)') +@staking.command('set-fee-rate', help='Set fee rate (uint16, basis points; 25 = 2.5%)') @click.argument('fee_rate', type=int) +@click.option( + '--yes', + is_flag=True, + callback=abort_if_false, + expose_value=False, + prompt='Are you sure you want to set fee rate?', +) def _set_fee_rate(fee_rate: int) -> None: set_fee_rate(fee_rate) @staking.command('claim-fees', help='Claim fees amount (FAIR)') @click.argument('amount', type=float) +@click.option( + '--yes', + is_flag=True, + callback=abort_if_false, + expose_value=False, + prompt='Are you sure you want to claim fees?', +) def _claim_fees(amount: float) -> None: claim_fees(amount) @@ -79,6 +122,13 @@ def _claim_fees(amount: float) -> None: @staking.command('send-fees', help='Send fees to address') @click.argument('to') @click.argument('amount', type=float) +@click.option( + '--yes', + is_flag=True, + callback=abort_if_false, + expose_value=False, + prompt='Are you sure you want to send fees?', +) def _send_fees(to: str, amount: float) -> None: send_fees(to, amount) diff --git a/node_cli/fair/staking.py b/node_cli/fair/staking.py index 771d2b99..9c21dd7c 100644 --- a/node_cli/fair/staking.py +++ b/node_cli/fair/staking.py @@ -106,4 +106,9 @@ def get_earned_fee_amount() -> None: print('Node is not initialized') return status, payload = post_request(blueprint=BLUEPRINT_NAME, method='get-earned-fee-amount') - _handle_response(status, payload, success=f'Earned fee amount: {payload}') + if status == 'ok' and isinstance(payload, dict): + amount_wei = payload.get('amount_wei') + amount_ether = payload.get('amount_ether') + print(f'Earned fee amount: {amount_wei} wei ({amount_ether} FAIR)') + return + error_exit(payload, exit_code=CLIExitCodes.BAD_API_RESPONSE) diff --git a/node_cli/utils/helper.py 
b/node_cli/utils/helper.py index b0bff97e..d299fbf6 100644 --- a/node_cli/utils/helper.py +++ b/node_cli/utils/helper.py @@ -69,7 +69,7 @@ DEFAULT_ERROR_DATA = { 'status': 'error', - 'payload': 'Request failed. Check skale_api container logs', + 'payload': 'Request failed. Check API container logs', } @@ -197,23 +197,10 @@ def abort_if_false(ctx, param, value): def post_request(blueprint, method, json=None, files=None): - print('in post request') route = get_route(blueprint, method) url = construct_url(route) - logger.info('url: ') - logger.info(url) try: - logger.info('json: ') - print(json) response = requests.post(url, json=json, files=files) - - logger.info('--------') - logger.info(response) - logger.info(response.status_code) - logger.info(response.reason) - logger.info(response.raw) - logger.info('=========') - data = response.json() except Exception as err: logger.exception('Request failed', exc_info=err) From f119f4f837cb0031a28445be2473b4bb42845478 Mon Sep 17 00:00:00 2001 From: Dmytro Date: Wed, 27 Aug 2025 18:12:28 +0100 Subject: [PATCH 213/332] Merge send fees commands, update api routes --- .github/copilot-instructions.md | 3 +- README.md | 20 +++-------- node_cli/cli/staking.py | 29 +++++----------- node_cli/configs/routes.py | 5 ++- node_cli/fair/staking.py | 59 ++++++++++----------------------- tests/routes_test.py | 5 ++- 6 files changed, 37 insertions(+), 84 deletions(-) diff --git a/.github/copilot-instructions.md b/.github/copilot-instructions.md index 416cfaf6..a5c8ebdf 100644 --- a/.github/copilot-instructions.md +++ b/.github/copilot-instructions.md @@ -13,4 +13,5 @@ - check `ruff.toml` for formatting rules - always lint changes using `ruff check` -- tests should be placed in `tests/` directory, follow the existing structure and code style \ No newline at end of file +- tests should be placed in `tests/` directory, follow the existing structure and code style +- to run a test always use `bash scripts/run_tests.sh tests/path_to_test.py -k 
[TEST_NAME]` command \ No newline at end of file diff --git a/README.md b/README.md index e06bd98e..eab1c67a 100644 --- a/README.md +++ b/README.md @@ -1079,7 +1079,7 @@ Commands for interacting with the Fair staking functionality. Allow an address to receive staking fees. ```shell -fair staking add-allowed-receiver +fair staking add-receiver ``` Arguments: @@ -1091,25 +1091,13 @@ Arguments: Remove an address from the allowed receivers list. ```shell -fair staking remove-allowed-receiver +fair staking remove-receiver ``` Arguments: * `RECEIVER_ADDRESS` - Address to remove from the allowed receivers list. -#### Send all fees - -Send all accumulated fees to the specified address. - -```shell -fair staking send-all-fees -``` - -Arguments: - -* `TO_ADDRESS` - Destination address to receive all fees. - #### Claim all fees Claim all accumulated fees to the node wallet. @@ -1144,7 +1132,7 @@ Arguments: #### Send fees -Send a specific amount of fees to an address. +Send a specific amount of fees to the default allowed receiver. ```shell fair staking send-fees @@ -1153,7 +1141,7 @@ fair staking send-fees Arguments: * `TO_ADDRESS` - Destination address for the fee transfer. -* `AMOUNT` - Amount of fees to send (FAIR). +* `AMOUNT` - Amount of fees to send (FAIR). Use `--all` to send all. 
#### Get earned fee amount diff --git a/node_cli/cli/staking.py b/node_cli/cli/staking.py index c23ad75a..ddd8a2ab 100644 --- a/node_cli/cli/staking.py +++ b/node_cli/cli/staking.py @@ -22,7 +22,6 @@ from node_cli.fair.staking import ( add_allowed_receiver, remove_allowed_receiver, - send_all_fees, claim_all_fees, set_fee_rate, claim_fees, @@ -42,7 +41,7 @@ def staking(): pass -@staking.command('add-allowed-receiver', help='Add allowed receiver') +@staking.command('add-receiver', help='Add allowed receiver') @click.argument('receiver') @click.option( '--yes', @@ -55,7 +54,7 @@ def _add_allowed_receiver(receiver: str) -> None: add_allowed_receiver(receiver) -@staking.command('remove-allowed-receiver', help='Remove allowed receiver') +@staking.command('remove-receiver', help='Remove allowed receiver') @click.argument('receiver') @click.option( '--yes', @@ -68,19 +67,6 @@ def _remove_allowed_receiver(receiver: str) -> None: remove_allowed_receiver(receiver) -@staking.command('send-all-fees', help='Send all fees to address') -@click.argument('to') -@click.option( - '--yes', - is_flag=True, - callback=abort_if_false, - expose_value=False, - prompt='Are you sure you want to send all fees?', -) -def _send_all_fees(to: str) -> None: - send_all_fees(to) - - @staking.command('claim-all-fees', help='Claim all fees') @click.option( '--yes', @@ -119,9 +105,10 @@ def _claim_fees(amount: float) -> None: claim_fees(amount) -@staking.command('send-fees', help='Send fees to address') +@staking.command('send-fees', help='Send fees to address (or all with --all)') @click.argument('to') -@click.argument('amount', type=float) +@click.argument('value', type=float, required=False) +@click.option('--all', 'send_all', is_flag=True, help='Send all fees to address') @click.option( '--yes', is_flag=True, @@ -129,8 +116,10 @@ def _claim_fees(amount: float) -> None: expose_value=False, prompt='Are you sure you want to send fees?', ) -def _send_fees(to: str, amount: float) -> None: - 
send_fees(to, amount) +def _send_fees(to: str, value: float | None, send_all: bool) -> None: + if value is None and not send_all: + raise click.UsageError('Provide or use --all') + send_fees(to, None if send_all else value) @staking.command('get-earned-fee-amount', help='Get earned fee amount') diff --git a/node_cli/configs/routes.py b/node_cli/configs/routes.py index 6f0829b3..5eba041f 100644 --- a/node_cli/configs/routes.py +++ b/node_cli/configs/routes.py @@ -44,9 +44,8 @@ 'fair-chain': ['record', 'checks'], 'fair-node-passive': ['setup'], 'fair-staking': [ - 'add-allowed-receiver', - 'remove-allowed-receiver', - 'send-all-fees', + 'add-receiver', + 'remove-receiver', 'claim-all-fees', 'set-fee-rate', 'claim-fees', diff --git a/node_cli/fair/staking.py b/node_cli/fair/staking.py index 9c21dd7c..d9469638 100644 --- a/node_cli/fair/staking.py +++ b/node_cli/fair/staking.py @@ -17,94 +17,71 @@ # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see . 
-from typing import Any, Optional +from typing import Any -from node_cli.core.host import is_node_inited +from node_cli.utils.decorators import check_inited from node_cli.utils.exit_codes import CLIExitCodes from node_cli.utils.helper import error_exit, post_request BLUEPRINT_NAME = 'fair-staking' -def _handle_response(status: str, payload: Any, success: Optional[str] = None) -> None: +def _handle_response(status: str, payload: Any, success: str | None = None) -> None: if status == 'ok': print(success if success is not None else 'OK') else: error_exit(payload, exit_code=CLIExitCodes.BAD_API_RESPONSE) +@check_inited def add_allowed_receiver(receiver: str) -> None: - if not is_node_inited(): - print('Node is not initialized') - return status, payload = post_request( - blueprint=BLUEPRINT_NAME, method='add-allowed-receiver', json={'receiver': receiver} + blueprint=BLUEPRINT_NAME, method='add-receiver', json={'receiver': receiver} ) _handle_response(status, payload, success=f'Allowed receiver added: {receiver}') +@check_inited def remove_allowed_receiver(receiver: str) -> None: - if not is_node_inited(): - print('Node is not initialized') - return status, payload = post_request( - blueprint=BLUEPRINT_NAME, method='remove-allowed-receiver', json={'receiver': receiver} + blueprint=BLUEPRINT_NAME, method='remove-receiver', json={'receiver': receiver} ) _handle_response(status, payload, success=f'Allowed receiver removed: {receiver}') -def send_all_fees(to: str) -> None: - if not is_node_inited(): - print('Node is not initialized') - return - status, payload = post_request( - blueprint=BLUEPRINT_NAME, method='send-all-fees', json={'to': to} - ) - _handle_response(status, payload, success=f'All fees sent to {to}') +@check_inited +def send_fees(to: str, value: float | None) -> None: + json_data: dict[str, Any] = {'to': to} + if value is not None: + json_data['value'] = value + status, payload = post_request(blueprint=BLUEPRINT_NAME, method='send-fees', json=json_data) + 
_handle_response(status, payload, success=f'Fees sent to {to}') +@check_inited def claim_all_fees() -> None: - if not is_node_inited(): - print('Node is not initialized') - return status, payload = post_request(blueprint=BLUEPRINT_NAME, method='claim-all-fees') _handle_response(status, payload, success='All fees claimed') +@check_inited def set_fee_rate(fee_rate: int) -> None: - if not is_node_inited(): - print('Node is not initialized') - return status, payload = post_request( blueprint=BLUEPRINT_NAME, method='set-fee-rate', json={'feeRate': fee_rate} ) _handle_response(status, payload, success=f'Fee rate set to {fee_rate}') +@check_inited def claim_fees(amount: float) -> None: - if not is_node_inited(): - print('Node is not initialized') - return status, payload = post_request( blueprint=BLUEPRINT_NAME, method='claim-fees', json={'amount': amount} ) _handle_response(status, payload, success=f'Fees claimed: {amount}') -def send_fees(to: str, amount: float) -> None: - if not is_node_inited(): - print('Node is not initialized') - return - status, payload = post_request( - blueprint=BLUEPRINT_NAME, method='send-fees', json={'to': to, 'amount': amount} - ) - _handle_response(status, payload, success=f'Fees sent: {amount} to {to}') - - +@check_inited def get_earned_fee_amount() -> None: - if not is_node_inited(): - print('Node is not initialized') - return status, payload = post_request(blueprint=BLUEPRINT_NAME, method='get-earned-fee-amount') if status == 'ok' and isinstance(payload, dict): amount_wei = payload.get('amount_wei') diff --git a/tests/routes_test.py b/tests/routes_test.py index bfb44d4c..6b18e4e1 100644 --- a/tests/routes_test.py +++ b/tests/routes_test.py @@ -39,9 +39,8 @@ '/api/v1/fair-chain/record', '/api/v1/fair-chain/checks', '/api/v1/fair-node-passive/setup', - '/api/v1/fair-staking/add-allowed-receiver', - '/api/v1/fair-staking/remove-allowed-receiver', - '/api/v1/fair-staking/send-all-fees', + '/api/v1/fair-staking/add-receiver', + 
'/api/v1/fair-staking/remove-receiver', '/api/v1/fair-staking/claim-all-fees', '/api/v1/fair-staking/set-fee-rate', '/api/v1/fair-staking/claim-fees', From af5cc41e831ad78a33ebd5ad6fdd83535781c913 Mon Sep 17 00:00:00 2001 From: Dmytro Date: Wed, 27 Aug 2025 19:10:31 +0100 Subject: [PATCH 214/332] Rename value to amount in send fees --- node_cli/cli/staking.py | 10 +++++----- node_cli/fair/staking.py | 6 +++--- 2 files changed, 8 insertions(+), 8 deletions(-) diff --git a/node_cli/cli/staking.py b/node_cli/cli/staking.py index ddd8a2ab..2fce0f6b 100644 --- a/node_cli/cli/staking.py +++ b/node_cli/cli/staking.py @@ -107,7 +107,7 @@ def _claim_fees(amount: float) -> None: @staking.command('send-fees', help='Send fees to address (or all with --all)') @click.argument('to') -@click.argument('value', type=float, required=False) +@click.argument('amount', type=float, required=False) @click.option('--all', 'send_all', is_flag=True, help='Send all fees to address') @click.option( '--yes', @@ -116,10 +116,10 @@ def _claim_fees(amount: float) -> None: expose_value=False, prompt='Are you sure you want to send fees?', ) -def _send_fees(to: str, value: float | None, send_all: bool) -> None: - if value is None and not send_all: - raise click.UsageError('Provide or use --all') - send_fees(to, None if send_all else value) +def _send_fees(to: str, amount: float | None, send_all: bool) -> None: + if amount is None and not send_all: + raise click.UsageError('Provide or use --all') + send_fees(to, None if send_all else amount) @staking.command('get-earned-fee-amount', help='Get earned fee amount') diff --git a/node_cli/fair/staking.py b/node_cli/fair/staking.py index d9469638..ca3a8f80 100644 --- a/node_cli/fair/staking.py +++ b/node_cli/fair/staking.py @@ -50,10 +50,10 @@ def remove_allowed_receiver(receiver: str) -> None: @check_inited -def send_fees(to: str, value: float | None) -> None: +def send_fees(to: str, amount: float | None) -> None: json_data: dict[str, Any] = {'to': to} - 
if value is not None: - json_data['value'] = value + if amount is not None: + json_data['amount'] = amount status, payload = post_request(blueprint=BLUEPRINT_NAME, method='send-fees', json=json_data) _handle_response(status, payload, success=f'Fees sent to {to}') From 7a960299a8126720747e915445a65b640ade95ea Mon Sep 17 00:00:00 2001 From: Dmytro Date: Wed, 27 Aug 2025 19:32:22 +0100 Subject: [PATCH 215/332] Merge claim fees commands --- README.md | 19 ++++--------------- node_cli/cli/staking.py | 24 +++++++----------------- node_cli/configs/routes.py | 1 - node_cli/fair/staking.py | 19 +++++++++---------- tests/routes_test.py | 1 - 5 files changed, 20 insertions(+), 44 deletions(-) diff --git a/README.md b/README.md index eab1c67a..a50d8a3e 100644 --- a/README.md +++ b/README.md @@ -1098,12 +1098,13 @@ Arguments: * `RECEIVER_ADDRESS` - Address to remove from the allowed receivers list. -#### Claim all fees +#### Claim fees -Claim all accumulated fees to the node wallet. +Claim a specific amount of fees or all fees to the node wallet. ```shell -fair staking claim-all-fees +fair staking claim-fees +fair staking claim-fees --all ``` #### Set fee rate @@ -1118,18 +1119,6 @@ Arguments: * `FEE_RATE` - Fee rate value as integer (uint16). -#### Claim fees - -Claim a specific amount of fees to the node wallet. - -```shell -fair staking claim-fees -``` - -Arguments: - -* `AMOUNT` - Amount of fees to claim (FAIR). - #### Send fees Send a specific amount of fees to the default allowed receiver. 
diff --git a/node_cli/cli/staking.py b/node_cli/cli/staking.py index 2fce0f6b..8cde234c 100644 --- a/node_cli/cli/staking.py +++ b/node_cli/cli/staking.py @@ -22,7 +22,6 @@ from node_cli.fair.staking import ( add_allowed_receiver, remove_allowed_receiver, - claim_all_fees, set_fee_rate, claim_fees, send_fees, @@ -67,18 +66,6 @@ def _remove_allowed_receiver(receiver: str) -> None: remove_allowed_receiver(receiver) -@staking.command('claim-all-fees', help='Claim all fees') -@click.option( - '--yes', - is_flag=True, - callback=abort_if_false, - expose_value=False, - prompt='Are you sure you want to claim all fees?', -) -def _claim_all_fees() -> None: - claim_all_fees() - - @staking.command('set-fee-rate', help='Set fee rate (uint16, basis points; 25 = 2.5%)') @click.argument('fee_rate', type=int) @click.option( @@ -92,8 +79,9 @@ def _set_fee_rate(fee_rate: int) -> None: set_fee_rate(fee_rate) -@staking.command('claim-fees', help='Claim fees amount (FAIR)') -@click.argument('amount', type=float) +@staking.command('claim-fees', help='Claim fees amount (FAIR) or all with --all') +@click.argument('amount', type=float, required=False) +@click.option('--all', 'claim_all', is_flag=True, help='Claim all fees') @click.option( '--yes', is_flag=True, @@ -101,8 +89,10 @@ def _set_fee_rate(fee_rate: int) -> None: expose_value=False, prompt='Are you sure you want to claim fees?', ) -def _claim_fees(amount: float) -> None: - claim_fees(amount) +def _claim_fees(amount: float | None, claim_all: bool) -> None: + if amount is None and not claim_all: + raise click.UsageError('Provide or use --all') + claim_fees(None if claim_all else amount) @staking.command('send-fees', help='Send fees to address (or all with --all)') diff --git a/node_cli/configs/routes.py b/node_cli/configs/routes.py index 5eba041f..83bb2866 100644 --- a/node_cli/configs/routes.py +++ b/node_cli/configs/routes.py @@ -46,7 +46,6 @@ 'fair-staking': [ 'add-receiver', 'remove-receiver', - 'claim-all-fees', 'set-fee-rate', 
'claim-fees', 'send-fees', diff --git a/node_cli/fair/staking.py b/node_cli/fair/staking.py index ca3a8f80..9e81e69f 100644 --- a/node_cli/fair/staking.py +++ b/node_cli/fair/staking.py @@ -58,12 +58,6 @@ def send_fees(to: str, amount: float | None) -> None: _handle_response(status, payload, success=f'Fees sent to {to}') -@check_inited -def claim_all_fees() -> None: - status, payload = post_request(blueprint=BLUEPRINT_NAME, method='claim-all-fees') - _handle_response(status, payload, success='All fees claimed') - - @check_inited def set_fee_rate(fee_rate: int) -> None: status, payload = post_request( @@ -73,11 +67,16 @@ def set_fee_rate(fee_rate: int) -> None: @check_inited -def claim_fees(amount: float) -> None: - status, payload = post_request( - blueprint=BLUEPRINT_NAME, method='claim-fees', json={'amount': amount} +def claim_fees(amount: float | None) -> None: + json_data: dict[str, Any] = {} + if amount is not None: + json_data['amount'] = amount + status, payload = post_request(blueprint=BLUEPRINT_NAME, method='claim-fees', json=json_data) + _handle_response( + status, + payload, + success='All fees claimed' if amount is None else f'Fees claimed: {amount}', ) - _handle_response(status, payload, success=f'Fees claimed: {amount}') @check_inited diff --git a/tests/routes_test.py b/tests/routes_test.py index 6b18e4e1..b64e9e0a 100644 --- a/tests/routes_test.py +++ b/tests/routes_test.py @@ -41,7 +41,6 @@ '/api/v1/fair-node-passive/setup', '/api/v1/fair-staking/add-receiver', '/api/v1/fair-staking/remove-receiver', - '/api/v1/fair-staking/claim-all-fees', '/api/v1/fair-staking/set-fee-rate', '/api/v1/fair-staking/claim-fees', '/api/v1/fair-staking/send-fees', From c3af5509b1ba11a9d5011afdfcce530435e47ad2 Mon Sep 17 00:00:00 2001 From: Mikolaj Kucharski Date: Tue, 17 Jun 2025 18:04:36 +0000 Subject: [PATCH 216/332] Fix check_ssl_connection() function Function doesn't wait for `openssl s_client ...` to finish. 
It assumes that when the command is still running that is the successful condition. However the function should wait for exit code from the binary. We saw in production intermittent and very often `skale ssl upload` failures. This change should fix this problem and underlying race condition. --- node_cli/core/ssl/check.py | 13 ++++++++++--- 1 file changed, 10 insertions(+), 3 deletions(-) diff --git a/node_cli/core/ssl/check.py b/node_cli/core/ssl/check.py index 289bc77e..a82ea11b 100644 --- a/node_cli/core/ssl/check.py +++ b/node_cli/core/ssl/check.py @@ -196,8 +196,15 @@ def check_ssl_connection(host, port, silent=False): ] expose_output = not silent with detached_subprocess(ssl_check_cmd, expose_output=expose_output) as dp: - time.sleep(1) - code = dp.poll() - if code is not None: + for _ in range(10): + code = dp.poll() + if code is None: + logger.info('Healthcheck process still running...') + time.sleep(2) + continue + elif code == 0: + return logger.error('Healthcheck connection failed') raise SSLHealthcheckError('OpenSSL connection verification failed') + logger.error('Healthcheck timed-out') + raise SSLHealthcheckError('OpenSSL connection verification timed-out') From a5c70aa7f8e210e951e9a836c20566e3596cef12 Mon Sep 17 00:00:00 2001 From: Mikolaj Kucharski Date: Mon, 23 Jun 2025 18:55:53 +0000 Subject: [PATCH 217/332] Move to dp.wait() in check_ssl_connection() Replace for loop and dp.poll() with more straightforward dp.wait() with a timeout, as requested during diff review. 
--- node_cli/core/ssl/check.py | 25 +++++++++++++------------ 1 file changed, 13 insertions(+), 12 deletions(-) diff --git a/node_cli/core/ssl/check.py b/node_cli/core/ssl/check.py index a82ea11b..327f72a6 100644 --- a/node_cli/core/ssl/check.py +++ b/node_cli/core/ssl/check.py @@ -20,6 +20,7 @@ import time import socket import logging +import subprocess from contextlib import contextmanager from node_cli.core.ssl.utils import detached_subprocess @@ -196,15 +197,15 @@ def check_ssl_connection(host, port, silent=False): ] expose_output = not silent with detached_subprocess(ssl_check_cmd, expose_output=expose_output) as dp: - for _ in range(10): - code = dp.poll() - if code is None: - logger.info('Healthcheck process still running...') - time.sleep(2) - continue - elif code == 0: - return - logger.error('Healthcheck connection failed') - raise SSLHealthcheckError('OpenSSL connection verification failed') - logger.error('Healthcheck timed-out') - raise SSLHealthcheckError('OpenSSL connection verification timed-out') + timeout = 20 + try: + dp.wait(timeout=timeout) + except subprocess.TimeoutExpired: + logger.error('Healthcheck timed-out after %s s', timeout) + raise SSLHealthcheckError('OpenSSL connection verification timed-out') + + if dp.returncode == 0: # success + return + + logger.error('Healthcheck connection failed (code %s)', dp.returncode) + raise SSLHealthcheckError('OpenSSL connection verification failed') From 54ab23ae6a8d405b5891f93db1ad6ed51d24f568 Mon Sep 17 00:00:00 2001 From: Mikolaj Kucharski Date: Mon, 23 Jun 2025 20:25:04 +0000 Subject: [PATCH 218/332] Read from /dev/null in detached_subprocess() Redirect the child's standard input to subprocess.DEVNULL, so it starts with no stdin attached. This prevents the OpenSSL health-check process from reading from, or blocking on, the parent's terminal or execution environment stdin stream. 
--- node_cli/core/ssl/utils.py | 12 +++++++++--- 1 file changed, 9 insertions(+), 3 deletions(-) diff --git a/node_cli/core/ssl/utils.py b/node_cli/core/ssl/utils.py index a5a329dd..c80b3a8a 100644 --- a/node_cli/core/ssl/utils.py +++ b/node_cli/core/ssl/utils.py @@ -17,13 +17,13 @@ # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see . +import logging import os import shutil -import logging import subprocess from contextlib import contextmanager -from node_cli.configs.ssl import SSL_CERT_FILEPATH, SSL_KEY_FILEPATH, SSL_FOLDER_PATH +from node_cli.configs.ssl import SSL_CERT_FILEPATH, SSL_FOLDER_PATH, SSL_KEY_FILEPATH logger = logging.getLogger(__name__) @@ -48,7 +48,13 @@ def is_ssl_folder_empty(ssl_path=SSL_FOLDER_PATH): @contextmanager def detached_subprocess(cmd, expose_output=False): logger.debug(f'Starting detached subprocess: {cmd}') - p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, encoding='utf-8') + p = subprocess.Popen( + cmd, + stdout=subprocess.PIPE, + stderr=subprocess.STDOUT, + stdin=subprocess.DEVNULL, + encoding='utf-8', + ) try: yield p finally: From 2272e94affdd21986793c2bc930c5139010e8ec6 Mon Sep 17 00:00:00 2001 From: Dmytro Date: Wed, 3 Sep 2025 13:46:27 +0100 Subject: [PATCH 219/332] Rename compose services, update container names --- node_cli/core/node.py | 2 +- node_cli/fair/common.py | 2 +- node_cli/operations/fair.py | 6 ++-- node_cli/utils/docker_utils.py | 62 ++++++++++++++-------------------- tests/cli/health_test.py | 4 +-- 5 files changed, 32 insertions(+), 44 deletions(-) diff --git a/node_cli/core/node.py b/node_cli/core/node.py index 3253f6c5..8586f3f9 100644 --- a/node_cli/core/node.py +++ b/node_cli/core/node.py @@ -110,7 +110,7 @@ class NodeStatuses(Enum): def is_update_safe(node_type: NodeType, node_mode: NodeMode) -> bool: - if not is_admin_running(node_type, node_mode): + if not is_admin_running(): if node_mode == NodeMode.PASSIVE: return 
True elif not is_api_running(node_type): diff --git a/node_cli/fair/common.py b/node_cli/fair/common.py index ce24ad45..9d24fa29 100644 --- a/node_cli/fair/common.py +++ b/node_cli/fair/common.py @@ -133,4 +133,4 @@ def repair_chain(snapshot_from: str = 'any') -> None: env = compose_node_env( SKALE_DIR_ENV_FILEPATH, save=False, node_type=NodeType.FAIR, node_mode=node_mode ) - repair_fair_op(node_mode=node_mode, env=env, snapshot_from=snapshot_from) + repair_fair_op(env=env, snapshot_from=snapshot_from) diff --git a/node_cli/operations/fair.py b/node_cli/operations/fair.py index 50f87c1d..d6d4334b 100644 --- a/node_cli/operations/fair.py +++ b/node_cli/operations/fair.py @@ -296,10 +296,10 @@ def trigger_skaled_snapshot_mode(env: dict, snapshot_from: str = 'any') -> None: record.set_snapshot_from(snapshot_from) -def repair(node_mode: NodeMode, env: dict, snapshot_from: str = 'any') -> None: +def repair(env: dict, snapshot_from: str = 'any') -> None: logger.info('Starting fair node repair') - container_name = 'fair_admin' - if is_admin_running(node_type=NodeType.FAIR, node_mode=node_mode): + container_name = 'sk_admin' + if is_admin_running(): logger.info('Stopping admin container') stop_container_by_name(container_name=container_name) logger.info('Removing chain container') diff --git a/node_cli/utils/docker_utils.py b/node_cli/utils/docker_utils.py index 3aa46e56..60514cdb 100644 --- a/node_cli/utils/docker_utils.py +++ b/node_cli/utils/docker_utils.py @@ -48,46 +48,46 @@ TELEGRAF_REMOVE_TIMEOUT = 20 REDIS_START_TIMEOUT = 10 -REDIS_SERVICE_DICT = {'redis': 'skale_redis'} +REDIS_SERVICE_DICT = {'redis': 'sk_redis'} CORE_COMMON_COMPOSE_SERVICES = { - 'transaction-manager': 'skale_transaction-manager', - 'redis': 'skale_redis', - 'watchdog': 'skale_watchdog', - 'nginx': 'skale_nginx', - 'filebeat': 'skale_filebeat', + 'transaction-manager': 'sk_tm', + 'redis': 'sk_redis', + 'watchdog': 'sk_watchdog', + 'nginx': 'sk_nginx', + 'filebeat': 'sk_filebeat', } 
BASE_SKALE_COMPOSE_SERVICES = { **CORE_COMMON_COMPOSE_SERVICES, - 'skale-admin': 'skale_admin', - 'skale-api': 'skale_api', - 'bounty': 'skale_bounty', + 'admin': 'sk_admin', + 'api': 'sk_api', + 'bounty': 'sk_bounty', } BASE_FAIR_COMPOSE_SERVICES = { **CORE_COMMON_COMPOSE_SERVICES, - 'fair-admin': 'fair_admin', - 'fair-api': 'fair_api', + 'admin': 'sk_admin', + 'api': 'sk_api', } BASE_FAIR_BOOT_COMPOSE_SERVICES = { **CORE_COMMON_COMPOSE_SERVICES, - 'fair-boot': 'fair_boot_admin', - 'fair-boot-api': 'fair_boot_api', + 'boot-admin': 'sk_boot_admin', + 'boot-api': 'sk_boot_api', } BASE_PASSIVE_COMPOSE_SERVICES = { - 'skale-passive-admin': 'skale_passive_admin', - 'nginx': 'skale_nginx', + 'admin': 'sk_admin', + 'nginx': 'sk_nginx', } BASE_PASSIVE_FAIR_COMPOSE_SERVICES = { - 'fair-admin': 'fair_admin', - 'fair-api': 'fair_api', - 'nginx': 'skale_nginx', - 'watchdog': 'skale_watchdog', - 'filebeat': 'skale_filebeat', + 'admin': 'sk_admin', + 'api': 'sk_api', + 'nginx': 'sk_nginx', + 'watchdog': 'sk_watchdog', + 'filebeat': 'sk_filebeat', **REDIS_SERVICE_DICT, } @@ -120,11 +120,11 @@ def get_containers(container_name_filter=None, _all=True) -> list: def get_all_schain_containers(_all=True) -> list: - return docker_client().containers.list(all=_all, filters={'name': 'skale_schain_*'}) + return docker_client().containers.list(all=_all, filters={'name': 'sk_chain_*'}) def get_all_ima_containers(_all=True) -> list: - return docker_client().containers.list(all=_all, filters={'name': 'skale_ima_*'}) + return docker_client().containers.list(all=_all, filters={'name': 'sk_ima_*'}) def remove_dynamic_containers() -> None: @@ -203,7 +203,7 @@ def start_container_by_name(container_name: str, dclient: Optional[DockerClient] def remove_schain_container_by_name( schain_name: str, dclient: Optional[DockerClient] = None ) -> None: - container_name = f'skale_schain_{schain_name}' + container_name = f'sk_chain_{schain_name}' remove_container_by_name(container_name, 
timeout=SCHAIN_REMOVE_TIMEOUT, dclient=dclient) @@ -430,20 +430,8 @@ def is_api_running(node_type: NodeType, dclient: Optional[DockerClient] = None) return is_container_running(name='skale_api', dclient=dclient) -def is_admin_running( - node_type: NodeType, - node_mode: NodeMode, - dclient: Optional[DockerClient] = None, -) -> bool: - if active_fair(node_type, node_mode): - container_name = 'fair_admin' - elif passive_fair(node_type, node_mode): - container_name = 'fair_passive_admin' - elif active_skale(node_type, node_mode): - container_name = 'skale_admin' - elif passive_skale(node_type, node_mode): - container_name = 'skale_passive_admin' - return is_container_running(name=container_name, dclient=dclient) +def is_admin_running(dclient: Optional[DockerClient] = None) -> bool: + return is_container_running(name='sk_admin', dclient=dclient) def system_prune(): diff --git a/tests/cli/health_test.py b/tests/cli/health_test.py index de14c7c2..c6ea16df 100644 --- a/tests/cli/health_test.py +++ b/tests/cli/health_test.py @@ -9,7 +9,7 @@ 'payload': [ { 'image': 'skalenetwork/schain:1.46-develop.21', - 'name': 'skale_schain_shapely-alfecca-meridiana', + 'name': 'sk_chain_shapely-alfecca-meridiana', 'state': { 'Status': 'running', 'Running': True, @@ -51,7 +51,7 @@ def test_containers(): assert result.exit_code == 0 assert ( result.output - == ' Name Status Started At Image \n-------------------------------------------------------------------------------------------------------------\nskale_schain_shapely-alfecca-meridiana Running Jul 31 2020 11:56:35 skalenetwork/schain:1.46-develop.21\nskale_api Running Jul 31 2020 11:55:17 skale-admin:latest \n' # noqa + == ' Name Status Started At Image \n-------------------------------------------------------------------------------------------------------------\nsk_chain_shapely-alfecca-meridiana Running Jul 31 2020 11:56:35 skalenetwork/schain:1.46-develop.21\nskale_api Running Jul 31 2020 11:55:17 skale-admin:latest \n' # noqa ) 
From 4d37dbc7f5c6927c246f103cd70e80fe91167b07 Mon Sep 17 00:00:00 2001 From: Dmytro Date: Wed, 3 Sep 2025 13:56:56 +0100 Subject: [PATCH 220/332] Revert to sk_schain prefix --- node_cli/utils/docker_utils.py | 4 ++-- tests/cli/health_test.py | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/node_cli/utils/docker_utils.py b/node_cli/utils/docker_utils.py index 60514cdb..f17644fe 100644 --- a/node_cli/utils/docker_utils.py +++ b/node_cli/utils/docker_utils.py @@ -120,7 +120,7 @@ def get_containers(container_name_filter=None, _all=True) -> list: def get_all_schain_containers(_all=True) -> list: - return docker_client().containers.list(all=_all, filters={'name': 'sk_chain_*'}) + return docker_client().containers.list(all=_all, filters={'name': 'sk_schain_*'}) def get_all_ima_containers(_all=True) -> list: @@ -203,7 +203,7 @@ def start_container_by_name(container_name: str, dclient: Optional[DockerClient] def remove_schain_container_by_name( schain_name: str, dclient: Optional[DockerClient] = None ) -> None: - container_name = f'sk_chain_{schain_name}' + container_name = f'sk_schain_{schain_name}' remove_container_by_name(container_name, timeout=SCHAIN_REMOVE_TIMEOUT, dclient=dclient) diff --git a/tests/cli/health_test.py b/tests/cli/health_test.py index c6ea16df..951b9f0d 100644 --- a/tests/cli/health_test.py +++ b/tests/cli/health_test.py @@ -9,7 +9,7 @@ 'payload': [ { 'image': 'skalenetwork/schain:1.46-develop.21', - 'name': 'sk_chain_shapely-alfecca-meridiana', + 'name': 'sk_schain_shapely-alfecca-meridiana', 'state': { 'Status': 'running', 'Running': True, @@ -51,7 +51,7 @@ def test_containers(): assert result.exit_code == 0 assert ( result.output - == ' Name Status Started At Image \n-------------------------------------------------------------------------------------------------------------\nsk_chain_shapely-alfecca-meridiana Running Jul 31 2020 11:56:35 skalenetwork/schain:1.46-develop.21\nskale_api Running Jul 31 2020 11:55:17 
skale-admin:latest \n' # noqa + == ' Name Status Started At Image \n-------------------------------------------------------------------------------------------------------------\nsk_schain_shapely-alfecca-meridiana Running Jul 31 2020 11:56:35 skalenetwork/schain:1.46-develop.21\nskale_api Running Jul 31 2020 11:55:17 skale-admin:latest \n' # noqa ) From 99290e60a7759a55aff964199d3b48fb3e4ec715 Mon Sep 17 00:00:00 2001 From: Dmytro Date: Wed, 3 Sep 2025 16:00:32 +0100 Subject: [PATCH 221/332] Update is api running helper, update containers test --- node_cli/core/node.py | 8 ++++---- node_cli/utils/docker_utils.py | 7 ++----- tests/cli/health_test.py | 4 ++-- tests/core/core_node_test.py | 10 +++++----- 4 files changed, 13 insertions(+), 16 deletions(-) diff --git a/node_cli/core/node.py b/node_cli/core/node.py index 8586f3f9..3a704ddc 100644 --- a/node_cli/core/node.py +++ b/node_cli/core/node.py @@ -109,11 +109,11 @@ class NodeStatuses(Enum): NOT_CREATED = 5 -def is_update_safe(node_type: NodeType, node_mode: NodeMode) -> bool: +def is_update_safe(node_mode: NodeMode) -> bool: if not is_admin_running(): if node_mode == NodeMode.PASSIVE: return True - elif not is_api_running(node_type): + elif not is_api_running(): return True status, payload = get_request(BLUEPRINT_NAME, 'update-safe') if status == 'error': @@ -298,7 +298,7 @@ def update( ) -> None: node_mode = upsert_node_mode(node_mode=node_mode) - if not unsafe_ok and not is_update_safe(node_type=node_type, node_mode=node_mode): + if not unsafe_ok and not is_update_safe(node_mode=node_mode): error_msg = 'Cannot update safely' error_exit(error_msg, exit_code=CLIExitCodes.UNSAFE_UPDATE) @@ -422,7 +422,7 @@ def set_maintenance_mode_off(): @check_user def turn_off(node_type: NodeType, maintenance_on: bool = False, unsafe_ok: bool = False) -> None: node_mode = upsert_node_mode() - if not unsafe_ok and not is_update_safe(node_type=node_type, node_mode=node_mode): + if not unsafe_ok and not 
is_update_safe(node_mode=node_mode): error_msg = 'Cannot turn off safely' error_exit(error_msg, exit_code=CLIExitCodes.UNSAFE_UPDATE) if maintenance_on: diff --git a/node_cli/utils/docker_utils.py b/node_cli/utils/docker_utils.py index f17644fe..26d16bd7 100644 --- a/node_cli/utils/docker_utils.py +++ b/node_cli/utils/docker_utils.py @@ -423,11 +423,8 @@ def is_container_running(name: str, dclient: Optional[DockerClient] = None) -> b return False -def is_api_running(node_type: NodeType, dclient: Optional[DockerClient] = None) -> bool: - if node_type == NodeType.FAIR: - return is_container_running(name='fair_api', dclient=dclient) - else: - return is_container_running(name='skale_api', dclient=dclient) +def is_api_running(dclient: Optional[DockerClient] = None) -> bool: + return is_container_running(name='sk_api', dclient=dclient) def is_admin_running(dclient: Optional[DockerClient] = None) -> bool: diff --git a/tests/cli/health_test.py b/tests/cli/health_test.py index 951b9f0d..6f3fc022 100644 --- a/tests/cli/health_test.py +++ b/tests/cli/health_test.py @@ -26,7 +26,7 @@ }, { 'image': 'skale-admin:latest', - 'name': 'skale_api', + 'name': 'sk_api', 'state': { 'Status': 'running', 'Running': True, @@ -51,7 +51,7 @@ def test_containers(): assert result.exit_code == 0 assert ( result.output - == ' Name Status Started At Image \n-------------------------------------------------------------------------------------------------------------\nsk_schain_shapely-alfecca-meridiana Running Jul 31 2020 11:56:35 skalenetwork/schain:1.46-develop.21\nskale_api Running Jul 31 2020 11:55:17 skale-admin:latest \n' # noqa + == ' Name Status Started At Image \n----------------------------------------------------------------------------------------------------------\nsk_schain_shapely-alfecca-meridiana Running Jul 31 2020 11:56:35 skalenetwork/schain:1.46-develop.21\nsk_api Running Jul 31 2020 11:55:17 skale-admin:latest \n' # noqa ) diff --git a/tests/core/core_node_test.py 
b/tests/core/core_node_test.py index 70671418..7188ffae 100644 --- a/tests/core/core_node_test.py +++ b/tests/core/core_node_test.py @@ -396,7 +396,7 @@ def test_update_node(regular_user_conf, mocked_g_config, resource_file, inited_n def test_is_update_safe_when_admin_and_api_not_running( mock_requests_get, mock_is_api_running, mock_is_admin_running, node_type, node_mode ): - assert is_update_safe(node_type=node_type, node_mode=node_mode) is True + assert is_update_safe(node_mode=node_mode) is True mock_requests_get.assert_not_called() @@ -406,7 +406,7 @@ def test_is_update_safe_when_admin_and_api_not_running( def test_is_update_safe_when_admin_not_running_for_passive( mock_requests_get, mock_is_api_running, mock_is_admin_running ): - assert is_update_safe(node_type=NodeType.SKALE, node_mode=NodeMode.PASSIVE) is True + assert is_update_safe(node_mode=NodeMode.PASSIVE) is True mock_requests_get.assert_not_called() @@ -429,7 +429,7 @@ def test_is_update_safe_when_admin_running( mock_requests_get, mock_is_admin_running, api_is_safe, expected_result, node_type, node_mode ): mock_requests_get.return_value = safe_update_api_response(safe=api_is_safe) - assert is_update_safe(node_type=node_type, node_mode=node_mode) is expected_result + assert is_update_safe(node_mode=node_mode) is expected_result mock_requests_get.assert_called_once() @@ -451,7 +451,7 @@ def test_is_update_safe_when_only_api_running_for_regular( node_type, ): mock_requests_get.return_value = safe_update_api_response(safe=api_is_safe) - assert is_update_safe(node_type=node_type, node_mode=NodeMode.ACTIVE) is expected_result + assert is_update_safe(node_mode=NodeMode.ACTIVE) is expected_result mock_requests_get.assert_called_once() @@ -469,5 +469,5 @@ def test_is_update_safe_when_api_call_fails( mock_requests_get, mock_is_admin_running, node_type, node_mode ): mock_requests_get.side_effect = requests.exceptions.ConnectionError('Test connection error') - assert is_update_safe(node_type=node_type, 
node_mode=node_mode) is False + assert is_update_safe(node_mode=node_mode) is False mock_requests_get.assert_called_once() From 62167f85d6506b33a2a3c491e8156138b3259fa7 Mon Sep 17 00:00:00 2001 From: Dmytro Date: Thu, 4 Sep 2025 15:33:37 +0100 Subject: [PATCH 222/332] Update resource alloc generation --- node_cli/core/resources.py | 2 +- tests/resources_test.py | 38 +++++++++++++++++++------------------- 2 files changed, 20 insertions(+), 20 deletions(-) diff --git a/node_cli/core/resources.py b/node_cli/core/resources.py index e62a0f6f..68686738 100644 --- a/node_cli/core/resources.py +++ b/node_cli/core/resources.py @@ -81,7 +81,7 @@ def compose_resource_allocation_config(env_type: str, params_by_env_type: Dict = schain_allocation_data = safe_load_yml(ALLOCATION_FILEPATH) return { - 'schain': { + 'skaled': { 'cpu_shares': schain_cpu_alloc.dict(), 'mem': schain_mem_alloc.dict(), 'disk': schain_allocation_data[env_type]['disk'], diff --git a/tests/resources_test.py b/tests/resources_test.py index 39effaf7..1f545080 100644 --- a/tests/resources_test.py +++ b/tests/resources_test.py @@ -81,23 +81,23 @@ def test_generate_resource_allocation_config(): with mock.patch('node_cli.core.resources.get_disk_size', return_value=NORMAL_DISK_SIZE): resource_allocation_config = compose_resource_allocation_config(DEFAULT_ENV_TYPE) - assert resource_allocation_config['schain']['cpu_shares']['test4'] == 102 - assert resource_allocation_config['schain']['cpu_shares']['test'] == 102 - assert resource_allocation_config['schain']['cpu_shares']['small'] == 6 - assert resource_allocation_config['schain']['cpu_shares']['medium'] == 102 - assert resource_allocation_config['schain']['cpu_shares']['large'] == 819 - - assert isinstance(resource_allocation_config['schain']['mem']['test4'], int) - assert isinstance(resource_allocation_config['schain']['mem']['test'], int) - assert isinstance(resource_allocation_config['schain']['mem']['small'], int) - assert 
isinstance(resource_allocation_config['schain']['mem']['medium'], int) - assert isinstance(resource_allocation_config['schain']['mem']['large'], int) - - assert resource_allocation_config['schain']['disk']['test4'] == 8879996928 - assert resource_allocation_config['schain']['disk']['test'] == 8879996928 - assert resource_allocation_config['schain']['disk']['small'] == 554999808 - assert resource_allocation_config['schain']['disk']['medium'] == 8879996928 - assert resource_allocation_config['schain']['disk']['large'] == 71039975424 + assert resource_allocation_config['skaled']['cpu_shares']['test4'] == 102 + assert resource_allocation_config['skaled']['cpu_shares']['test'] == 102 + assert resource_allocation_config['skaled']['cpu_shares']['small'] == 6 + assert resource_allocation_config['skaled']['cpu_shares']['medium'] == 102 + assert resource_allocation_config['skaled']['cpu_shares']['large'] == 819 + + assert isinstance(resource_allocation_config['skaled']['mem']['test4'], int) + assert isinstance(resource_allocation_config['skaled']['mem']['test'], int) + assert isinstance(resource_allocation_config['skaled']['mem']['small'], int) + assert isinstance(resource_allocation_config['skaled']['mem']['medium'], int) + assert isinstance(resource_allocation_config['skaled']['mem']['large'], int) + + assert resource_allocation_config['skaled']['disk']['test4'] == 8879996928 + assert resource_allocation_config['skaled']['disk']['test'] == 8879996928 + assert resource_allocation_config['skaled']['disk']['small'] == 554999808 + assert resource_allocation_config['skaled']['disk']['medium'] == 8879996928 + assert resource_allocation_config['skaled']['disk']['large'] == 71039975424 assert resource_allocation_config['ima']['cpu_shares'] == { 'large': 204, @@ -108,7 +108,7 @@ def test_generate_resource_allocation_config(): } assert isinstance(resource_allocation_config['ima']['mem'], dict) - assert resource_allocation_config['schain']['volume_limits'] == SCHAIN_VOLUME_PARTS + 
assert resource_allocation_config['skaled']['volume_limits'] == SCHAIN_VOLUME_PARTS def test_update_allocation_config(resource_alloc_config): @@ -194,7 +194,7 @@ def test_get_memory_alloc(params_by_env_type): def test_leveldb_limits(): with mock.patch('node_cli.core.resources.get_disk_size', return_value=NORMAL_DISK_SIZE): resource_allocation_config = compose_resource_allocation_config(DEFAULT_ENV_TYPE) - assert resource_allocation_config['schain']['leveldb_limits'] == { + assert resource_allocation_config['skaled']['leveldb_limits'] == { 'large': {'contract_storage': 12787195576, 'db_storage': 4262398525}, 'medium': {'contract_storage': 1598399446, 'db_storage': 532799815}, 'small': {'contract_storage': 99899965, 'db_storage': 33299988}, From 0c83e4976f704626c6685b35dc29e190886b4be5 Mon Sep 17 00:00:00 2001 From: Dmytro Date: Mon, 8 Sep 2025 16:20:09 +0100 Subject: [PATCH 223/332] Fix fair-api service name --- node_cli/operations/fair.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/node_cli/operations/fair.py b/node_cli/operations/fair.py index d6d4334b..4fc10b16 100644 --- a/node_cli/operations/fair.py +++ b/node_cli/operations/fair.py @@ -126,7 +126,7 @@ def init( ) compose_up(env=env, node_type=NodeType.FAIR, node_mode=node_mode) - wait_for_container(BASE_PASSIVE_FAIR_COMPOSE_SERVICES['fair-api']) + wait_for_container(BASE_PASSIVE_FAIR_COMPOSE_SERVICES['api']) time.sleep(REDIS_START_TIMEOUT) return True From 9903d20e9537e684dddcb46ab95872dc35c0aa04 Mon Sep 17 00:00:00 2001 From: Alex Sheverdin Date: Wed, 10 Sep 2025 20:29:52 +0100 Subject: [PATCH 224/332] Add `turn on` and `turn off` commands for FAIR nodes --- node_cli/cli/fair_node.py | 31 ++++++++++++++++++++++++++++++- node_cli/fair/common.py | 34 +++++++++++++++++++++++++++++++++- 2 files changed, 63 insertions(+), 2 deletions(-) diff --git a/node_cli/cli/fair_node.py b/node_cli/cli/fair_node.py index 7022f51d..355d1e30 100644 --- a/node_cli/cli/fair_node.py +++ 
b/node_cli/cli/fair_node.py @@ -18,11 +18,13 @@ # along with this program. If not, see . import click - +from node_cli.cli.info import TYPE from node_cli.core.node import backup from node_cli.fair.active import change_ip as change_ip_fair from node_cli.fair.common import cleanup as cleanup_fair +from node_cli.fair.common import turn_off as turn_off_fair +from node_cli.fair.common import turn_on as turn_on_fair from node_cli.fair.active import exit as exit_fair from node_cli.fair.active import ( get_node_info, @@ -199,3 +201,30 @@ def exit_node() -> None: @streamed_cmd def set_domain_name(domain): set_domain_name_fair(domain) + + +@node.command('turn-off', help='Turn off the node') +@click.option( + '--yes', + is_flag=True, + callback=abort_if_false, + expose_value=False, + prompt='Are you sure you want to turn off the node?', +) +@streamed_cmd +def _turn_off(): + turn_off_fair(node_type=TYPE) + + +@node.command('turn-on', help='Turn on the node') +@click.option( + '--yes', + is_flag=True, + callback=abort_if_false, + expose_value=False, + prompt='Are you sure you want to turn on the node?', +) +@click.argument('env_file') +@streamed_cmd +def _turn_on(env_file): + turn_on_fair(env_file, node_type=TYPE) \ No newline at end of file diff --git a/node_cli/fair/common.py b/node_cli/fair/common.py index 9d24fa29..b1c3d8e0 100644 --- a/node_cli/fair/common.py +++ b/node_cli/fair/common.py @@ -20,7 +20,7 @@ import time import logging -from node_cli.configs import INIT_TIMEOUT, SKALE_DIR +from node_cli.configs import INIT_TIMEOUT, SKALE_DIR, TM_INIT_TIMEOUT from node_cli.configs.user import SKALE_DIR_ENV_FILEPATH from node_cli.core.docker_config import cleanup_docker_configuration from node_cli.core.node import compose_node_env, is_base_containers_alive @@ -32,6 +32,8 @@ repair_fair_op, update_fair_op, init_fair_op, + turn_off_op, + turn_on_op ) from node_cli.core.host import save_env_params from node_cli.utils.decorators import check_inited, check_not_inited, check_user @@ 
-134,3 +136,33 @@ def repair_chain(snapshot_from: str = 'any') -> None: SKALE_DIR_ENV_FILEPATH, save=False, node_type=NodeType.FAIR, node_mode=node_mode ) repair_fair_op(env=env, snapshot_from=snapshot_from) + + +@check_inited +@check_user +def turn_off(node_type: NodeType) -> None: + node_mode = upsert_node_mode() + env = compose_node_env( + SKALE_DIR_ENV_FILEPATH, save=False, node_type=node_type, node_mode=node_mode + ) + turn_off_op(node_type=node_type, node_mode=node_mode, env=env) + + +@check_inited +@check_user +def turn_on(sync_schains, env_file, node_type: NodeType) -> None: + node_mode = upsert_node_mode() + env = compose_node_env( + env_file, + inited_node=True, + sync_schains=sync_schains, + node_type=node_type, + node_mode=node_mode, + ) + turn_on_op(env=env, node_type=node_type, node_mode=node_mode) + logger.info('Waiting for containers initialization') + time.sleep(TM_INIT_TIMEOUT) + if not is_base_containers_alive(node_type=node_type, node_mode=node_mode): + print_node_cmd_error() + return + logger.info('Node turned on') \ No newline at end of file From 85cb965fb58b883d861f2ac52c8261875be28000 Mon Sep 17 00:00:00 2001 From: Dmytro Date: Thu, 11 Sep 2025 13:50:09 +0100 Subject: [PATCH 225/332] Fix skaled container name in docker utils --- node_cli/utils/docker_utils.py | 4 ++-- tests/cli/health_test.py | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/node_cli/utils/docker_utils.py b/node_cli/utils/docker_utils.py index 26d16bd7..825886e6 100644 --- a/node_cli/utils/docker_utils.py +++ b/node_cli/utils/docker_utils.py @@ -120,7 +120,7 @@ def get_containers(container_name_filter=None, _all=True) -> list: def get_all_schain_containers(_all=True) -> list: - return docker_client().containers.list(all=_all, filters={'name': 'sk_schain_*'}) + return docker_client().containers.list(all=_all, filters={'name': 'sk_skaled_*'}) def get_all_ima_containers(_all=True) -> list: @@ -203,7 +203,7 @@ def start_container_by_name(container_name: str, 
dclient: Optional[DockerClient] def remove_schain_container_by_name( schain_name: str, dclient: Optional[DockerClient] = None ) -> None: - container_name = f'sk_schain_{schain_name}' + container_name = f'sk_skaled_{schain_name}' remove_container_by_name(container_name, timeout=SCHAIN_REMOVE_TIMEOUT, dclient=dclient) diff --git a/tests/cli/health_test.py b/tests/cli/health_test.py index 6f3fc022..8b3dc19b 100644 --- a/tests/cli/health_test.py +++ b/tests/cli/health_test.py @@ -9,7 +9,7 @@ 'payload': [ { 'image': 'skalenetwork/schain:1.46-develop.21', - 'name': 'sk_schain_shapely-alfecca-meridiana', + 'name': 'sk_skaled_shapely-alfecca-meridiana', 'state': { 'Status': 'running', 'Running': True, From c21ac67f2d21a17be16bde96a6f72766fe5c60eb Mon Sep 17 00:00:00 2001 From: Dmytro Date: Thu, 11 Sep 2025 15:46:55 +0100 Subject: [PATCH 226/332] Fix containers test --- tests/cli/health_test.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/cli/health_test.py b/tests/cli/health_test.py index 8b3dc19b..404d9a73 100644 --- a/tests/cli/health_test.py +++ b/tests/cli/health_test.py @@ -51,7 +51,7 @@ def test_containers(): assert result.exit_code == 0 assert ( result.output - == ' Name Status Started At Image \n----------------------------------------------------------------------------------------------------------\nsk_schain_shapely-alfecca-meridiana Running Jul 31 2020 11:56:35 skalenetwork/schain:1.46-develop.21\nsk_api Running Jul 31 2020 11:55:17 skale-admin:latest \n' # noqa + == ' Name Status Started At Image \n----------------------------------------------------------------------------------------------------------\nsk_skaled_shapely-alfecca-meridiana Running Jul 31 2020 11:56:35 skalenetwork/schain:1.46-develop.21\nsk_api Running Jul 31 2020 11:55:17 skale-admin:latest \n' # noqa ) From 0bfdb9e72eaafca361a86cf9fbb7cef75d43419f Mon Sep 17 00:00:00 2001 From: Alex Sheverdin Date: Thu, 11 Sep 2025 18:45:02 +0100 Subject: [PATCH 227/332] Remove 
`sync_schains` parameter --- node_cli/fair/common.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/node_cli/fair/common.py b/node_cli/fair/common.py index b1c3d8e0..7db0a975 100644 --- a/node_cli/fair/common.py +++ b/node_cli/fair/common.py @@ -150,12 +150,11 @@ def turn_off(node_type: NodeType) -> None: @check_inited @check_user -def turn_on(sync_schains, env_file, node_type: NodeType) -> None: +def turn_on(env_file, node_type: NodeType) -> None: node_mode = upsert_node_mode() env = compose_node_env( env_file, inited_node=True, - sync_schains=sync_schains, node_type=node_type, node_mode=node_mode, ) From 878419a1677933652044a91c939abd40891e0653 Mon Sep 17 00:00:00 2001 From: Alex Sheverdin Date: Thu, 11 Sep 2025 18:46:31 +0100 Subject: [PATCH 228/332] Add `turn_on_node` and `turn_off_node` commands for passive FAIR nodes --- node_cli/cli/passive_fair_node.py | 30 ++++++++++++++++++++++++++++++ 1 file changed, 30 insertions(+) diff --git a/node_cli/cli/passive_fair_node.py b/node_cli/cli/passive_fair_node.py index 487ded44..4d389b51 100644 --- a/node_cli/cli/passive_fair_node.py +++ b/node_cli/cli/passive_fair_node.py @@ -22,6 +22,9 @@ from node_cli.fair.common import init as init_fair from node_cli.fair.common import update as update_fair from node_cli.fair.common import cleanup as cleanup_fair +from node_cli.fair.common import turn_off as turn_off_fair +from node_cli.fair.common import turn_on as turn_on_fair +from node_cli.cli.info import TYPE from node_cli.fair.passive import setup_fair_passive from node_cli.utils.helper import ( URL_OR_ANY_TYPE, @@ -119,3 +122,30 @@ def cleanup_node(): @click.option('--id', required=True, type=int, help=TEXTS['fair']['node']['setup']['id']) def _setup(id: int) -> None: setup_fair_passive(node_id=id) + + +@passive_node.command('turn-off', help='Turn off the node') +@click.option( + '--yes', + is_flag=True, + callback=abort_if_false, + expose_value=False, + prompt='Are you sure you want to turn off the 
node?', +) +@streamed_cmd +def turn_off_node() -> None: + turn_off_fair(node_type=TYPE) + + +@passive_node.command('turn-on', help='Turn on the node') +@click.option( + '--yes', + is_flag=True, + callback=abort_if_false, + expose_value=False, + prompt='Are you sure you want to turn on the node?', +) +@click.argument('env_filepath') +@streamed_cmd +def turn_on_node(env_filepath: str) -> None: + turn_on_fair(env_file=env_filepath, node_type=TYPE) From be228672ec9ef008ad451142d6c82cf9fe56c2f7 Mon Sep 17 00:00:00 2001 From: Alex Sheverdin Date: Thu, 11 Sep 2025 18:47:39 +0100 Subject: [PATCH 229/332] Rename `turn on/off` functions --- node_cli/cli/fair_node.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/node_cli/cli/fair_node.py b/node_cli/cli/fair_node.py index 355d1e30..dbb5c559 100644 --- a/node_cli/cli/fair_node.py +++ b/node_cli/cli/fair_node.py @@ -212,7 +212,7 @@ def set_domain_name(domain): prompt='Are you sure you want to turn off the node?', ) @streamed_cmd -def _turn_off(): +def turn_off_node() -> None: turn_off_fair(node_type=TYPE) @@ -224,7 +224,7 @@ def _turn_off(): expose_value=False, prompt='Are you sure you want to turn on the node?', ) -@click.argument('env_file') +@click.argument('env_filepath') @streamed_cmd -def _turn_on(env_file): - turn_on_fair(env_file, node_type=TYPE) \ No newline at end of file +def turn_on_node(env_filepath: str) -> None: + turn_on_fair(env_file=env_filepath, node_type=TYPE) From 2baf27ef8a8370781bf10460ee85a3f2a4b5225d Mon Sep 17 00:00:00 2001 From: Alex Sheverdin Date: Thu, 11 Sep 2025 19:27:17 +0100 Subject: [PATCH 230/332] Update `turn_on` function to support FAIR nodes --- node_cli/operations/base.py | 25 +++++++++++++++++-------- 1 file changed, 17 insertions(+), 8 deletions(-) diff --git a/node_cli/operations/base.py b/node_cli/operations/base.py index 2380d91c..2726413b 100644 --- a/node_cli/operations/base.py +++ b/node_cli/operations/base.py @@ -359,14 +359,23 @@ def turn_off(env: dict, 
node_type: NodeType, node_mode: NodeMode) -> None: def turn_on(env: dict, node_type: NodeType, node_mode: NodeMode) -> None: logger.info('Turning on the node...') - meta_manager = CliMetaManager() - meta_manager.update_meta( - VERSION, - env['NODE_VERSION'], - env['DOCKER_LVMPY_VERSION'], - distro.id(), - distro.version(), - ) + if node_type == NodeType.FAIR: + meta_manager = FairCliMetaManager() + meta_manager.update_meta( + VERSION, + env['NODE_VERSION'], + distro.id(), + distro.version(), + ) + else: + meta_manager = CliMetaManager() + meta_manager.update_meta( + VERSION, + env['NODE_VERSION'], + env['DOCKER_LVMPY_VERSION'], + distro.id(), + distro.version() + ) if env.get('SKIP_DOCKER_CONFIG') != 'True': configure_docker() From 9c3d290762363e6186f4074a2a004e015629fc1a Mon Sep 17 00:00:00 2001 From: Dmytro Date: Fri, 12 Sep 2025 13:52:46 +0100 Subject: [PATCH 231/332] Fix nginx container name --- node_cli/configs/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/node_cli/configs/__init__.py b/node_cli/configs/__init__.py index 5a09dd83..ad4c2216 100644 --- a/node_cli/configs/__init__.py +++ b/node_cli/configs/__init__.py @@ -62,7 +62,7 @@ NGINX_TEMPLATE_FILEPATH = os.path.join(CONTAINER_CONFIG_PATH, 'nginx.conf.j2') NGINX_CONFIG_FILEPATH = os.path.join(NODE_DATA_PATH, 'nginx.conf') -NGINX_CONTAINER_NAME = 'skale_nginx' +NGINX_CONTAINER_NAME = 'sk_nginx' LOG_PATH = os.path.join(NODE_DATA_PATH, 'log') REMOVED_CONTAINERS_FOLDER_NAME = '.removed_containers' From d35f885e37c4f5bde12226c007a9fbae9b11751c Mon Sep 17 00:00:00 2001 From: Alex Sheverdin Date: Tue, 16 Sep 2025 15:40:13 +0100 Subject: [PATCH 232/332] Sort imports --- node_cli/cli/fair_node.py | 20 ++++++++------------ node_cli/cli/passive_fair_node.py | 6 +++--- node_cli/fair/common.py | 14 +++++++------- 3 files changed, 18 insertions(+), 22 deletions(-) diff --git a/node_cli/cli/fair_node.py b/node_cli/cli/fair_node.py index dbb5c559..2f7ccc4f 100644 --- 
a/node_cli/cli/fair_node.py +++ b/node_cli/cli/fair_node.py @@ -18,25 +18,21 @@ # along with this program. If not, see . import click + from node_cli.cli.info import TYPE from node_cli.core.node import backup - from node_cli.fair.active import change_ip as change_ip_fair -from node_cli.fair.common import cleanup as cleanup_fair -from node_cli.fair.common import turn_off as turn_off_fair -from node_cli.fair.common import turn_on as turn_on_fair from node_cli.fair.active import exit as exit_fair -from node_cli.fair.active import ( - get_node_info, - migrate_from_boot, - restore as restore_fair, -) -from node_cli.fair.common import init as init_fair +from node_cli.fair.active import get_node_info, migrate_from_boot from node_cli.fair.active import register as register_fair -from node_cli.fair.common import update as update_fair +from node_cli.fair.active import restore as restore_fair from node_cli.fair.active import set_domain_name as set_domain_name_fair +from node_cli.fair.common import cleanup as cleanup_fair +from node_cli.fair.common import init as init_fair from node_cli.fair.common import repair_chain - +from node_cli.fair.common import turn_off as turn_off_fair +from node_cli.fair.common import turn_on as turn_on_fair +from node_cli.fair.common import update as update_fair from node_cli.utils.helper import IP_TYPE, URL_OR_ANY_TYPE, abort_if_false, streamed_cmd from node_cli.utils.node_type import NodeMode from node_cli.utils.texts import safe_load_texts diff --git a/node_cli/cli/passive_fair_node.py b/node_cli/cli/passive_fair_node.py index 4d389b51..e48f0ce0 100644 --- a/node_cli/cli/passive_fair_node.py +++ b/node_cli/cli/passive_fair_node.py @@ -19,12 +19,12 @@ import click -from node_cli.fair.common import init as init_fair -from node_cli.fair.common import update as update_fair +from node_cli.cli.info import TYPE from node_cli.fair.common import cleanup as cleanup_fair +from node_cli.fair.common import init as init_fair from node_cli.fair.common import 
turn_off as turn_off_fair from node_cli.fair.common import turn_on as turn_on_fair -from node_cli.cli.info import TYPE +from node_cli.fair.common import update as update_fair from node_cli.fair.passive import setup_fair_passive from node_cli.utils.helper import ( URL_OR_ANY_TYPE, diff --git a/node_cli/fair/common.py b/node_cli/fair/common.py index 7db0a975..c8b4d9d5 100644 --- a/node_cli/fair/common.py +++ b/node_cli/fair/common.py @@ -17,31 +17,31 @@ # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see . -import time import logging +import time from node_cli.configs import INIT_TIMEOUT, SKALE_DIR, TM_INIT_TIMEOUT from node_cli.configs.user import SKALE_DIR_ENV_FILEPATH from node_cli.core.docker_config import cleanup_docker_configuration +from node_cli.core.host import save_env_params from node_cli.core.node import compose_node_env, is_base_containers_alive from node_cli.core.node_options import upsert_node_mode from node_cli.fair.passive import setup_fair_passive from node_cli.operations import ( FairUpdateType, cleanup_fair_op, - repair_fair_op, - update_fair_op, init_fair_op, + repair_fair_op, turn_off_op, - turn_on_op + turn_on_op, + update_fair_op, ) -from node_cli.core.host import save_env_params from node_cli.utils.decorators import check_inited, check_not_inited, check_user +from node_cli.utils.exit_codes import CLIExitCodes +from node_cli.utils.helper import error_exit from node_cli.utils.node_type import NodeMode, NodeType from node_cli.utils.print_formatters import print_node_cmd_error from node_cli.utils.texts import safe_load_texts -from node_cli.utils.exit_codes import CLIExitCodes -from node_cli.utils.helper import error_exit logger = logging.getLogger(__name__) TEXTS = safe_load_texts() From 0b01cdea45cfce938db1ebb796d925c5dafe94b0 Mon Sep 17 00:00:00 2001 From: Alex Sheverdin Date: Tue, 16 Sep 2025 16:03:55 +0100 Subject: [PATCH 233/332] Update README.md --- README.md | 58 
+++++++++++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 58 insertions(+) diff --git a/README.md b/README.md index a50d8a3e..a9cc916a 100644 --- a/README.md +++ b/README.md @@ -828,6 +828,35 @@ Options: * `--yes` - Update without confirmation prompt. * `--force-skaled-start` - Force skaled container to start (hidden option). +#### Fair Node turn-off + +Turn off the Fair node containers. + +```shell +fair node turn-off [--yes] +``` + +Options: + +* `--yes` - Turn off without confirmation. + +#### Fair Node turn-on + +Turn on the Fair node containers. + +```shell +fair node turn-on [ENV_FILEPATH] [--yes] +``` + +Arguments: + +* `ENV_FILEPATH` - Path to the .env file. + +Options: + +* `--yes` - Turn on without additional confirmation. + + #### Fair Node Migrate Switch from boot phase to regular Fair node operation. @@ -1183,6 +1212,35 @@ Update software / configs for passive Fair node. fair passive-node update [--yes] ``` +#### Passive Fair Node turn-off + +Turn off the Fair passive node containers. + +```shell +fair passive-node turn-off [--yes] +``` + +Options: + +* `--yes` - Turn off without confirmation. + +#### Passive Fair Node turn-on + +Turn on the Fair passive node containers. + +```shell +fair passive-node turn-on [ENV_FILEPATH] [--yes] +``` + +Arguments: + +* `ENV_FILEPATH` - Path to the .env file. + +Options: + +* `--yes` - Turn on without additional confirmation. + + #### Passive Fair Node Cleanup Remove all passive Fair node data and containers. 
From da2d5b8acc93dc17ac52c46cb12604e62e3970cc Mon Sep 17 00:00:00 2001 From: Dmytro Date: Wed, 17 Sep 2025 19:17:37 +0100 Subject: [PATCH 234/332] Update staking functions --- README.md | 54 +++++++++++++++++++--------- node_cli/cli/staking.py | 52 +++++++++++++++++++-------- node_cli/configs/routes.py | 6 ++-- node_cli/fair/staking.py | 72 +++++++++++++++++++++++++++++++------- tests/routes_test.py | 6 ++-- 5 files changed, 144 insertions(+), 46 deletions(-) diff --git a/README.md b/README.md index a50d8a3e..9ccb80ca 100644 --- a/README.md +++ b/README.md @@ -1098,48 +1098,70 @@ Arguments: * `RECEIVER_ADDRESS` - Address to remove from the allowed receivers list. -#### Claim fees +Workflow (fees): request fees -> review exit requests -> claim request. -Claim a specific amount of fees or all fees to the node wallet. +#### Request fees + +Create a request to claim a specific amount of earned fees (FAIR). Use `--all` to request all. ```shell -fair staking claim-fees -fair staking claim-fees --all +fair staking request-fees +fair staking request-fees --all ``` -#### Set fee rate +#### Request send fees -Set the fee rate (uint16 value) used by the staking logic. +Create a request to send a specific amount (or all) of earned fees to an address. ```shell -fair staking set-fee-rate +fair staking request-send-fees +fair staking request-send-fees --all ``` Arguments: -* `FEE_RATE` - Fee rate value as integer (uint16). +* `TO_ADDRESS` - Destination address for the fee transfer. +* `AMOUNT` - Amount of fees to include in the request (FAIR). -#### Send fees +#### Claim request -Send a specific amount of fees to the default allowed receiver. +Claim a previously created request by its request ID once it is unlocked. ```shell -fair staking send-fees +fair staking claim-request ``` -Arguments: +#### Get exit requests -* `TO_ADDRESS` - Destination address for the fee transfer. -* `AMOUNT` - Amount of fees to send (FAIR). Use `--all` to send all. 
+List exit (fee withdrawal) requests for the current wallet. Use `--json` for raw JSON output. + +```shell +fair staking exit-requests +fair staking exit-requests --json +``` + +Default output (non-JSON) shows: `request_id`, `user`, `node_id`, `amount_wei`, `amount_fair`, `unlock_date (ISO)`. #### Get earned fee amount -Get the currently earned fee amount. +Get the currently earned (unrequested) fee amount. ```shell -fair staking get-earned-fee-amount +fair staking earned-fee-amount ``` +#### Set fee rate + +Set the fee rate (uint16 value) used by the staking logic. + +```shell +fair staking set-fee-rate +``` + +Arguments: + +* `FEE_RATE` - Fee rate value as integer (uint16). + ### Passive Fair Node commands > Prefix: `fair passive-node` (passive Fair build) diff --git a/node_cli/cli/staking.py b/node_cli/cli/staking.py index 8cde234c..8cb08d2d 100644 --- a/node_cli/cli/staking.py +++ b/node_cli/cli/staking.py @@ -23,9 +23,11 @@ add_allowed_receiver, remove_allowed_receiver, set_fee_rate, - claim_fees, - send_fees, + request_fees, + request_send_fees, + claim_request, get_earned_fee_amount, + get_exit_requests, ) from node_cli.utils.helper import abort_if_false @@ -79,39 +81,61 @@ def _set_fee_rate(fee_rate: int) -> None: set_fee_rate(fee_rate) -@staking.command('claim-fees', help='Claim fees amount (FAIR) or all with --all') +@staking.command('request-fees', help='Create a request to claim fees (FAIR) or all with --all') @click.argument('amount', type=float, required=False) -@click.option('--all', 'claim_all', is_flag=True, help='Claim all fees') +@click.option('--all', 'request_all', is_flag=True, help='Request all fees') @click.option( '--yes', is_flag=True, callback=abort_if_false, expose_value=False, - prompt='Are you sure you want to claim fees?', + prompt='Are you sure you want to request fees?', ) -def _claim_fees(amount: float | None, claim_all: bool) -> None: - if amount is None and not claim_all: +def _request_fees(amount: float | None, request_all: 
bool) -> None: + if amount is None and not request_all: raise click.UsageError('Provide or use --all') - claim_fees(None if claim_all else amount) + request_fees(None if request_all else amount) -@staking.command('send-fees', help='Send fees to address (or all with --all)') +@staking.command( + 'request-send-fees', + help='Create a request to send fees to address (or all with --all)', +) @click.argument('to') @click.argument('amount', type=float, required=False) -@click.option('--all', 'send_all', is_flag=True, help='Send all fees to address') +@click.option('--all', 'send_all', is_flag=True, help='Request to send all fees to address') @click.option( '--yes', is_flag=True, callback=abort_if_false, expose_value=False, - prompt='Are you sure you want to send fees?', + prompt='Are you sure you want to request to send fees?', ) -def _send_fees(to: str, amount: float | None, send_all: bool) -> None: +def _request_send_fees(to: str, amount: float | None, send_all: bool) -> None: if amount is None and not send_all: raise click.UsageError('Provide or use --all') - send_fees(to, None if send_all else amount) + request_send_fees(to, None if send_all else amount) -@staking.command('get-earned-fee-amount', help='Get earned fee amount') +@staking.command('claim-request', help='Claim previously created request by request ID') +@click.argument('request_id', type=int) +@click.option( + '--yes', + is_flag=True, + callback=abort_if_false, + expose_value=False, + prompt='Are you sure you want to claim this request?', +) +def _claim_request(request_id: int) -> None: + claim_request(request_id) + + +@staking.command('earned-fee-amount', help='Get earned fee amount') def _get_earned_fee_amount() -> None: get_earned_fee_amount() + + +@staking.command('exit-requests', help='Get exit requests for current wallet') +@click.option('--json', 'raw', is_flag=True, help='Output in JSON format') +def _get_exit_requests(raw: bool) -> None: + get_exit_requests(raw=raw) diff --git 
a/node_cli/configs/routes.py b/node_cli/configs/routes.py index 83bb2866..336fea3e 100644 --- a/node_cli/configs/routes.py +++ b/node_cli/configs/routes.py @@ -47,9 +47,11 @@ 'add-receiver', 'remove-receiver', 'set-fee-rate', - 'claim-fees', - 'send-fees', + 'request-fees', + 'request-send-fees', + 'claim-request', 'get-earned-fee-amount', + 'get-exit-requests', ], } } diff --git a/node_cli/fair/staking.py b/node_cli/fair/staking.py index 9e81e69f..4b7a4008 100644 --- a/node_cli/fair/staking.py +++ b/node_cli/fair/staking.py @@ -18,6 +18,8 @@ # along with this program. If not, see . from typing import Any +import json +from datetime import datetime, timezone from node_cli.utils.decorators import check_inited from node_cli.utils.exit_codes import CLIExitCodes @@ -50,12 +52,27 @@ def remove_allowed_receiver(receiver: str) -> None: @check_inited -def send_fees(to: str, amount: float | None) -> None: +def request_fees(amount: float | None) -> None: + json_data: dict[str, Any] = {} + if amount is not None: + json_data['amount'] = amount + status, payload = post_request(blueprint=BLUEPRINT_NAME, method='request-fees', json=json_data) + _handle_response( + status, + payload, + success='All fees requested' if amount is None else f'Fees requested: {amount}', + ) + + +@check_inited +def request_send_fees(to: str, amount: float | None) -> None: json_data: dict[str, Any] = {'to': to} if amount is not None: json_data['amount'] = amount - status, payload = post_request(blueprint=BLUEPRINT_NAME, method='send-fees', json=json_data) - _handle_response(status, payload, success=f'Fees sent to {to}') + status, payload = post_request( + blueprint=BLUEPRINT_NAME, method='request-send-fees', json=json_data + ) + _handle_response(status, payload, success=f'Fees request to send to {to} created') @check_inited @@ -67,16 +84,11 @@ def set_fee_rate(fee_rate: int) -> None: @check_inited -def claim_fees(amount: float | None) -> None: - json_data: dict[str, Any] = {} - if amount is not None: - 
json_data['amount'] = amount - status, payload = post_request(blueprint=BLUEPRINT_NAME, method='claim-fees', json=json_data) - _handle_response( - status, - payload, - success='All fees claimed' if amount is None else f'Fees claimed: {amount}', +def claim_request(request_id: int) -> None: + status, payload = post_request( + blueprint=BLUEPRINT_NAME, method='claim-request', json={'requestId': request_id} ) + _handle_response(status, payload, success=f'Request claimed: {request_id}') @check_inited @@ -88,3 +100,39 @@ def get_earned_fee_amount() -> None: print(f'Earned fee amount: {amount_wei} wei ({amount_ether} FAIR)') return error_exit(payload, exit_code=CLIExitCodes.BAD_API_RESPONSE) + + +@check_inited +def get_exit_requests(raw: bool = False) -> None: + status, payload = post_request(blueprint=BLUEPRINT_NAME, method='get-exit-requests') + if status == 'ok' and isinstance(payload, dict): + exit_requests = payload.get('exit_requests') + if not isinstance(exit_requests, list): + error_exit(payload, exit_code=CLIExitCodes.BAD_API_RESPONSE) + return + if raw: + print(json.dumps(exit_requests, indent=2)) + return + for req in exit_requests: + try: + request_id = req.get('request_id') + user = req.get('user') + node_id = req.get('node_id') + amount = req.get('amount') + unlock_date = req.get('unlock_date') + amount_fair = None + if isinstance(amount, int): + amount_fair = amount / 10**18 + unlock_iso = None + if isinstance(unlock_date, int): + unlock_iso = datetime.fromtimestamp(unlock_date, tz=timezone.utc).isoformat() + base = ( + f'request_id: {request_id} | user: {user} | node_id: {node_id} | ' + f'amount_wei: {amount} | amount_fair: {amount_fair} | ' + f'unlock_date: {unlock_date}' + ) + print(base + (f' ({unlock_iso})' if unlock_iso else '')) + except Exception: # noqa: BLE001 + print(req) + return + error_exit(payload, exit_code=CLIExitCodes.BAD_API_RESPONSE) diff --git a/tests/routes_test.py b/tests/routes_test.py index b64e9e0a..39490ab3 100644 --- 
a/tests/routes_test.py +++ b/tests/routes_test.py @@ -42,9 +42,11 @@ '/api/v1/fair-staking/add-receiver', '/api/v1/fair-staking/remove-receiver', '/api/v1/fair-staking/set-fee-rate', - '/api/v1/fair-staking/claim-fees', - '/api/v1/fair-staking/send-fees', + '/api/v1/fair-staking/request-fees', + '/api/v1/fair-staking/request-send-fees', + '/api/v1/fair-staking/claim-request', '/api/v1/fair-staking/get-earned-fee-amount', + '/api/v1/fair-staking/get-exit-requests', ] From 42d45335e182239e997b067bab5bdef7bfbe7820 Mon Sep 17 00:00:00 2001 From: Dmytro Date: Thu, 18 Sep 2025 21:46:51 +0100 Subject: [PATCH 235/332] Fix repair cmd --- node_cli/cli/fair_node.py | 6 +++--- node_cli/fair/common.py | 2 +- text.yml | 2 +- 3 files changed, 5 insertions(+), 5 deletions(-) diff --git a/node_cli/cli/fair_node.py b/node_cli/cli/fair_node.py index 2f7ccc4f..343be1de 100644 --- a/node_cli/cli/fair_node.py +++ b/node_cli/cli/fair_node.py @@ -138,7 +138,7 @@ def migrate_node(env_filepath: str) -> None: type=URL_OR_ANY_TYPE, default='any', hidden=True, - help=TEXTS['fair']['node']['repair']['snapshot_from'], + help=TEXTS['fair']['node']['repair']['snapshot'], ) @click.option( '--yes', @@ -148,8 +148,8 @@ def migrate_node(env_filepath: str) -> None: prompt=TEXTS['fair']['node']['repair']['warning'], ) @streamed_cmd -def repair(snapshot_from: str = 'any') -> None: - repair_chain(snapshot_from=snapshot_from) +def repair(snapshot: str = 'any') -> None: + repair_chain(snapshot_from=snapshot) @node.command('cleanup', help='Cleanup Fair node.') diff --git a/node_cli/fair/common.py b/node_cli/fair/common.py index c8b4d9d5..dc91e26e 100644 --- a/node_cli/fair/common.py +++ b/node_cli/fair/common.py @@ -164,4 +164,4 @@ def turn_on(env_file, node_type: NodeType) -> None: if not is_base_containers_alive(node_type=node_type, node_mode=node_mode): print_node_cmd_error() return - logger.info('Node turned on') \ No newline at end of file + logger.info('Node turned on') diff --git a/text.yml b/text.yml 
index c0cbe23a..6013f2d0 100644 --- a/text.yml +++ b/text.yml @@ -82,7 +82,7 @@ fair: repair: help: Repair Fair chain node warning: Are you sure you want to repair Fair chain node? In rare cases may cause data loss and require additional maintenance - snapshot_from: IP of the node to take snapshot from (put "any" to use any available node) + snapshot: IP of the node to take snapshot from (put "any" to use any available node) repair_requested: Repair mode is requested not_inited: Node should be initialized to proceed with operation From c38d88f95cc6843a8b3db464e3e191c34a28811c Mon Sep 17 00:00:00 2001 From: Dmytro Date: Thu, 25 Sep 2025 17:27:21 +0100 Subject: [PATCH 236/332] Fix snapshot option for passive nodes, add logs --- node_cli/fair/record/redis_record.py | 4 ++++ node_cli/operations/fair.py | 3 +++ node_cli/utils/helper.py | 4 ++-- tests/cli/passive_node_test.py | 31 ++++++++++++++++++++++++++++ 4 files changed, 40 insertions(+), 2 deletions(-) diff --git a/node_cli/fair/record/redis_record.py b/node_cli/fair/record/redis_record.py index b71aa9e4..f40aa898 100644 --- a/node_cli/fair/record/redis_record.py +++ b/node_cli/fair/record/redis_record.py @@ -18,6 +18,7 @@ # along with this program. If not, see . 
import abc +import logging from dataclasses import dataclass from datetime import datetime from typing import Any @@ -26,6 +27,8 @@ from node_cli.configs import REDIS_URI +logger = logging.getLogger(__name__) + cpool: redis.ConnectionPool = redis.ConnectionPool.from_url(REDIS_URI) rs: redis.Redis = redis.Redis(connection_pool=cpool) @@ -83,6 +86,7 @@ def _get_field(self, field_name: str): def _set_field(self, field_name: str, value) -> None: key = self._get_field_key(field_name) serialized_value = self._serialize_field(value, self._record_fields()[field_name].type) + logger.info('Setting field %s to value %s', field_name, serialized_value) rs.set(key, serialized_value) def _deserialize_field(self, value, field_type: type): diff --git a/node_cli/operations/fair.py b/node_cli/operations/fair.py index 4fc10b16..689b4a51 100644 --- a/node_cli/operations/fair.py +++ b/node_cli/operations/fair.py @@ -112,8 +112,10 @@ def init( upsert_node_mode(node_mode=node_mode) if node_mode == NodeMode.PASSIVE: + logger.info('Setting passive node options') set_passive_node_options(archive=archive, indexer=indexer) if snapshot: + logger.info('Waiting %s seconds for redis to start', REDIS_START_TIMEOUT) time.sleep(REDIS_START_TIMEOUT) trigger_skaled_snapshot_mode(env=env, snapshot_from=snapshot) @@ -293,6 +295,7 @@ def trigger_skaled_snapshot_mode(env: dict, snapshot_from: str = 'any') -> None: record = get_fair_chain_record(env) if not snapshot_from: snapshot_from = 'any' + logger.info('Triggering skaled snapshot mode, snapshot_from: %s', snapshot_from) record.set_snapshot_from(snapshot_from) diff --git a/node_cli/utils/helper.py b/node_cli/utils/helper.py index d299fbf6..c9c9bfb0 100644 --- a/node_cli/utils/helper.py +++ b/node_cli/utils/helper.py @@ -382,13 +382,13 @@ def convert(self, value, param, ctx): return value -class UrlOrAnyType(click.ParamType): +class UrlOrAnyType(UrlType): name = 'url' def convert(self, value, param, ctx): if value == 'any': return value - 
super().convert(value, param, ctx) + return super().convert(value, param, ctx) class IpType(click.ParamType): diff --git a/tests/cli/passive_node_test.py b/tests/cli/passive_node_test.py index 94cde559..e0342340 100644 --- a/tests/cli/passive_node_test.py +++ b/tests/cli/passive_node_test.py @@ -57,6 +57,37 @@ def test_init_passive(mocked_g_config, clean_node_options, passive_user_conf): assert result.exit_code == 0 +def test_init_passive_snapshot_any(mocked_g_config, clean_node_options, passive_user_conf): + pathlib.Path(NODE_DATA_PATH).mkdir(parents=True, exist_ok=True) + with ( + mock.patch('node_cli.core.node.is_base_containers_alive', return_value=True), + mock.patch('node_cli.operations.base.cleanup_volume_artifacts'), + mock.patch('node_cli.operations.base.download_skale_node'), + mock.patch('node_cli.operations.base.sync_skale_node'), + mock.patch('node_cli.operations.base.configure_docker'), + mock.patch('node_cli.operations.base.prepare_host'), + mock.patch('node_cli.operations.base.ensure_filestorage_mapping'), + mock.patch('node_cli.operations.base.link_env_file'), + mock.patch('node_cli.operations.base.generate_nginx_config'), + mock.patch('node_cli.operations.base.prepare_block_device'), + mock.patch('node_cli.operations.base.CliMetaManager.update_meta'), + mock.patch('node_cli.operations.base.update_resource_allocation'), + mock.patch('node_cli.operations.base.update_images'), + mock.patch('node_cli.core.resources.get_disk_size', return_value=BIG_DISK_SIZE), + mock.patch('node_cli.operations.base.configure_nftables'), + mock.patch('node_cli.utils.decorators.is_node_inited', return_value=False), + mock.patch('node_cli.configs.user.validate_alias_or_address'), + mock.patch('node_cli.cli.node.TYPE', NodeType.SKALE), + mock.patch('node_cli.operations.base.compose_up') as compose_up_mock, + ): + result = run_command(_init_passive, [passive_user_conf.as_posix(), '--snapshot', 'any']) + assert result.exit_code == 0 + assert compose_up_mock.called + args, 
kwargs = compose_up_mock.call_args + assert 'snapshot' in kwargs + assert kwargs['snapshot'] == 'any' + + def test_init_passive_archive(mocked_g_config, clean_node_options, passive_user_conf): pathlib.Path(NODE_DATA_PATH).mkdir(parents=True, exist_ok=True) with ( From 273368bcead9c86dcc9da3d46d2997d80913d32e Mon Sep 17 00:00:00 2001 From: Dmytro Date: Thu, 25 Sep 2025 18:16:45 +0100 Subject: [PATCH 237/332] Add fair passive node tests --- tests/cli/fair_passive_node_test.py | 86 +++++++++++++++++++++++++++++ tests/cli/passive_node_test.py | 31 ----------- 2 files changed, 86 insertions(+), 31 deletions(-) create mode 100644 tests/cli/fair_passive_node_test.py diff --git a/tests/cli/fair_passive_node_test.py b/tests/cli/fair_passive_node_test.py new file mode 100644 index 00000000..34ececf9 --- /dev/null +++ b/tests/cli/fair_passive_node_test.py @@ -0,0 +1,86 @@ +import logging +import pathlib + +import mock + +from node_cli.cli.passive_fair_node import init_passive_node, update_node +from node_cli.configs import NODE_DATA_PATH, SKALE_DIR +from node_cli.utils.helper import init_default_logger +from tests.helper import run_command, subprocess_run_mock +from tests.resources_test import BIG_DISK_SIZE + +logger = logging.getLogger(__name__) +init_default_logger() + + +def test_init_fair_passive(mocked_g_config, tmp_path): + env_file = tmp_path / 'test-env' + env_file.write_text('') + pathlib.Path(SKALE_DIR).mkdir(parents=True, exist_ok=True) + with ( + mock.patch('subprocess.run', new=subprocess_run_mock), + mock.patch('node_cli.fair.common.init_fair_op', return_value=True), + mock.patch('node_cli.fair.common.compose_node_env', return_value={}), + mock.patch('node_cli.fair.common.save_env_params'), + mock.patch('node_cli.fair.passive.setup_fair_passive'), + mock.patch('node_cli.fair.common.time.sleep'), + mock.patch('node_cli.core.node.is_base_containers_alive', return_value=True), + mock.patch('node_cli.core.resources.get_disk_size', return_value=BIG_DISK_SIZE), + 
mock.patch('node_cli.operations.base.configure_nftables'), + mock.patch('node_cli.utils.decorators.is_node_inited', return_value=False), + ): + result = run_command( + init_passive_node, + [ + env_file.as_posix(), + '--id', + '1', + ], + ) + assert result.exit_code == 0 + + +def test_init_fair_passive_snapshot_any(mocked_g_config, tmp_path): + env_file = tmp_path / 'test-env' + env_file.write_text('') + pathlib.Path(SKALE_DIR).mkdir(parents=True, exist_ok=True) + with ( + mock.patch('subprocess.run', new=subprocess_run_mock), + mock.patch('node_cli.fair.common.init_fair_op', return_value=True), + mock.patch('node_cli.fair.common.compose_node_env', return_value={}), + mock.patch('node_cli.fair.common.save_env_params'), + mock.patch('node_cli.fair.passive.setup_fair_passive'), + mock.patch('node_cli.fair.common.time.sleep'), + mock.patch('node_cli.core.node.is_base_containers_alive', return_value=True), + mock.patch('node_cli.core.resources.get_disk_size', return_value=BIG_DISK_SIZE), + mock.patch('node_cli.operations.base.configure_nftables'), + mock.patch('node_cli.utils.decorators.is_node_inited', return_value=False), + ): + result = run_command( + init_passive_node, + [ + env_file.as_posix(), + '--id', + '2', + '--snapshot', + 'any', + ], + ) + assert result.exit_code == 0 + + +def test_update_fair_passive(mocked_g_config, tmp_path): + env_file = tmp_path / 'test-env' + env_file.write_text('') + pathlib.Path(NODE_DATA_PATH).mkdir(parents=True, exist_ok=True) + with ( + mock.patch('subprocess.run', new=subprocess_run_mock), + mock.patch('node_cli.fair.common.update_fair_op', return_value=True), + mock.patch('node_cli.fair.common.compose_node_env', return_value={}), + mock.patch('node_cli.core.node.is_base_containers_alive', return_value=True), + mock.patch('node_cli.core.resources.get_disk_size', return_value=BIG_DISK_SIZE), + mock.patch('node_cli.operations.base.configure_nftables'), + mock.patch('node_cli.utils.decorators.is_node_inited', return_value=True), + 
): + result = run_command(update_node, [env_file.as_posix(), '--yes']) + assert result.exit_code == 0 diff --git a/tests/cli/passive_node_test.py b/tests/cli/passive_node_test.py index e0342340..94cde559 100644 --- a/tests/cli/passive_node_test.py +++ b/tests/cli/passive_node_test.py @@ -57,37 +57,6 @@ def test_init_passive(mocked_g_config, clean_node_options, passive_user_conf): assert result.exit_code == 0 -def test_init_passive_snapshot_any(mocked_g_config, clean_node_options, passive_user_conf): - pathlib.Path(NODE_DATA_PATH).mkdir(parents=True, exist_ok=True) - with ( - mock.patch('node_cli.core.node.is_base_containers_alive', return_value=True), - mock.patch('node_cli.operations.base.cleanup_volume_artifacts'), - mock.patch('node_cli.operations.base.download_skale_node'), - mock.patch('node_cli.operations.base.sync_skale_node'), - mock.patch('node_cli.operations.base.configure_docker'), - mock.patch('node_cli.operations.base.prepare_host'), - mock.patch('node_cli.operations.base.ensure_filestorage_mapping'), - mock.patch('node_cli.operations.base.link_env_file'), - mock.patch('node_cli.operations.base.generate_nginx_config'), - mock.patch('node_cli.operations.base.prepare_block_device'), - mock.patch('node_cli.operations.base.CliMetaManager.update_meta'), - mock.patch('node_cli.operations.base.update_resource_allocation'), - mock.patch('node_cli.operations.base.update_images'), - mock.patch('node_cli.core.resources.get_disk_size', return_value=BIG_DISK_SIZE), - mock.patch('node_cli.operations.base.configure_nftables'), - mock.patch('node_cli.utils.decorators.is_node_inited', return_value=False), - mock.patch('node_cli.configs.user.validate_alias_or_address'), - mock.patch('node_cli.cli.node.TYPE', NodeType.SKALE), - mock.patch('node_cli.operations.base.compose_up') as compose_up_mock, - ): - result = run_command(_init_passive, [passive_user_conf.as_posix(), '--snapshot', 'any']) - assert result.exit_code == 0 - assert compose_up_mock.called - args, kwargs = 
compose_up_mock.call_args - assert 'snapshot' in kwargs - assert kwargs['snapshot'] == 'any' - - def test_init_passive_archive(mocked_g_config, clean_node_options, passive_user_conf): pathlib.Path(NODE_DATA_PATH).mkdir(parents=True, exist_ok=True) with ( From ae3bfc6cf8515a727803778abe36e88429e49c05 Mon Sep 17 00:00:00 2001 From: Dmytro Date: Mon, 27 Oct 2025 11:37:52 +0000 Subject: [PATCH 238/332] Fix init and update commands for SKALE nodes --- Dockerfile | 24 ++++++++++++------------ node_cli/core/node.py | 4 ++-- node_cli/operations/base.py | 21 ++++++++------------- 3 files changed, 22 insertions(+), 27 deletions(-) diff --git a/Dockerfile b/Dockerfile index c2fc6972..abc1fcd3 100644 --- a/Dockerfile +++ b/Dockerfile @@ -2,18 +2,18 @@ FROM python:3.11-bookworm ENV DEBIAN_FRONTEND=noninteractive RUN apt-get update && apt install -y \ - git \ - build-essential \ - software-properties-common \ - zlib1g-dev \ - libssl-dev \ - libffi-dev \ - swig \ - iptables \ - nftables \ - python3-nftables \ - libxslt-dev \ - kmod + git \ + build-essential \ + software-properties-common \ + zlib1g-dev \ + libssl-dev \ + libffi-dev \ + swig \ + iptables \ + nftables \ + python3-nftables \ + libxslt-dev \ + kmod RUN mkdir /app diff --git a/node_cli/core/node.py b/node_cli/core/node.py index 3a704ddc..992c1e20 100644 --- a/node_cli/core/node.py +++ b/node_cli/core/node.py @@ -157,7 +157,7 @@ def init(env_filepath: str, node_type: NodeType) -> None: node_mode = NodeMode.ACTIVE env = compose_node_env(env_filepath=env_filepath, node_type=node_type, node_mode=node_mode) - init_op(env_filepath=env_filepath, env=env, node_type=node_type, node_mode=node_mode) + init_op(env_filepath=env_filepath, env=env, node_mode=node_mode) logger.info('Waiting for containers initialization') time.sleep(TM_INIT_TIMEOUT) if not is_base_containers_alive(node_type=node_type, node_mode=node_mode): @@ -314,7 +314,7 @@ def update( node_type=node_type, node_mode=node_mode, ) - update_ok = 
update_op(env_filepath, env, node_type=node_type, node_mode=node_mode) + update_ok = update_op(env_filepath, env, node_mode=node_mode) if update_ok: logger.info('Waiting for containers initialization') time.sleep(TM_INIT_TIMEOUT) diff --git a/node_cli/operations/base.py b/node_cli/operations/base.py index 2726413b..55c54914 100644 --- a/node_cli/operations/base.py +++ b/node_cli/operations/base.py @@ -114,8 +114,8 @@ def wrapper(env_filepath: str, env: Dict, node_mode: NodeMode, *args, **kwargs): @checked_host -def update(env_filepath: str, env: Dict, node_type: NodeType, node_mode: NodeMode) -> bool: - compose_rm(node_type=node_type, node_mode=node_mode, env=env) +def update(env_filepath: str, env: Dict, node_mode: NodeMode) -> bool: + compose_rm(node_type=NodeType.SKALE, node_mode=node_mode, env=env) remove_dynamic_containers() sync_skale_node() @@ -151,8 +151,8 @@ def update(env_filepath: str, env: Dict, node_type: NodeType, node_mode: NodeMod distro.id(), distro.version(), ) - update_images(env=env, node_type=node_type, node_mode=node_mode) - compose_up(env=env, node_type=node_type, node_mode=node_mode) + update_images(env=env, node_type=NodeType.SKALE, node_mode=node_mode) + compose_up(env=env, node_type=NodeType.SKALE, node_mode=node_mode) return True @@ -199,7 +199,7 @@ def update_fair_boot(env_filepath: str, env: Dict, node_mode: NodeMode = NodeMod @checked_host -def init(env_filepath: str, env: dict, node_type: NodeType, node_mode: NodeMode) -> None: +def init(env_filepath: str, env: dict, node_mode: NodeMode) -> None: sync_skale_node() ensure_btrfs_kernel_module_autoloaded() if env.get('SKIP_DOCKER_CONFIG') != 'True': @@ -229,9 +229,8 @@ def init(env_filepath: str, env: dict, node_type: NodeType, node_mode: NodeMode) distro.version(), ) update_resource_allocation(env_type=env['ENV_TYPE']) - update_images(env=env, node_type=node_type, node_mode=node_mode) - - compose_up(env=env, node_type=node_type, node_mode=node_mode) + update_images(env=env, 
node_type=NodeType.SKALE, node_mode=node_mode) + compose_up(env=env, node_type=NodeType.SKALE, node_mode=node_mode) @checked_host @@ -370,11 +369,7 @@ def turn_on(env: dict, node_type: NodeType, node_mode: NodeMode) -> None: else: meta_manager = CliMetaManager() meta_manager.update_meta( - VERSION, - env['NODE_VERSION'], - env['DOCKER_LVMPY_VERSION'], - distro.id(), - distro.version() + VERSION, env['NODE_VERSION'], env['DOCKER_LVMPY_VERSION'], distro.id(), distro.version() ) if env.get('SKIP_DOCKER_CONFIG') != 'True': configure_docker() From c756859520d02bccec49918c2269e2065c3ca7c2 Mon Sep 17 00:00:00 2001 From: Alex Sheverdin Date: Tue, 28 Oct 2025 17:53:32 +0000 Subject: [PATCH 239/332] Add common cleanup function for SKALE active and passive nodes --- node_cli/operations/__init__.py | 2 +- node_cli/operations/base.py | 21 ++++++++++++++++----- 2 files changed, 17 insertions(+), 6 deletions(-) diff --git a/node_cli/operations/__init__.py b/node_cli/operations/__init__.py index 5ae8103f..f2ac1a94 100644 --- a/node_cli/operations/__init__.py +++ b/node_cli/operations/__init__.py @@ -27,7 +27,7 @@ turn_off as turn_off_op, turn_on as turn_on_op, restore as restore_op, - cleanup_passive as cleanup_passive_op, + cleanup as cleanup_skale_op, configure_nftables, ) from node_cli.operations.fair import ( # noqa diff --git a/node_cli/operations/base.py b/node_cli/operations/base.py index 2726413b..a866c9ea 100644 --- a/node_cli/operations/base.py +++ b/node_cli/operations/base.py @@ -29,11 +29,12 @@ CONTAINER_CONFIG_PATH, CONTAINER_CONFIG_TMP_PATH, GLOBAL_SKALE_DIR, + NFTABLES_CHAIN_FOLDER_PATH, SKALE_DIR, ) from node_cli.core.checks import CheckType from node_cli.core.checks import run_checks as run_host_checks -from node_cli.core.docker_config import configure_docker +from node_cli.core.docker_config import cleanup_docker_configuration, configure_docker from node_cli.core.host import ( ensure_btrfs_kernel_module_autoloaded, link_env_file, @@ -69,7 +70,7 @@ 
docker_cleanup, remove_dynamic_containers, ) -from node_cli.utils.helper import rm_dir, str_to_bool +from node_cli.utils.helper import cleanup_dir_content, rm_dir, str_to_bool from node_cli.utils.meta import CliMetaManager, FairCliMetaManager from node_cli.utils.node_type import NodeType, NodeMode from node_cli.utils.print_formatters import print_failed_requirements_checks @@ -438,8 +439,18 @@ def restore(env, backup_path, node_type: NodeType, config_only=False): return True -def cleanup_passive(env, schain_name: str) -> None: - turn_off(env, node_type=NodeType.SKALE, node_mode=NodeMode.PASSIVE) - cleanup_no_lvm_datadir(chain_name=schain_name) +def cleanup_active(): + pass + + +def cleanup(node_mode: NodeMode, env: dict) -> None: + turn_off(env, node_type=NodeType.SKALE, node_mode=node_mode) + if node_mode == NodeMode.PASSIVE: + schain_name = env['SCHAIN_NAME'] + cleanup_no_lvm_datadir(chain_name=schain_name) + else: + cleanup_active() rm_dir(GLOBAL_SKALE_DIR) rm_dir(SKALE_DIR) + cleanup_dir_content(NFTABLES_CHAIN_FOLDER_PATH) + cleanup_docker_configuration() \ No newline at end of file From 26ba3ffd199de642a61f71ba57c7b2bbe5510d34 Mon Sep 17 00:00:00 2001 From: Alex Sheverdin Date: Tue, 28 Oct 2025 17:55:55 +0000 Subject: [PATCH 240/332] Change `cleanup_passive` function by common one for all SKALE nodes --- node_cli/core/node.py | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/node_cli/core/node.py b/node_cli/core/node.py index 3a704ddc..143f6848 100644 --- a/node_cli/core/node.py +++ b/node_cli/core/node.py @@ -56,7 +56,7 @@ ) from node_cli.migrations.focal_to_jammy import migrate as migrate_2_6 from node_cli.operations import ( - cleanup_passive_op, + cleanup_skale_op, configure_nftables, init_op, init_passive_op, @@ -227,13 +227,13 @@ def update_passive(env_filepath: str, unsafe_ok: bool = False) -> None: @check_inited @check_user -def cleanup_passive() -> None: +def cleanup(node_mode: NodeMode) -> None: + node_mode = 
upsert_node_mode(node_mode=node_mode) env = compose_node_env( - SKALE_DIR_ENV_FILEPATH, save=False, node_type=NodeType.SKALE, node_mode=NodeMode.PASSIVE + SKALE_DIR_ENV_FILEPATH, save=False, node_type=NodeType.SKALE, node_mode=node_mode ) - schain_name = env['SCHAIN_NAME'] - cleanup_passive_op(env, schain_name) - logger.info('Passive node was cleaned up, all containers and data removed') + cleanup_skale_op(node_mode=node_mode, env=env) + logger.info('SKALE node was cleaned up, all containers and data removed') def compose_node_env( From f892699c768885c81a4e57b02cc09bd2f42cd2f8 Mon Sep 17 00:00:00 2001 From: Alex Sheverdin Date: Tue, 28 Oct 2025 18:17:45 +0000 Subject: [PATCH 241/332] Add CLI cleanup commands for SKALE nodes --- node_cli/cli/node.py | 14 ++++++++++++++ node_cli/cli/passive_node.py | 11 ++++++----- 2 files changed, 20 insertions(+), 5 deletions(-) diff --git a/node_cli/cli/node.py b/node_cli/cli/node.py index 20e2d701..8f55e93c 100644 --- a/node_cli/cli/node.py +++ b/node_cli/cli/node.py @@ -21,6 +21,7 @@ from node_cli.cli.info import TYPE from node_cli.core.node import ( + cleanup as cleanup_skale, configure_firewall_rules, get_node_signature, init, @@ -257,3 +258,16 @@ def version(raw: bool) -> None: print(meta_info) else: print_meta_info(meta_info) + + +@node.command('cleanup', help='Remove all SKALE node data and containers..') +@click.option( + '--yes', + is_flag=True, + callback=abort_if_false, + expose_value=False, + prompt='Are you sure you want to remove all SKALE node data and containers?', +) +@streamed_cmd +def cleanup_node(): + cleanup_skale(node_mode=NodeMode.ACTIVE) \ No newline at end of file diff --git a/node_cli/cli/passive_node.py b/node_cli/cli/passive_node.py index 6be74b28..adadfd48 100644 --- a/node_cli/cli/passive_node.py +++ b/node_cli/cli/passive_node.py @@ -21,8 +21,9 @@ import click -from node_cli.core.node import init_passive, update_passive, cleanup_passive +from node_cli.core.node import init_passive, update_passive, 
cleanup as cleanup_skale from node_cli.utils.helper import abort_if_false, error_exit, streamed_cmd, URL_TYPE +from node_cli.utils.node_type import NodeMode from node_cli.utils.texts import safe_load_texts @@ -72,14 +73,14 @@ def _update_passive(env_file, unsafe_ok): update_passive(env_file) -@passive_node.command('cleanup', help='Remove passive node data and containers') +@passive_node.command('cleanup', help='Remove all passive SKALE node data and containers.') @click.option( '--yes', is_flag=True, callback=abort_if_false, expose_value=False, - prompt='Are you sure you want to remove all node containers and data?', + prompt='Are you sure you want to remove all SKALE node data and containers?', ) @streamed_cmd -def _cleanup_passive() -> None: - cleanup_passive() +def cleanup_node(): + cleanup_skale(node_mode=NodeMode.PASSIVE) \ No newline at end of file From 84633ccb3849b5c6bf2407e3e32c83a88951eb81 Mon Sep 17 00:00:00 2001 From: Alex Sheverdin Date: Tue, 28 Oct 2025 19:43:16 +0000 Subject: [PATCH 242/332] Add implementation for function `cleanup_active` --- node_cli/operations/base.py | 22 ++++++++++++++++++---- 1 file changed, 18 insertions(+), 4 deletions(-) diff --git a/node_cli/operations/base.py b/node_cli/operations/base.py index a866c9ea..13abb0f3 100644 --- a/node_cli/operations/base.py +++ b/node_cli/operations/base.py @@ -70,9 +70,9 @@ docker_cleanup, remove_dynamic_containers, ) -from node_cli.utils.helper import cleanup_dir_content, rm_dir, str_to_bool +from node_cli.utils.helper import cleanup_dir_content, rm_dir, str_to_bool, run_cmd from node_cli.utils.meta import CliMetaManager, FairCliMetaManager -from node_cli.utils.node_type import NodeType, NodeMode +from node_cli.utils.node_type import NodeMode, NodeType from node_cli.utils.print_formatters import print_failed_requirements_checks logger = logging.getLogger(__name__) @@ -439,8 +439,22 @@ def restore(env, backup_path, node_type: NodeType, config_only=False): return True +def cleanup_passive(env, 
schain_name: str) -> None: + turn_off(env, node_type=NodeType.SKALE, node_mode=NodeMode.PASSIVE) + cleanup_no_lvm_datadir(chain_name=schain_name) + rm_dir(GLOBAL_SKALE_DIR) + rm_dir(SKALE_DIR) + + def cleanup_active(): - pass + logger.info('Starting cleanup for active node...') + logger.info('Unmounting /mnt/schains-shared-space...') + run_cmd(['sudo', 'umount', '/mnt/schains-shared-space'], check_code=False) + logger.info('Cleaning up /mnt directory content...') + cleanup_dir_content('/mnt/') + logger.info('Removing LVM volume group "schains"...') + run_cmd(['sudo', 'lvremove', '-f', 'schains'], check_code=False) + logger.info('Active node cleanup finished.') def cleanup(node_mode: NodeMode, env: dict) -> None: @@ -453,4 +467,4 @@ def cleanup(node_mode: NodeMode, env: dict) -> None: rm_dir(GLOBAL_SKALE_DIR) rm_dir(SKALE_DIR) cleanup_dir_content(NFTABLES_CHAIN_FOLDER_PATH) - cleanup_docker_configuration() \ No newline at end of file + cleanup_docker_configuration() From b3f11b6a3a1defe78e8abcf18698d7816a84f46b Mon Sep 17 00:00:00 2001 From: Alex Sheverdin Date: Tue, 28 Oct 2025 19:45:00 +0000 Subject: [PATCH 243/332] Add `prune` option for SKALE node clean up commands --- node_cli/cli/node.py | 5 +++-- node_cli/cli/passive_node.py | 5 +++-- node_cli/core/node.py | 4 ++-- node_cli/operations/base.py | 5 ++++- 4 files changed, 12 insertions(+), 7 deletions(-) diff --git a/node_cli/cli/node.py b/node_cli/cli/node.py index 8f55e93c..6180fc0a 100644 --- a/node_cli/cli/node.py +++ b/node_cli/cli/node.py @@ -268,6 +268,7 @@ def version(raw: bool) -> None: expose_value=False, prompt='Are you sure you want to remove all SKALE node data and containers?', ) +@click.option('--prune', is_flag=True, help='Prune docker system.') @streamed_cmd -def cleanup_node(): - cleanup_skale(node_mode=NodeMode.ACTIVE) \ No newline at end of file +def cleanup_node(prune): + cleanup_skale(node_mode=NodeMode.ACTIVE, prune=prune) diff --git a/node_cli/cli/passive_node.py 
b/node_cli/cli/passive_node.py index adadfd48..ee466f06 100644 --- a/node_cli/cli/passive_node.py +++ b/node_cli/cli/passive_node.py @@ -81,6 +81,7 @@ def _update_passive(env_file, unsafe_ok): expose_value=False, prompt='Are you sure you want to remove all SKALE node data and containers?', ) +@click.option('--prune', is_flag=True, help='Prune docker system.') @streamed_cmd -def cleanup_node(): - cleanup_skale(node_mode=NodeMode.PASSIVE) \ No newline at end of file +def cleanup_node(prune): + cleanup_skale(node_mode=NodeMode.PASSIVE, prune=prune) \ No newline at end of file diff --git a/node_cli/core/node.py b/node_cli/core/node.py index 143f6848..46f30ebb 100644 --- a/node_cli/core/node.py +++ b/node_cli/core/node.py @@ -227,12 +227,12 @@ def update_passive(env_filepath: str, unsafe_ok: bool = False) -> None: @check_inited @check_user -def cleanup(node_mode: NodeMode) -> None: +def cleanup(node_mode: NodeMode, prune: bool = False) -> None: node_mode = upsert_node_mode(node_mode=node_mode) env = compose_node_env( SKALE_DIR_ENV_FILEPATH, save=False, node_type=NodeType.SKALE, node_mode=node_mode ) - cleanup_skale_op(node_mode=node_mode, env=env) + cleanup_skale_op(node_mode=node_mode, env=env, prune=prune) logger.info('SKALE node was cleaned up, all containers and data removed') diff --git a/node_cli/operations/base.py b/node_cli/operations/base.py index 13abb0f3..e6a9a53c 100644 --- a/node_cli/operations/base.py +++ b/node_cli/operations/base.py @@ -69,6 +69,7 @@ compose_up, docker_cleanup, remove_dynamic_containers, + system_prune, ) from node_cli.utils.helper import cleanup_dir_content, rm_dir, str_to_bool, run_cmd from node_cli.utils.meta import CliMetaManager, FairCliMetaManager @@ -457,8 +458,10 @@ def cleanup_active(): logger.info('Active node cleanup finished.') -def cleanup(node_mode: NodeMode, env: dict) -> None: +def cleanup(node_mode: NodeMode, env: dict, prune: bool = False) -> None: turn_off(env, node_type=NodeType.SKALE, node_mode=node_mode) + if prune: 
+ system_prune() if node_mode == NodeMode.PASSIVE: schain_name = env['SCHAIN_NAME'] cleanup_no_lvm_datadir(chain_name=schain_name) From 0a1abc434a6ffbe467d69bba6cf845f82fbdb284 Mon Sep 17 00:00:00 2001 From: Alex Sheverdin Date: Wed, 29 Oct 2025 13:57:54 +0000 Subject: [PATCH 244/332] Rename `cleanup_node` function to `_cleanup_node` --- node_cli/cli/node.py | 2 +- node_cli/cli/passive_node.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/node_cli/cli/node.py b/node_cli/cli/node.py index 6180fc0a..bdb4a3b7 100644 --- a/node_cli/cli/node.py +++ b/node_cli/cli/node.py @@ -270,5 +270,5 @@ def version(raw: bool) -> None: ) @click.option('--prune', is_flag=True, help='Prune docker system.') @streamed_cmd -def cleanup_node(prune): +def _cleanup_node(prune): cleanup_skale(node_mode=NodeMode.ACTIVE, prune=prune) diff --git a/node_cli/cli/passive_node.py b/node_cli/cli/passive_node.py index ee466f06..7c7ce7d7 100644 --- a/node_cli/cli/passive_node.py +++ b/node_cli/cli/passive_node.py @@ -83,5 +83,5 @@ def _update_passive(env_file, unsafe_ok): ) @click.option('--prune', is_flag=True, help='Prune docker system.') @streamed_cmd -def cleanup_node(prune): +def _cleanup_node(prune): cleanup_skale(node_mode=NodeMode.PASSIVE, prune=prune) \ No newline at end of file From 3e0d4469fd4b8eb1f82ff97f57c9d70892a3ef28 Mon Sep 17 00:00:00 2001 From: Alex Sheverdin Date: Wed, 29 Oct 2025 15:30:53 +0000 Subject: [PATCH 245/332] Add `prune` option for FAIR node clean up commands --- node_cli/cli/fair_node.py | 9 +++++---- node_cli/cli/passive_fair_node.py | 9 +++++---- 2 files changed, 10 insertions(+), 8 deletions(-) diff --git a/node_cli/cli/fair_node.py b/node_cli/cli/fair_node.py index 343be1de..56ab9d75 100644 --- a/node_cli/cli/fair_node.py +++ b/node_cli/cli/fair_node.py @@ -152,17 +152,18 @@ def repair(snapshot: str = 'any') -> None: repair_chain(snapshot_from=snapshot) -@node.command('cleanup', help='Cleanup Fair node.') +@node.command('cleanup', help='Remove 
all FAIR node data and containers.') @click.option( '--yes', is_flag=True, callback=abort_if_false, expose_value=False, - prompt='Are you sure you want to cleanup Fair node?', + prompt='Are you sure you want to remove all FAIR node data and containers?', ) +@click.option('--prune', is_flag=True, help='Prune docker system.') @streamed_cmd -def cleanup_node(): - cleanup_fair(node_mode=NodeMode.ACTIVE) +def cleanup_node(prune): + cleanup_fair(node_mode=NodeMode.ACTIVE, prune=prune) @node.command('change-ip', help=TEXTS['fair']['node']['change-ip']['help']) diff --git a/node_cli/cli/passive_fair_node.py b/node_cli/cli/passive_fair_node.py index e48f0ce0..83eb1bd1 100644 --- a/node_cli/cli/passive_fair_node.py +++ b/node_cli/cli/passive_fair_node.py @@ -105,17 +105,18 @@ def update_node(env_filepath: str, pull_config_for_schain, force_skaled_start: b ) -@passive_node.command('cleanup', help='Cleanup Fair node.') +@passive_node.command('cleanup', help='Remove all FAIR node data and containers.') @click.option( '--yes', is_flag=True, callback=abort_if_false, expose_value=False, - prompt='Are you sure you want to cleanup Fair node?', + prompt='Are you sure you want to remove all FAIR node data and containers?', ) +@click.option('--prune', is_flag=True, help='Prune docker system.') @streamed_cmd -def cleanup_node(): - cleanup_fair(node_mode=NodeMode.PASSIVE) +def cleanup_node(prune): + cleanup_fair(node_mode=NodeMode.PASSIVE, prune=prune) @passive_node.command('setup', help=TEXTS['fair']['node']['setup']['help']) From 89cc3b060b8d1a8dea11eee5a8826b4aaec48e14 Mon Sep 17 00:00:00 2001 From: Alex Sheverdin Date: Wed, 29 Oct 2025 15:36:53 +0000 Subject: [PATCH 246/332] Rename `_cleanup_node` function to `cleanup_node` --- node_cli/cli/node.py | 4 ++-- node_cli/cli/passive_node.py | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/node_cli/cli/node.py b/node_cli/cli/node.py index bdb4a3b7..55304356 100644 --- a/node_cli/cli/node.py +++ b/node_cli/cli/node.py 
@@ -260,7 +260,7 @@ def version(raw: bool) -> None: print_meta_info(meta_info) -@node.command('cleanup', help='Remove all SKALE node data and containers..') +@node.command('cleanup', help='Remove all SKALE node data and containers.') @click.option( '--yes', is_flag=True, @@ -270,5 +270,5 @@ def version(raw: bool) -> None: ) @click.option('--prune', is_flag=True, help='Prune docker system.') @streamed_cmd -def _cleanup_node(prune): +def cleanup_node(prune): cleanup_skale(node_mode=NodeMode.ACTIVE, prune=prune) diff --git a/node_cli/cli/passive_node.py b/node_cli/cli/passive_node.py index 7c7ce7d7..9c92c767 100644 --- a/node_cli/cli/passive_node.py +++ b/node_cli/cli/passive_node.py @@ -73,7 +73,7 @@ def _update_passive(env_file, unsafe_ok): update_passive(env_file) -@passive_node.command('cleanup', help='Remove all passive SKALE node data and containers.') +@passive_node.command('cleanup', help='Remove all SKALE node data and containers.') @click.option( '--yes', is_flag=True, @@ -83,5 +83,5 @@ def _update_passive(env_file, unsafe_ok): ) @click.option('--prune', is_flag=True, help='Prune docker system.') @streamed_cmd -def _cleanup_node(prune): +def cleanup_node(prune): cleanup_skale(node_mode=NodeMode.PASSIVE, prune=prune) \ No newline at end of file From fc2b09a05b90d486b79ab11e9e5f941d9ef3219c Mon Sep 17 00:00:00 2001 From: Alex Sheverdin Date: Wed, 29 Oct 2025 17:42:20 +0000 Subject: [PATCH 247/332] Update SKALE passive node clean up tests --- tests/cli/passive_node_test.py | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) diff --git a/tests/cli/passive_node_test.py b/tests/cli/passive_node_test.py index 94cde559..790ad5f3 100644 --- a/tests/cli/passive_node_test.py +++ b/tests/cli/passive_node_test.py @@ -22,12 +22,12 @@ import mock -from node_cli.cli.passive_node import _cleanup_passive, _init_passive, _update_passive +from node_cli.cli.passive_node import cleanup_node, _init_passive, _update_passive from node_cli.configs import NODE_DATA_PATH, 
SKALE_DIR from node_cli.core.node_options import NodeOptions from node_cli.utils.helper import init_default_logger from node_cli.utils.meta import CliMeta -from node_cli.utils.node_type import NodeType +from node_cli.utils.node_type import NodeType, NodeMode from tests.conftest import set_env_var from tests.helper import run_command, subprocess_run_mock from tests.resources_test import BIG_DISK_SIZE @@ -127,12 +127,12 @@ def test_update_passive(passive_user_conf, mocked_g_config): assert result.exit_code == 0 -def test_cleanup_passive(mocked_g_config): +def test_cleanup_node(mocked_g_config): pathlib.Path(SKALE_DIR).mkdir(parents=True, exist_ok=True) with ( mock.patch('subprocess.run', new=subprocess_run_mock), - mock.patch('node_cli.core.node.cleanup_passive_op'), + mock.patch('node_cli.core.node.cleanup_skale_op') as cleanup_mock, mock.patch('node_cli.core.node.is_base_containers_alive', return_value=True), mock.patch('node_cli.core.resources.get_disk_size', return_value=BIG_DISK_SIZE), mock.patch('node_cli.operations.base.configure_nftables'), @@ -143,5 +143,6 @@ def test_cleanup_passive(mocked_g_config): return_value=CliMeta(version='2.6.0', config_stream='3.0.2'), ), ): - result = run_command(_cleanup_passive, ['--yes']) + result = run_command(cleanup_node, ['--yes']) assert result.exit_code == 0 + cleanup_mock.assert_called_once_with(node_mode=NodeMode.PASSIVE, prune=False, env={'SCHAIN_NAME': 'test'}) From 2780195679598e0b34d85220b4c9fea6d5f279a2 Mon Sep 17 00:00:00 2001 From: Alex Sheverdin Date: Wed, 29 Oct 2025 17:53:54 +0000 Subject: [PATCH 248/332] Apply formatting according to Ruff rules --- tests/cli/passive_node_test.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/tests/cli/passive_node_test.py b/tests/cli/passive_node_test.py index 790ad5f3..c9ea9527 100644 --- a/tests/cli/passive_node_test.py +++ b/tests/cli/passive_node_test.py @@ -145,4 +145,5 @@ def test_cleanup_node(mocked_g_config): ): result = run_command(cleanup_node, 
['--yes']) assert result.exit_code == 0 - cleanup_mock.assert_called_once_with(node_mode=NodeMode.PASSIVE, prune=False, env={'SCHAIN_NAME': 'test'}) + cleanup_mock.assert_called_once_with( + node_mode=NodeMode.PASSIVE, prune=False, env={'SCHAIN_NAME': 'test'}) From f18fbbf00939523423b4c4bf0ee7367c610b2180 Mon Sep 17 00:00:00 2001 From: Alex Sheverdin Date: Wed, 29 Oct 2025 18:03:34 +0000 Subject: [PATCH 249/332] Add SKALE active node clean up test --- tests/cli/node_test.py | 21 +++++++++++++++++++++ 1 file changed, 21 insertions(+) diff --git a/tests/cli/node_test.py b/tests/cli/node_test.py index bfd5263b..b31cddc0 100644 --- a/tests/cli/node_test.py +++ b/tests/cli/node_test.py @@ -30,6 +30,7 @@ _turn_off, _turn_on, backup_node, + cleanup_node, node_info, register_node, remove_node_from_maintenance, @@ -473,3 +474,23 @@ def test_node_version(meta_file_v2): result.output == "{'version': '0.1.1', 'config_stream': 'develop', 'docker_lvmpy_version': '1.1.2'}\n" ) + +def test_cleanup_node(mocked_g_config): + pathlib.Path(SKALE_DIR).mkdir(parents=True, exist_ok=True) + + with ( + mock.patch('subprocess.run', new=subprocess_run_mock), + mock.patch('node_cli.core.node.cleanup_skale_op') as cleanup_mock, + mock.patch('node_cli.core.node.is_base_containers_alive', return_value=True), + mock.patch('node_cli.core.resources.get_disk_size', return_value=BIG_DISK_SIZE), + mock.patch('node_cli.operations.base.configure_nftables'), + mock.patch('node_cli.utils.decorators.is_node_inited', return_value=True), + mock.patch('node_cli.core.node.compose_node_env', return_value={}), + mock.patch( + 'node_cli.core.node.CliMetaManager.get_meta_info', + return_value=CliMeta(version='2.6.0', config_stream='3.0.2'), + ), + ): + result = run_command(cleanup_node, ['--yes']) + assert result.exit_code == 0 + cleanup_mock.assert_called_once_with(node_mode=NodeMode.ACTIVE, prune=False, env={}) \ No newline at end of file From 3f11ac2af292bc9629ba9b3d9fb25189c59e91a8 Mon Sep 17 00:00:00 2001 
From: Alex Sheverdin Date: Wed, 29 Oct 2025 19:12:23 +0000 Subject: [PATCH 250/332] Add `prune` option for FAIR node clean up functions --- node_cli/fair/common.py | 5 ++--- node_cli/operations/fair.py | 5 ++++- 2 files changed, 6 insertions(+), 4 deletions(-) diff --git a/node_cli/fair/common.py b/node_cli/fair/common.py index dc91e26e..23c88cd5 100644 --- a/node_cli/fair/common.py +++ b/node_cli/fair/common.py @@ -81,14 +81,13 @@ def init( @check_user -def cleanup(node_mode: NodeMode) -> None: +def cleanup(node_mode: NodeMode, prune: bool = False) -> None: node_mode = upsert_node_mode(node_mode=node_mode) env = compose_node_env( SKALE_DIR_ENV_FILEPATH, save=False, node_type=NodeType.FAIR, node_mode=node_mode ) - cleanup_fair_op(node_mode=node_mode, env=env) + cleanup_fair_op(node_mode=node_mode, env=env, prune=prune) logger.info('Fair node was cleaned up, all containers and data removed') - cleanup_docker_configuration() @check_inited diff --git a/node_cli/operations/fair.py b/node_cli/operations/fair.py index 689b4a51..ca7baef5 100644 --- a/node_cli/operations/fair.py +++ b/node_cli/operations/fair.py @@ -63,6 +63,7 @@ remove_dynamic_containers, start_container_by_name, stop_container_by_name, + system_prune, wait_for_container, ) from node_cli.utils.helper import cleanup_dir_content, rm_dir, str_to_bool @@ -282,8 +283,10 @@ def restore(node_mode: NodeMode, env, backup_path, config_only=False): return True -def cleanup(node_mode: NodeMode, env: dict) -> None: +def cleanup(node_mode: NodeMode, env: dict, prune: bool = False) -> None: turn_off(env, node_type=NodeType.FAIR, node_mode=node_mode) + if prune: + system_prune() cleanup_no_lvm_datadir() rm_dir(GLOBAL_SKALE_DIR) rm_dir(SKALE_DIR) From 64fc97f385275d61d7910e25fbe71954572b044a Mon Sep 17 00:00:00 2001 From: Alex Sheverdin Date: Wed, 29 Oct 2025 20:17:31 +0000 Subject: [PATCH 251/332] Delete unused import --- node_cli/fair/common.py | 1 - 1 file changed, 1 deletion(-) diff --git a/node_cli/fair/common.py 
b/node_cli/fair/common.py index 23c88cd5..47bfc50a 100644 --- a/node_cli/fair/common.py +++ b/node_cli/fair/common.py @@ -22,7 +22,6 @@ from node_cli.configs import INIT_TIMEOUT, SKALE_DIR, TM_INIT_TIMEOUT from node_cli.configs.user import SKALE_DIR_ENV_FILEPATH -from node_cli.core.docker_config import cleanup_docker_configuration from node_cli.core.host import save_env_params from node_cli.core.node import compose_node_env, is_base_containers_alive from node_cli.core.node_options import upsert_node_mode From 9e8f4896ee284ac3b1deda1c1a6fb6e08fbd25bd Mon Sep 17 00:00:00 2001 From: Alex Sheverdin Date: Wed, 29 Oct 2025 20:20:34 +0000 Subject: [PATCH 252/332] Add test for FAIR active node cleanup --- tests/cli/fair_cli_test.py | 30 ++++++++++++++++++++++++++++-- 1 file changed, 28 insertions(+), 2 deletions(-) diff --git a/tests/cli/fair_cli_test.py b/tests/cli/fair_cli_test.py index 51fa0a9b..9d62d058 100644 --- a/tests/cli/fair_cli_test.py +++ b/tests/cli/fair_cli_test.py @@ -2,7 +2,6 @@ from unittest import mock from click.testing import CliRunner - from node_cli.cli.fair_boot import ( init_boot, register_boot, @@ -10,11 +9,16 @@ ) from node_cli.cli.fair_node import ( backup_node, + cleanup_node, migrate_node, exit_node, restore_node, ) - +from node_cli.configs import SKALE_DIR +from node_cli.utils.node_type import NodeMode +from node_cli.utils.meta import CliMeta +from tests.helper import run_command, subprocess_run_mock +from tests.resources_test import BIG_DISK_SIZE @mock.patch('node_cli.cli.fair_node.restore_fair') def test_fair_node_restore(mock_restore_core, valid_env_file, tmp_path): @@ -111,3 +115,25 @@ def test_fair_node_exit(mock_exit_core): assert result.exit_code == 0, f'Output: {result.output}\nException: {result.exception}' mock_exit_core.assert_called_once() + + +def test_cleanup_node(mocked_g_config): + pathlib.Path(SKALE_DIR).mkdir(parents=True, exist_ok=True) + + with ( + mock.patch('subprocess.run', new=subprocess_run_mock), + 
mock.patch('node_cli.fair.common.cleanup_fair_op') as cleanup_mock, + mock.patch('node_cli.core.node.is_base_containers_alive', return_value=True), + mock.patch('node_cli.core.resources.get_disk_size', return_value=BIG_DISK_SIZE), + mock.patch('node_cli.operations.base.configure_nftables'), + mock.patch('node_cli.utils.decorators.is_node_inited', return_value=True), + mock.patch('node_cli.fair.common.compose_node_env', return_value={'SCHAIN_NAME': 'test'}), + mock.patch( + 'node_cli.core.node.CliMetaManager.get_meta_info', + return_value=CliMeta(version='2.6.0', config_stream='3.0.2'), + ), + ): + result = run_command(cleanup_node, ['--yes']) + assert result.exit_code == 0 + cleanup_mock.assert_called_once_with( + node_mode=NodeMode.ACTIVE, prune=False, env={'SCHAIN_NAME': 'test'}) From 59781ce2333374216bf5b165cd2833c76621a13a Mon Sep 17 00:00:00 2001 From: Alex Sheverdin Date: Wed, 29 Oct 2025 20:40:14 +0000 Subject: [PATCH 253/332] Add test for FAIR passive node cleanup --- tests/cli/fair_passive_node_test.py | 26 +++++++++++++++++++++++++- 1 file changed, 25 insertions(+), 1 deletion(-) diff --git a/tests/cli/fair_passive_node_test.py b/tests/cli/fair_passive_node_test.py index 34ececf9..d0e94634 100644 --- a/tests/cli/fair_passive_node_test.py +++ b/tests/cli/fair_passive_node_test.py @@ -3,9 +3,11 @@ import mock -from node_cli.cli.passive_fair_node import init_passive_node, update_node +from node_cli.cli.passive_fair_node import cleanup_node, init_passive_node, update_node from node_cli.configs import NODE_DATA_PATH, SKALE_DIR from node_cli.utils.helper import init_default_logger +from node_cli.utils.meta import CliMeta +from node_cli.utils.node_type import NodeMode from tests.helper import run_command, subprocess_run_mock from tests.resources_test import BIG_DISK_SIZE @@ -84,3 +86,25 @@ def test_update_fair_passive(mocked_g_config, tmp_path): ): result = run_command(update_node, [env_file.as_posix(), '--yes']) assert result.exit_code == 0 + + +def 
test_cleanup_node(mocked_g_config): + pathlib.Path(SKALE_DIR).mkdir(parents=True, exist_ok=True) + + with ( + mock.patch('subprocess.run', new=subprocess_run_mock), + mock.patch('node_cli.fair.common.cleanup_fair_op') as cleanup_mock, + mock.patch('node_cli.core.node.is_base_containers_alive', return_value=True), + mock.patch('node_cli.core.resources.get_disk_size', return_value=BIG_DISK_SIZE), + mock.patch('node_cli.operations.base.configure_nftables'), + mock.patch('node_cli.utils.decorators.is_node_inited', return_value=True), + mock.patch('node_cli.fair.common.compose_node_env', return_value={'SCHAIN_NAME': 'test'}), + mock.patch( + 'node_cli.core.node.CliMetaManager.get_meta_info', + return_value=CliMeta(version='2.6.0', config_stream='3.0.2'), + ), + ): + result = run_command(cleanup_node, ['--yes']) + assert result.exit_code == 0 + cleanup_mock.assert_called_once_with( + node_mode=NodeMode.PASSIVE, prune=False, env={'SCHAIN_NAME': 'test'}) From 8840987ef70276dfb2561b0476f78e86a7947a40 Mon Sep 17 00:00:00 2001 From: Alex Sheverdin Date: Thu, 30 Oct 2025 13:35:12 +0000 Subject: [PATCH 254/332] Update Fair node tests --- tests/fair/fair_node_test.py | 20 +++++--------------- 1 file changed, 5 insertions(+), 15 deletions(-) diff --git a/tests/fair/fair_node_test.py b/tests/fair/fair_node_test.py index c3789ff1..4b63aec7 100644 --- a/tests/fair/fair_node_test.py +++ b/tests/fair/fair_node_test.py @@ -158,13 +158,11 @@ def test_migrate_from_boot( @mock.patch('node_cli.utils.decorators.is_user_valid', return_value=True) -@mock.patch('node_cli.fair.common.cleanup_docker_configuration') @mock.patch('node_cli.fair.common.cleanup_fair_op') @mock.patch('node_cli.fair.common.compose_node_env') def test_cleanup_success( mock_compose_env, mock_cleanup_fair_op, - mock_cleanup_docker_config, mock_is_user_valid, inited_node, resource_alloc, @@ -182,18 +180,16 @@ def test_cleanup_success( node_type=NodeType.FAIR, node_mode=NodeMode.ACTIVE, ) - 
mock_cleanup_fair_op.assert_called_once_with(node_mode=NodeMode.ACTIVE, env=mock_env) - mock_cleanup_docker_config.assert_called_once() + mock_cleanup_fair_op.assert_called_once_with( + node_mode=NodeMode.ACTIVE, env=mock_env, prune=False) @mock.patch('node_cli.utils.decorators.is_user_valid', return_value=True) -@mock.patch('node_cli.fair.common.cleanup_docker_configuration') @mock.patch('node_cli.fair.common.cleanup_fair_op') @mock.patch('node_cli.fair.common.compose_node_env') def test_cleanup_calls_operations_in_correct_order( mock_compose_env, mock_cleanup_fair_op, - mock_cleanup_docker_config, mock_is_user_valid, inited_node, resource_alloc, @@ -208,26 +204,22 @@ def test_cleanup_calls_operations_in_correct_order( manager = mock.Mock() manager.attach_mock(mock_compose_env, 'compose_env') manager.attach_mock(mock_cleanup_fair_op, 'cleanup_fair_op') - manager.attach_mock(mock_cleanup_docker_config, 'cleanup_docker_config') cleanup(node_mode=NodeMode.ACTIVE) expected_calls = [ mock.call.compose_env(mock.ANY, save=False, node_type=mock.ANY, node_mode=NodeMode.ACTIVE), - mock.call.cleanup_fair_op(node_mode=NodeMode.ACTIVE, env=mock_env), - mock.call.cleanup_docker_config(), + mock.call.cleanup_fair_op(node_mode=NodeMode.ACTIVE, env=mock_env, prune=False), ] manager.assert_has_calls(expected_calls, any_order=False) @mock.patch('node_cli.utils.decorators.is_user_valid', return_value=True) -@mock.patch('node_cli.fair.common.cleanup_docker_configuration') @mock.patch('node_cli.fair.common.cleanup_fair_op', side_effect=Exception('Cleanup failed')) @mock.patch('node_cli.fair.common.compose_node_env') def test_cleanup_continues_after_fair_op_error( mock_compose_env, mock_cleanup_fair_op, - mock_cleanup_docker_config, mock_is_user_valid, inited_node, resource_alloc, @@ -241,8 +233,8 @@ def test_cleanup_continues_after_fair_op_error( cleanup(node_mode=NodeMode.ACTIVE) mock_compose_env.assert_called_once() - 
mock_cleanup_fair_op.assert_called_once_with(node_mode=NodeMode.ACTIVE, env=mock_env) - mock_cleanup_docker_config.assert_not_called() + mock_cleanup_fair_op.assert_called_once_with( + node_mode=NodeMode.ACTIVE, env=mock_env, prune=False) @mock.patch('node_cli.utils.decorators.is_user_valid', return_value=False) @@ -269,7 +261,6 @@ def test_cleanup_fails_when_not_inited(ensure_meta_removed, active_node_option): @mock.patch('node_cli.utils.decorators.is_user_valid', return_value=True) -@mock.patch('node_cli.fair.common.cleanup_docker_configuration') @mock.patch('node_cli.fair.common.cleanup_fair_op') @mock.patch('node_cli.fair.common.compose_node_env') @mock.patch('node_cli.fair.common.logger') @@ -277,7 +268,6 @@ def test_cleanup_logs_success_message( mock_logger, mock_compose_env, mock_cleanup_fair_op, - mock_cleanup_docker_config, mock_is_user_valid, inited_node, resource_alloc, From b9346b7dc13de21764a3bdc3b3ab743866a00d07 Mon Sep 17 00:00:00 2001 From: Alex Sheverdin Date: Thu, 30 Oct 2025 15:16:58 +0000 Subject: [PATCH 255/332] Fix FAIR node tests --- tests/cli/fair_cli_test.py | 2 +- tests/cli/fair_passive_node_test.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/cli/fair_cli_test.py b/tests/cli/fair_cli_test.py index 9d62d058..3c6b9ef5 100644 --- a/tests/cli/fair_cli_test.py +++ b/tests/cli/fair_cli_test.py @@ -117,7 +117,7 @@ def test_fair_node_exit(mock_exit_core): mock_exit_core.assert_called_once() -def test_cleanup_node(mocked_g_config): +def test_cleanup_node(mocked_g_config, inited_node): pathlib.Path(SKALE_DIR).mkdir(parents=True, exist_ok=True) with ( diff --git a/tests/cli/fair_passive_node_test.py b/tests/cli/fair_passive_node_test.py index d0e94634..a8b7818a 100644 --- a/tests/cli/fair_passive_node_test.py +++ b/tests/cli/fair_passive_node_test.py @@ -71,7 +71,7 @@ def test_init_fair_passive_snapshot_any(mocked_g_config, tmp_path): assert result.exit_code == 0 -def test_update_fair_passive(mocked_g_config, tmp_path): 
+def test_update_fair_passive(mocked_g_config, tmp_path, clean_node_options): env_file = tmp_path / 'test-env' env_file.write_text('') pathlib.Path(NODE_DATA_PATH).mkdir(parents=True, exist_ok=True) From 17cdb8e89eb48db86405cf5b5b470d1cd2b97ecc Mon Sep 17 00:00:00 2001 From: Alex Sheverdin Date: Fri, 31 Oct 2025 19:25:25 +0000 Subject: [PATCH 256/332] Add test for SKALE node cleanup --- tests/core/core_node_test.py | 31 +++++++++++++++++++++++++++++++ 1 file changed, 31 insertions(+) diff --git a/tests/core/core_node_test.py b/tests/core/core_node_test.py index 7188ffae..5eccc1d8 100644 --- a/tests/core/core_node_test.py +++ b/tests/core/core_node_test.py @@ -12,7 +12,9 @@ from node_cli.configs import NODE_DATA_PATH, SCHAINS_MNT_DIR_REGULAR, SCHAINS_MNT_DIR_SINGLE_CHAIN from node_cli.configs.resource_allocation import RESOURCE_ALLOCATION_FILEPATH +from node_cli.configs.user import SKALE_DIR_ENV_FILEPATH from node_cli.core.node import ( + cleanup, compose_node_env, get_expected_container_names, init, @@ -471,3 +473,32 @@ def test_is_update_safe_when_api_call_fails( mock_requests_get.side_effect = requests.exceptions.ConnectionError('Test connection error') assert is_update_safe(node_mode=node_mode) is False mock_requests_get.assert_called_once() + + +@mock.patch('node_cli.utils.decorators.is_user_valid', return_value=True) +@mock.patch('node_cli.utils.decorators.is_node_inited', return_value=True) +@mock.patch('node_cli.core.node.cleanup_skale_op') +@mock.patch('node_cli.core.node.compose_node_env') +def test_cleanup_success( + mock_compose_env, + mock_cleanup_skale_op, + mock_node_inited, + mock_is_user_valid, + inited_node, + resource_alloc, + meta_file_v3, + active_node_option, +): + mock_env = {'ENV_TYPE': 'devnet'} + mock_compose_env.return_value = mock_env + + cleanup(node_mode=NodeMode.ACTIVE) + + mock_compose_env.assert_called_once_with( + SKALE_DIR_ENV_FILEPATH, + save=False, + node_type=NodeType.SKALE, + node_mode=NodeMode.ACTIVE, + ) + 
mock_cleanup_skale_op.assert_called_once_with( + node_mode=NodeMode.ACTIVE, env=mock_env, prune=False) \ No newline at end of file From 894abc4f438dfc70f9a05e8b586a8cdff7c7d943 Mon Sep 17 00:00:00 2001 From: Alex Sheverdin Date: Fri, 31 Oct 2025 19:39:32 +0000 Subject: [PATCH 257/332] Unmount `no_lvm_datadir` volume after cleanup --- node_cli/core/schains.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/node_cli/core/schains.py b/node_cli/core/schains.py index fe3aeeb4..ac457371 100644 --- a/node_cli/core/schains.py +++ b/node_cli/core/schains.py @@ -299,6 +299,6 @@ def cleanup_no_lvm_datadir( if folder_name != 'shared-space': logger.info('Removing datadir content for %s', folder_path) cleanup_datadir_content(folder_path) - logger.info('Removing datadir content for %s', folder_path) if os.path.isdir(folder_path): shutil.rmtree(folder_path) + run_cmd(['umount', base_path]) From 438b1f4f744f933b1b4b4f97e1ebd23906a17e99 Mon Sep 17 00:00:00 2001 From: Alex Sheverdin Date: Mon, 3 Nov 2025 17:17:27 +0000 Subject: [PATCH 258/332] Remove `@check_inited` decorator for SKALE node cleanup function --- node_cli/core/node.py | 1 - 1 file changed, 1 deletion(-) diff --git a/node_cli/core/node.py b/node_cli/core/node.py index 18bf473f..30036fcf 100644 --- a/node_cli/core/node.py +++ b/node_cli/core/node.py @@ -225,7 +225,6 @@ def update_passive(env_filepath: str, unsafe_ok: bool = False) -> None: logger.info('Node update finished') -@check_inited @check_user def cleanup(node_mode: NodeMode, prune: bool = False) -> None: node_mode = upsert_node_mode(node_mode=node_mode) From 78c46141dc764dad5962d4ffda59ca53743c2da0 Mon Sep 17 00:00:00 2001 From: Alex Sheverdin Date: Mon, 3 Nov 2025 17:18:40 +0000 Subject: [PATCH 259/332] Update SKALE node cleanup test --- tests/core/core_node_test.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/tests/core/core_node_test.py b/tests/core/core_node_test.py index 5eccc1d8..83117c29 100644 --- a/tests/core/core_node_test.py 
+++ b/tests/core/core_node_test.py @@ -476,13 +476,11 @@ def test_is_update_safe_when_api_call_fails( @mock.patch('node_cli.utils.decorators.is_user_valid', return_value=True) -@mock.patch('node_cli.utils.decorators.is_node_inited', return_value=True) @mock.patch('node_cli.core.node.cleanup_skale_op') @mock.patch('node_cli.core.node.compose_node_env') def test_cleanup_success( mock_compose_env, mock_cleanup_skale_op, - mock_node_inited, mock_is_user_valid, inited_node, resource_alloc, From 2fed20e73285a3da6c54378dfa17701562f198c1 Mon Sep 17 00:00:00 2001 From: Alex Sheverdin Date: Mon, 3 Nov 2025 17:58:24 +0000 Subject: [PATCH 260/332] Fix `test_cleanup_passive_datadir` test --- tests/core/core_schains_test.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/tests/core/core_schains_test.py b/tests/core/core_schains_test.py index ee868242..7016a262 100644 --- a/tests/core/core_schains_test.py +++ b/tests/core/core_schains_test.py @@ -80,6 +80,9 @@ def test_cleanup_passive_datadir(tmp_passive_datadir): hash_path = snapshot_folder.joinpath('snapshot_hash.txt') hash_path.touch() - with mock.patch('node_cli.core.schains.rm_btrfs_subvolume'): + with ( + mock.patch('node_cli.core.schains.rm_btrfs_subvolume'), + mock.patch('node_cli.core.schains.run_cmd'), + ): cleanup_no_lvm_datadir(schain_name, base_path=tmp_passive_datadir) assert not os.path.isdir(base_folder) From 26180971d8916cea9bbec30db89ed63ab0d71f68 Mon Sep 17 00:00:00 2001 From: Alex Sheverdin Date: Tue, 4 Nov 2025 17:05:41 +0000 Subject: [PATCH 261/332] Skip user config validation during cleanup process --- node_cli/configs/user.py | 4 +++- node_cli/core/node.py | 9 ++++++++- 2 files changed, 11 insertions(+), 2 deletions(-) diff --git a/node_cli/configs/user.py b/node_cli/configs/user.py index 3aa7b831..2625f200 100644 --- a/node_cli/configs/user.py +++ b/node_cli/configs/user.py @@ -143,6 +143,7 @@ def get_validated_user_config( node_mode: NodeMode, env_filepath: str = 
SKALE_DIR_ENV_FILEPATH, is_fair_boot: bool = False, + skip_usr_conf_validation: bool = False, ) -> BaseUserConfig: params = parse_env_file(env_filepath) user_config_class = get_user_config_class( @@ -160,7 +161,8 @@ def get_validated_user_config( params = to_lower_keys(params) user_config = user_config_class(**params) - validate_user_config(user_config) + if not skip_usr_conf_validation: + validate_user_config(user_config) return user_config diff --git a/node_cli/core/node.py b/node_cli/core/node.py index 30036fcf..ad2f2713 100644 --- a/node_cli/core/node.py +++ b/node_cli/core/node.py @@ -229,7 +229,11 @@ def update_passive(env_filepath: str, unsafe_ok: bool = False) -> None: def cleanup(node_mode: NodeMode, prune: bool = False) -> None: node_mode = upsert_node_mode(node_mode=node_mode) env = compose_node_env( - SKALE_DIR_ENV_FILEPATH, save=False, node_type=NodeType.SKALE, node_mode=node_mode + SKALE_DIR_ENV_FILEPATH, + save=False, + node_type=NodeType.SKALE, + node_mode=node_mode, + skip_usr_conf_validation=True, ) cleanup_skale_op(node_mode=node_mode, env=env, prune=prune) logger.info('SKALE node was cleaned up, all containers and data removed') @@ -244,6 +248,7 @@ def compose_node_env( pull_config_for_schain: Optional[str] = None, save: bool = True, is_fair_boot: bool = False, + skip_usr_conf_validation: bool = False, ) -> dict[str, str]: if env_filepath is not None: user_config = get_validated_user_config( @@ -251,6 +256,7 @@ def compose_node_env( node_mode=node_mode, env_filepath=env_filepath, is_fair_boot=is_fair_boot, + skip_usr_conf_validation=skip_usr_conf_validation, ) if save: save_env_params(env_filepath) @@ -259,6 +265,7 @@ def compose_node_env( node_type=node_type, env_filepath=INIT_ENV_FILEPATH, is_fair_boot=is_fair_boot, + skip_usr_conf_validation=skip_usr_conf_validation, ) if node_mode == NodeMode.PASSIVE or node_type == NodeType.FAIR: From c8b4760b483d41ae594fee82baec8625ccb4ed6e Mon Sep 17 00:00:00 2001 From: Alex Sheverdin Date: Tue, 4 Nov 
2025 17:50:38 +0000 Subject: [PATCH 262/332] Update `test_cleanup_success` for SKALE node --- tests/core/core_node_test.py | 1 + 1 file changed, 1 insertion(+) diff --git a/tests/core/core_node_test.py b/tests/core/core_node_test.py index 83117c29..e53a3351 100644 --- a/tests/core/core_node_test.py +++ b/tests/core/core_node_test.py @@ -497,6 +497,7 @@ def test_cleanup_success( save=False, node_type=NodeType.SKALE, node_mode=NodeMode.ACTIVE, + skip_usr_conf_validation=True, ) mock_cleanup_skale_op.assert_called_once_with( node_mode=NodeMode.ACTIVE, env=mock_env, prune=False) \ No newline at end of file From e5e7ee0349064cae0dcafd2d79088a24b54ea747 Mon Sep 17 00:00:00 2001 From: Alex Sheverdin Date: Wed, 5 Nov 2025 19:04:09 +0000 Subject: [PATCH 263/332] Skip user config validation during cleanup of FAIR nodes --- node_cli/fair/common.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/node_cli/fair/common.py b/node_cli/fair/common.py index 47bfc50a..458d06f2 100644 --- a/node_cli/fair/common.py +++ b/node_cli/fair/common.py @@ -83,7 +83,11 @@ def init( def cleanup(node_mode: NodeMode, prune: bool = False) -> None: node_mode = upsert_node_mode(node_mode=node_mode) env = compose_node_env( - SKALE_DIR_ENV_FILEPATH, save=False, node_type=NodeType.FAIR, node_mode=node_mode + SKALE_DIR_ENV_FILEPATH, + save=False, + node_type=NodeType.FAIR, + node_mode=node_mode, + skip_usr_conf_validation=True, ) cleanup_fair_op(node_mode=node_mode, env=env, prune=prune) logger.info('Fair node was cleaned up, all containers and data removed') From ee186623b6d37f263fcf02e5a7dc064c945b838e Mon Sep 17 00:00:00 2001 From: Alex Sheverdin Date: Wed, 5 Nov 2025 19:26:03 +0000 Subject: [PATCH 264/332] Update tests --- tests/fair/fair_node_test.py | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/tests/fair/fair_node_test.py b/tests/fair/fair_node_test.py index 4b63aec7..7fa2e912 100644 --- a/tests/fair/fair_node_test.py +++ 
b/tests/fair/fair_node_test.py @@ -179,6 +179,7 @@ def test_cleanup_success( save=False, node_type=NodeType.FAIR, node_mode=NodeMode.ACTIVE, + skip_usr_conf_validation=True, ) mock_cleanup_fair_op.assert_called_once_with( node_mode=NodeMode.ACTIVE, env=mock_env, prune=False) @@ -208,7 +209,12 @@ def test_cleanup_calls_operations_in_correct_order( cleanup(node_mode=NodeMode.ACTIVE) expected_calls = [ - mock.call.compose_env(mock.ANY, save=False, node_type=mock.ANY, node_mode=NodeMode.ACTIVE), + mock.call.compose_env( + mock.ANY, + save=False, + node_type=mock.ANY, + node_mode=NodeMode.ACTIVE, + skip_usr_conf_validation=True), mock.call.cleanup_fair_op(node_mode=NodeMode.ACTIVE, env=mock_env, prune=False), ] manager.assert_has_calls(expected_calls, any_order=False) From 2be08b8735bf96f8665d063d40c0b00ce00d28d3 Mon Sep 17 00:00:00 2001 From: Alex Sheverdin Date: Tue, 18 Nov 2025 12:05:47 +0000 Subject: [PATCH 265/332] Rename `cleanup_active` function to `cleanup_lvm_datadir` and move to another module --- node_cli/core/schains.py | 12 ++++++++++++ node_cli/operations/base.py | 14 ++------------ 2 files changed, 14 insertions(+), 12 deletions(-) diff --git a/node_cli/core/schains.py b/node_cli/core/schains.py index ac457371..64c16031 100644 --- a/node_cli/core/schains.py +++ b/node_cli/core/schains.py @@ -38,6 +38,7 @@ from node_cli.utils.docker_utils import ensure_volume, is_volume_exists from node_cli.utils.exit_codes import CLIExitCodes from node_cli.utils.helper import ( + cleanup_dir_content, error_exit, get_request, is_btrfs_subvolume, @@ -302,3 +303,14 @@ def cleanup_no_lvm_datadir( if os.path.isdir(folder_path): shutil.rmtree(folder_path) run_cmd(['umount', base_path]) + + +def cleanup_lvm_datadir(): + logger.info('Starting cleanup for active node...') + logger.info('Unmounting /mnt/schains-shared-space...') + run_cmd(['sudo', 'umount', '/mnt/schains-shared-space'], check_code=False) + logger.info('Cleaning up /mnt directory content...') + 
cleanup_dir_content('/mnt/') + logger.info('Removing LVM volume group "schains"...') + run_cmd(['sudo', 'lvremove', '-f', 'schains'], check_code=False) + logger.info('Active node cleanup finished.') \ No newline at end of file diff --git a/node_cli/operations/base.py b/node_cli/operations/base.py index 26db1c2e..28be076d 100644 --- a/node_cli/operations/base.py +++ b/node_cli/operations/base.py @@ -49,6 +49,7 @@ ) from node_cli.core.resources import init_shared_space_volume, update_resource_allocation from node_cli.core.schains import ( + cleanup_lvm_datadir, cleanup_no_lvm_datadir, update_node_cli_schain_status, ) @@ -442,17 +443,6 @@ def cleanup_passive(env, schain_name: str) -> None: rm_dir(SKALE_DIR) -def cleanup_active(): - logger.info('Starting cleanup for active node...') - logger.info('Unmounting /mnt/schains-shared-space...') - run_cmd(['sudo', 'umount', '/mnt/schains-shared-space'], check_code=False) - logger.info('Cleaning up /mnt directory content...') - cleanup_dir_content('/mnt/') - logger.info('Removing LVM volume group "schains"...') - run_cmd(['sudo', 'lvremove', '-f', 'schains'], check_code=False) - logger.info('Active node cleanup finished.') - - def cleanup(node_mode: NodeMode, env: dict, prune: bool = False) -> None: turn_off(env, node_type=NodeType.SKALE, node_mode=node_mode) if prune: @@ -461,7 +451,7 @@ def cleanup(node_mode: NodeMode, env: dict, prune: bool = False) -> None: schain_name = env['SCHAIN_NAME'] cleanup_no_lvm_datadir(chain_name=schain_name) else: - cleanup_active() + cleanup_lvm_datadir() rm_dir(GLOBAL_SKALE_DIR) rm_dir(SKALE_DIR) cleanup_dir_content(NFTABLES_CHAIN_FOLDER_PATH) From 11300451ae14a2806f7c21737ff7425ee1ca6ebc Mon Sep 17 00:00:00 2001 From: Alex Sheverdin Date: Tue, 18 Nov 2025 12:10:46 +0000 Subject: [PATCH 266/332] Rename `skip_usr_conf_validation` parameter to `skip_user_conf_validation` --- node_cli/configs/user.py | 4 ++-- node_cli/core/node.py | 8 ++++---- node_cli/fair/common.py | 2 +- 
tests/core/core_node_test.py | 2 +- tests/fair/fair_node_test.py | 4 ++-- 5 files changed, 10 insertions(+), 10 deletions(-) diff --git a/node_cli/configs/user.py b/node_cli/configs/user.py index 2625f200..59ecda77 100644 --- a/node_cli/configs/user.py +++ b/node_cli/configs/user.py @@ -143,7 +143,7 @@ def get_validated_user_config( node_mode: NodeMode, env_filepath: str = SKALE_DIR_ENV_FILEPATH, is_fair_boot: bool = False, - skip_usr_conf_validation: bool = False, + skip_user_conf_validation: bool = False, ) -> BaseUserConfig: params = parse_env_file(env_filepath) user_config_class = get_user_config_class( @@ -161,7 +161,7 @@ def get_validated_user_config( params = to_lower_keys(params) user_config = user_config_class(**params) - if not skip_usr_conf_validation: + if not skip_user_conf_validation: validate_user_config(user_config) return user_config diff --git a/node_cli/core/node.py b/node_cli/core/node.py index ad2f2713..725deee3 100644 --- a/node_cli/core/node.py +++ b/node_cli/core/node.py @@ -233,7 +233,7 @@ def cleanup(node_mode: NodeMode, prune: bool = False) -> None: save=False, node_type=NodeType.SKALE, node_mode=node_mode, - skip_usr_conf_validation=True, + skip_user_conf_validation=True, ) cleanup_skale_op(node_mode=node_mode, env=env, prune=prune) logger.info('SKALE node was cleaned up, all containers and data removed') @@ -248,7 +248,7 @@ def compose_node_env( pull_config_for_schain: Optional[str] = None, save: bool = True, is_fair_boot: bool = False, - skip_usr_conf_validation: bool = False, + skip_user_conf_validation: bool = False, ) -> dict[str, str]: if env_filepath is not None: user_config = get_validated_user_config( @@ -256,7 +256,7 @@ def compose_node_env( node_mode=node_mode, env_filepath=env_filepath, is_fair_boot=is_fair_boot, - skip_usr_conf_validation=skip_usr_conf_validation, + skip_user_conf_validation=skip_user_conf_validation, ) if save: save_env_params(env_filepath) @@ -265,7 +265,7 @@ def compose_node_env( node_type=node_type, 
env_filepath=INIT_ENV_FILEPATH, is_fair_boot=is_fair_boot, - skip_usr_conf_validation=skip_usr_conf_validation, + skip_user_conf_validation=skip_user_conf_validation, ) if node_mode == NodeMode.PASSIVE or node_type == NodeType.FAIR: diff --git a/node_cli/fair/common.py b/node_cli/fair/common.py index 458d06f2..968891d4 100644 --- a/node_cli/fair/common.py +++ b/node_cli/fair/common.py @@ -87,7 +87,7 @@ def cleanup(node_mode: NodeMode, prune: bool = False) -> None: save=False, node_type=NodeType.FAIR, node_mode=node_mode, - skip_usr_conf_validation=True, + skip_user_conf_validation=True, ) cleanup_fair_op(node_mode=node_mode, env=env, prune=prune) logger.info('Fair node was cleaned up, all containers and data removed') diff --git a/tests/core/core_node_test.py b/tests/core/core_node_test.py index e53a3351..953e7ca5 100644 --- a/tests/core/core_node_test.py +++ b/tests/core/core_node_test.py @@ -497,7 +497,7 @@ def test_cleanup_success( save=False, node_type=NodeType.SKALE, node_mode=NodeMode.ACTIVE, - skip_usr_conf_validation=True, + skip_user_conf_validation=True, ) mock_cleanup_skale_op.assert_called_once_with( node_mode=NodeMode.ACTIVE, env=mock_env, prune=False) \ No newline at end of file diff --git a/tests/fair/fair_node_test.py b/tests/fair/fair_node_test.py index 7fa2e912..a0408caa 100644 --- a/tests/fair/fair_node_test.py +++ b/tests/fair/fair_node_test.py @@ -179,7 +179,7 @@ def test_cleanup_success( save=False, node_type=NodeType.FAIR, node_mode=NodeMode.ACTIVE, - skip_usr_conf_validation=True, + skip_user_conf_validation=True, ) mock_cleanup_fair_op.assert_called_once_with( node_mode=NodeMode.ACTIVE, env=mock_env, prune=False) @@ -214,7 +214,7 @@ def test_cleanup_calls_operations_in_correct_order( save=False, node_type=mock.ANY, node_mode=NodeMode.ACTIVE, - skip_usr_conf_validation=True), + skip_user_conf_validation=True), mock.call.cleanup_fair_op(node_mode=NodeMode.ACTIVE, env=mock_env, prune=False), ] manager.assert_has_calls(expected_calls, 
any_order=False) From bec919eb04c2e5ca7c0598c8712bea00b1b952a1 Mon Sep 17 00:00:00 2001 From: Alex Sheverdin Date: Tue, 18 Nov 2025 12:15:42 +0000 Subject: [PATCH 267/332] Delete unused import --- node_cli/operations/base.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/node_cli/operations/base.py b/node_cli/operations/base.py index 28be076d..cbd0cbc7 100644 --- a/node_cli/operations/base.py +++ b/node_cli/operations/base.py @@ -72,7 +72,7 @@ remove_dynamic_containers, system_prune, ) -from node_cli.utils.helper import cleanup_dir_content, rm_dir, str_to_bool, run_cmd +from node_cli.utils.helper import cleanup_dir_content, rm_dir, str_to_bool from node_cli.utils.meta import CliMetaManager, FairCliMetaManager from node_cli.utils.node_type import NodeMode, NodeType from node_cli.utils.print_formatters import print_failed_requirements_checks From 371d71271c114df2a175b71b1238024e837b1fd8 Mon Sep 17 00:00:00 2001 From: Alex Sheverdin Date: Wed, 19 Nov 2025 20:23:36 +0000 Subject: [PATCH 268/332] Add pyproject.toml --- pyproject.toml | 64 ++++++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 64 insertions(+) create mode 100644 pyproject.toml diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 00000000..3aa139d4 --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,64 @@ +[build-system] +requires = ["setuptools>=75", "wheel"] +build-backend = "setuptools.build_meta" + +[project] +name = "node-cli" +version = "3.2.0" +description = "Node CLI tools" +readme = "README.md" +requires-python = ">=3.11" +license = { file = "LICENSE" } + +authors = [ + { name = "SKALE Labs", email = "support@skalelabs.com" } +] + +dependencies = [ + "click==8.1.7", + "distro==1.9.0", + "docker==6.0.1", + "texttable==1.6.7", + "python-dateutil==2.8.2", + "Jinja2==3.1.4", + "psutil==5.9.4", + "python-dotenv==0.21.0", + "terminaltables==3.1.10", + "requests==2.28.1", + "GitPython==3.1.41", + "packaging==23.0", + "python-debian==0.1.49", + "PyYAML==6.0.3", 
+ "pyOpenSSL==24.2.1", + "MarkupSafe==3.0.2", + "Flask==2.3.3", + "itsdangerous==2.1.2", + "cryptography==42.0.4", + "filelock==3.0.12", + "sh==1.14.2", + "python-crontab==2.6.0", + "requests-mock==1.12.1", + "redis==6.2.0", +] + +[project.optional-dependencies] +dev = [ + "ruff==0.14.0", + "bumpversion==0.6.0", + "pytest==8.4.2", + "pytest-cov==7.0.0", + "twine==4.0.2", + "mock==4.0.3", + "freezegun==1.5.5", + "PyInstaller>=6.14.0", +] + +[tool.setuptools] +package-dir = { "" = "." } + +[tool.setuptools.packages.find] +where = ["."] + +[tool.ruff] +line-length = 100 +target-version = "py311" From 0eeb193f0de9831b68a35c1cbf62b72b21d5af7e Mon Sep 17 00:00:00 2001 From: Alex Sheverdin Date: Thu, 20 Nov 2025 13:31:44 +0000 Subject: [PATCH 269/332] Add classifiers and keywords --- pyproject.toml | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index 3aa139d4..fa2c13cb 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -9,10 +9,17 @@ description = "Node CLI tools" readme = "README.md" requires-python = ">=3.11" license = { file = "LICENSE" } - +keywords = ["skale", "cli"] authors = [ { name = "SKALE Labs", email = "support@skalelabs.com" } ] +classifiers = [ + "Development Status :: 5 - Production/Stable", + "Intended Audience :: Developers", + "License :: OSI Approved :: GNU Affero General Public License v3", + "Natural Language :: English", + "Programming Language :: Python :: 3.11", +] dependencies = [ "click==8.1.7", From 798d2739d9a541b3223b3daa4ba2f8cd2f927fd2 Mon Sep 17 00:00:00 2001 From: Alex Sheverdin Date: Thu, 20 Nov 2025 13:32:55 +0000 Subject: [PATCH 270/332] Add homepage url --- pyproject.toml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/pyproject.toml b/pyproject.toml index fa2c13cb..b33b8efe 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -48,6 +48,9 @@ dependencies = [ "redis==6.2.0", ] +[project.urls] +Homepage = "https://github.com/skalenetwork/node-cli" + 
[project.optional-dependencies] dev = [ "ruff==0.14.0", From 1f458f5dcffb88f0bda362282fef9a39d8410fd4 Mon Sep 17 00:00:00 2001 From: Alex Sheverdin Date: Thu, 20 Nov 2025 13:34:20 +0000 Subject: [PATCH 271/332] Exclude the tests directory from the final package --- pyproject.toml | 1 + 1 file changed, 1 insertion(+) diff --git a/pyproject.toml b/pyproject.toml index b33b8efe..088b9059 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -68,6 +68,7 @@ package-dir = { "" = "." } [tool.setuptools.packages.find] where = ["."] +exclude = ["tests"] [tool.ruff] line-length = 100 From e6d4b126f1db5c6cb2d4d2d53820649c180be811 Mon Sep 17 00:00:00 2001 From: Alex Sheverdin Date: Thu, 20 Nov 2025 13:40:27 +0000 Subject: [PATCH 272/332] Set `include_package_data` to `True` --- pyproject.toml | 1 + 1 file changed, 1 insertion(+) diff --git a/pyproject.toml b/pyproject.toml index 088b9059..8486cf81 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -65,6 +65,7 @@ dev = [ [tool.setuptools] package-dir = { "" = "." 
} +include-package-data = true [tool.setuptools.packages.find] where = ["."] From 87ecf0d75b49f517151f681cb744a8b9a676ecac Mon Sep 17 00:00:00 2001 From: Alex Sheverdin Date: Thu, 20 Nov 2025 15:20:51 +0000 Subject: [PATCH 273/332] Bump Python version to 3.13 --- pyproject.toml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 8486cf81..2864eb25 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -7,7 +7,7 @@ name = "node-cli" version = "3.2.0" description = "Node CLI tools" readme = "README.md" -requires-python = ">=3.11" +requires-python = ">=3.13" license = { file = "LICENSE" } keywords = ["skale", "cli"] authors = [ @@ -18,7 +18,7 @@ classifiers = [ "Intended Audience :: Developers", "License :: OSI Approved :: GNU Affero General Public License v3", "Natural Language :: English", - "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.13", ] dependencies = [ @@ -73,4 +73,4 @@ exclude = ["tests"] [tool.ruff] line-length = 100 -target-version = "py311" +target-version = "py313" From 74c5719aa8352b80aadd8a6476ee0cb0caa57f78 Mon Sep 17 00:00:00 2001 From: Alex Sheverdin Date: Thu, 20 Nov 2025 18:55:59 +0000 Subject: [PATCH 274/332] Update dependencies --- pyproject.toml | 56 +++++++++++++++++++++++++------------------------- 1 file changed, 28 insertions(+), 28 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 2864eb25..bd45c729 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -22,30 +22,31 @@ classifiers = [ ] dependencies = [ - "click==8.1.7", + "click==8.3.1", "distro==1.9.0", - "docker==6.0.1", - "texttable==1.6.7", - "python-dateutil==2.8.2", - "Jinja2==3.1.4", - "psutil==5.9.4", - "python-dotenv==0.21.0", + "docker==7.1.0", + "texttable==1.7.0", + "python-dateutil==2.9.0", + "Jinja2==3.1.6", + "psutil==7.1.3", + "python-dotenv==1.0.1", "terminaltables==3.1.10", - "requests==2.28.1", - "GitPython==3.1.41", - "packaging==23.0", - "python-debian==0.1.49", - 
"PyYAML==6.0.3", - "pyOpenSSL==24.2.1", - "MarkupSafe==3.0.2", - "Flask==2.3.3", - "itsdangerous==2.1.2", - "cryptography==42.0.4", - "filelock==3.0.12", - "sh==1.14.2", - "python-crontab==2.6.0", + "requests==2.32.3", + "GitPython==3.1.43", + "packaging==24.1", + "python-debian==1.0.1", + "PyYAML==6.0.1", + "pyOpenSSL==25.3.0", + "MarkupSafe==2.1.5", + "Flask==3.0.3", + "itsdangerous==2.2.0", + "cryptography==45.0.7", + "filelock==3.15.4", + "sh==2.0.6", + "python-crontab==3.0.0", "requests-mock==1.12.1", - "redis==6.2.0", + "redis==5.0.7", + "PyInstaller==6.16.0", ] [project.urls] @@ -53,14 +54,13 @@ Homepage = "https://github.com/skalenetwork/node-cli" [project.optional-dependencies] dev = [ - "ruff==0.14.0", + "ruff==0.5.5", "bumpversion==0.6.0", - "pytest==8.4.2", - "pytest-cov==7.0.0", - "twine==4.0.2", - "mock==4.0.3", - "freezegun==1.5.5", - "PyInstaller>=6.14.0", + "pytest==8.2.2", + "pytest-cov==5.0.0", + "twine==5.1.1", + "mock==5.1.0", + "freezegun==1.5.1", ] [tool.setuptools] From 357d12d568a658d0a17685e03709e046c6d3ecd8 Mon Sep 17 00:00:00 2001 From: Alex Sheverdin Date: Thu, 20 Nov 2025 18:56:54 +0000 Subject: [PATCH 275/332] Delete outdated setup.py --- setup.py | 87 -------------------------------------------------------- 1 file changed, 87 deletions(-) delete mode 100644 setup.py diff --git a/setup.py b/setup.py deleted file mode 100644 index 5ea3e6ce..00000000 --- a/setup.py +++ /dev/null @@ -1,87 +0,0 @@ -import os -import re -from setuptools import find_packages, setup - - -def read(*parts): - path = os.path.join(os.path.dirname(__file__), *parts) - f = open(path, 'r') - return f.read() - - -def find_version(*file_paths): - version_file = read(*file_paths) - version_match = re.search(r"^__version__ = ['\"]([^'\"]*)['\"]", version_file, re.M) - if version_match: - return version_match.group(1) - raise RuntimeError("Couldn't parse version from file.") - - -extras_require = { - 'linter': [ - 'isort>=4.2.15,<5.10.2', - 'ruff==0.9.9', - ], - 'dev': 
[ - 'bumpversion==0.6.0', - 'pytest==8.3.2', - 'pytest-cov==5.0.0', - 'twine==4.0.2', - 'mock==4.0.3', - 'freezegun==1.2.2', - ], -} - -extras_require['dev'] = extras_require['linter'] + extras_require['dev'] - - -setup( - name='node-cli', - # *IMPORTANT*: Don't manually change the version here. - # Use the 'bumpversion' utility instead. - version=find_version('node_cli', 'cli', '__init__.py'), - include_package_data=True, - description='SKALE client tools', - long_description_markdown_filename='README.md', - author='SKALE Labs', - author_email='support@skalelabs.com', - url='https://github.com/skalenetwork/node-cli', - install_requires=[ - 'click==8.1.7', - 'PyInstaller==5.12.0', - 'distro==1.9.0', - 'docker==6.0.1', - 'texttable==1.6.7', - 'python-dateutil==2.8.2', - 'Jinja2==3.1.4', - 'psutil==5.9.4', - 'python-dotenv==0.21.0', - 'terminaltables==3.1.10', - 'requests==2.28.1', - 'GitPython==3.1.41', - 'packaging==23.0', - 'python-debian==0.1.49', - 'PyYAML==6.0', - 'pyOpenSSL==24.2.1', - 'MarkupSafe==3.0.2', - 'Flask==2.3.3', - 'itsdangerous==2.1.2', - 'cryptography==42.0.4', - 'filelock==3.0.12', - 'sh==1.14.2', - 'python-crontab==2.6.0', - 'requests-mock==1.12.1', - 'redis==6.2.0', - ], - python_requires='>=3.8,<4', - extras_require=extras_require, - keywords=['skale', 'cli'], - packages=find_packages(exclude=['tests']), - classifiers=[ - 'Development Status :: 5 - Production/Stable', - 'Intended Audience :: Developers', - 'License :: OSI Approved :: GNU Affero General Public License v3', - 'Natural Language :: English', - 'Programming Language :: Python :: 3.11', - ], -) From 0c1e0d853b6595822a109980bfe0e577ee52a84d Mon Sep 17 00:00:00 2001 From: Alex Sheverdin Date: Thu, 20 Nov 2025 19:16:23 +0000 Subject: [PATCH 276/332] Use `python:3.13-bookworm` --- Dockerfile | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/Dockerfile b/Dockerfile index abc1fcd3..a7c14258 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,4 +1,4 @@ -FROM 
python:3.11-bookworm +FROM python:3.13-bookworm ENV DEBIAN_FRONTEND=noninteractive RUN apt-get update && apt install -y \ @@ -10,8 +10,8 @@ RUN apt-get update && apt install -y \ libffi-dev \ swig \ iptables \ - nftables \ - python3-nftables \ + nftables \ + python3-nftables \ libxslt-dev \ kmod From 1768b73cf2d2fdf5e568c3a5a873ed20d45d65b5 Mon Sep 17 00:00:00 2001 From: Alex Sheverdin Date: Thu, 20 Nov 2025 19:17:59 +0000 Subject: [PATCH 277/332] Bump Python and actions version --- .github/workflows/test.yml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 42173eeb..5a2500aa 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -6,9 +6,9 @@ jobs: runs-on: ubuntu-22.04 strategy: matrix: - python-version: [3.11] + python-version: ['3.13'] steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v4 with: submodules: true @@ -16,7 +16,7 @@ jobs: run: git submodule update --init - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v1 + uses: actions/setup-python@v5 with: python-version: ${{ matrix.python-version }} From a80db48360f58d69847cd68c4f186a960ea403bd Mon Sep 17 00:00:00 2001 From: Alex Sheverdin Date: Thu, 20 Nov 2025 19:19:14 +0000 Subject: [PATCH 278/332] Delete unused ruff.toml --- ruff.toml | 8 -------- 1 file changed, 8 deletions(-) delete mode 100644 ruff.toml diff --git a/ruff.toml b/ruff.toml deleted file mode 100644 index 978b38de..00000000 --- a/ruff.toml +++ /dev/null @@ -1,8 +0,0 @@ -line-length = 100 - -[format] -quote-style = "single" - -[lint] -# Add the `line-too-long` rule to the enforced rule set. 
-extend-select = ["E501"] From 239bcf1eb2dfffecbd592cabf6bd44a11fc1c6b6 Mon Sep 17 00:00:00 2001 From: Alex Sheverdin Date: Thu, 20 Nov 2025 19:39:27 +0000 Subject: [PATCH 279/332] Handle `OSError` exception --- node_cli/utils/global_config.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/node_cli/utils/global_config.py b/node_cli/utils/global_config.py index 4347c6b0..974c573a 100644 --- a/node_cli/utils/global_config.py +++ b/node_cli/utils/global_config.py @@ -52,6 +52,10 @@ def generate_g_config_file(g_skale_dir: str, g_skale_conf_filepath: str) -> dict json.dump(g_config, outfile, indent=4) except PermissionError as e: logger.exception(e) - print('No permissions to write into /etc directory') + print(f'No permissions to write into {g_skale_dir} directory') + sys.exit(7) + except OSError as e: + logger.exception(e) + print(f'Error writing to {g_skale_conf_filepath}: {e}') sys.exit(7) return g_config From 37781b98d5db441330b9082ae5c475b4e42edd75 Mon Sep 17 00:00:00 2001 From: Alex Sheverdin Date: Fri, 21 Nov 2025 14:07:53 +0000 Subject: [PATCH 280/332] Use 'Group' instead of deprecated 'MultiCommand' --- node_cli/main.py | 36 +++++++++++++++++++----------------- 1 file changed, 19 insertions(+), 17 deletions(-) diff --git a/node_cli/main.py b/node_cli/main.py index a18b4d67..479099ca 100644 --- a/node_cli/main.py +++ b/node_cli/main.py @@ -54,9 +54,20 @@ logger = logging.getLogger(__name__) -@click.group() -def cli(): - pass +@click.group(invoke_without_command=True) +@click.pass_context +def cli(ctx): + if ctx.invoked_subcommand is None: + print(ctx.get_help()) + ctx.exit(0) + + start_time = time.time() + init_logs_dir() + init_default_logger() + args = sys.argv + # todo: hide secret variables (passwords, private keys) + logger.debug(f'cmd: {" ".join(str(x) for x in args)}, v.{__version__}') + ctx.call_on_close(lambda: logger.debug('Execution time: %d seconds', time.time() - start_time)) @cli.command('version', help='Show SKALE node CLI 
version') @@ -84,10 +95,9 @@ def info(): ) -def get_sources_list() -> List[click.MultiCommand]: +def get_command_groups() -> List[click.Group]: if TYPE == NodeType.FAIR: return [ - cli, logs_cli, fair_boot_cli, fair_node_cli, @@ -99,7 +109,6 @@ def get_sources_list() -> List[click.MultiCommand]: ] else: return [ - cli, health_cli, schains_cli, logs_cli, @@ -122,19 +131,12 @@ def handle_exception(exc_type, exc_value, exc_traceback): sys.excepthook = handle_exception if __name__ == '__main__': - start_time = time.time() - init_logs_dir() - init_default_logger() - args = sys.argv - # todo: hide secret variables (passwords, private keys) - logger.debug(f'cmd: {" ".join(str(x) for x in args)}, v.{__version__}') - sources = get_sources_list() - cmd_collection = click.CommandCollection(sources=sources) + for group in get_command_groups(): + for cmd_name, cmd_obj in group.commands.items(): + cli.add_command(cmd_obj, cmd_name) try: - cmd_collection() + cli() except Exception as err: traceback.print_exc() - logger.debug('Execution time: %d seconds', time.time() - start_time) error_exit(err) - logger.debug('Execution time: %d seconds', time.time() - start_time) From 2c77239178b5398413dd34c86af994ffaad9bcb2 Mon Sep 17 00:00:00 2001 From: Alex Sheverdin Date: Fri, 21 Nov 2025 14:24:19 +0000 Subject: [PATCH 281/332] Disable warnings from static analyzer --- node_cli/main.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/node_cli/main.py b/node_cli/main.py index 479099ca..9016f79d 100644 --- a/node_cli/main.py +++ b/node_cli/main.py @@ -97,7 +97,7 @@ def info(): def get_command_groups() -> List[click.Group]: if TYPE == NodeType.FAIR: - return [ + return [ # type: ignore logs_cli, fair_boot_cli, fair_node_cli, @@ -108,7 +108,7 @@ def get_command_groups() -> List[click.Group]: ssl_cli, ] else: - return [ + return [ # type: ignore health_cli, schains_cli, logs_cli, From 6c5b526a42b4fe8a26e33cbe9446930113fa8f54 Mon Sep 17 00:00:00 2001 From: Alex Sheverdin 
Date: Fri, 21 Nov 2025 17:20:02 +0000 Subject: [PATCH 282/332] Replace `str_to_bool` with a new implementation without deleted `distutils` dependency --- node_cli/utils/helper.py | 12 ++++++++---- 1 file changed, 8 insertions(+), 4 deletions(-) diff --git a/node_cli/utils/helper.py b/node_cli/utils/helper.py index c9c9bfb0..dd1a7151 100644 --- a/node_cli/utils/helper.py +++ b/node_cli/utils/helper.py @@ -17,8 +17,6 @@ # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see . -import distutils -import distutils.util import ipaddress import json import logging @@ -145,8 +143,14 @@ def get_username(): return os.environ.get('USERNAME') or os.environ.get('USER') -def str_to_bool(val): - return bool(distutils.util.strtobool(val)) +def str_to_bool(val: str) -> bool: + val = val.lower() + if val in ('y', 'yes', 't', 'true', 'on', '1'): + return True + elif val in ('n', 'no', 'f', 'false', 'off', '0'): + return False + else: + raise ValueError(f'Invalid truth value {val!r}') def error_exit(error_payload: Any, exit_code: CLIExitCodes = CLIExitCodes.FAILURE) -> NoReturn: From f4c68cc04a5222ca04cfb2ae6e21eb8580594d10 Mon Sep 17 00:00:00 2001 From: Alex Sheverdin Date: Fri, 21 Nov 2025 18:15:44 +0000 Subject: [PATCH 283/332] Update all dependencies to their latest versions --- pyproject.toml | 24 ++++++++++++------------ 1 file changed, 12 insertions(+), 12 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index bd45c729..323b052a 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -29,23 +29,23 @@ dependencies = [ "python-dateutil==2.9.0", "Jinja2==3.1.6", "psutil==7.1.3", - "python-dotenv==1.0.1", + "python-dotenv==1.2.1", "terminaltables==3.1.10", - "requests==2.32.3", - "GitPython==3.1.43", - "packaging==24.1", + "requests==2.32.5", + "GitPython==3.1.45", + "packaging==25.0", "python-debian==1.0.1", - "PyYAML==6.0.1", + "PyYAML==6.0.3", "pyOpenSSL==25.3.0", - "MarkupSafe==2.1.5", - "Flask==3.0.3", + 
"MarkupSafe==3.0.3", + "Flask==3.1.2", "itsdangerous==2.2.0", - "cryptography==45.0.7", - "filelock==3.15.4", - "sh==2.0.6", - "python-crontab==3.0.0", + "cryptography==46.0.3", + "filelock==3.20.0", + "sh==2.2.2", + "python-crontab==3.3.0", "requests-mock==1.12.1", - "redis==5.0.7", + "redis==7.1.0", "PyInstaller==6.16.0", ] From 108257d3058b52446427113cbf09d4dc42bc7757 Mon Sep 17 00:00:00 2001 From: Alex Sheverdin Date: Fri, 21 Nov 2025 18:17:23 +0000 Subject: [PATCH 284/332] Bump `setuptools` version to 80.9.0 --- Dockerfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Dockerfile b/Dockerfile index a7c14258..1b9162ab 100644 --- a/Dockerfile +++ b/Dockerfile @@ -25,5 +25,5 @@ ENV PATH=/app/buildvenv/bin:$PATH ENV PYTHONPATH="{PYTHONPATH}:/usr/lib/python3/dist-packages" RUN pip install --upgrade pip && \ - pip install wheel setuptools==63.2.0 && \ + pip install wheel setuptools==80.9.0 && \ pip install -e '.[dev]' From e3dbf31f820479ed044bcec1cb4a52c7a36efef1 Mon Sep 17 00:00:00 2001 From: Alex Sheverdin Date: Fri, 21 Nov 2025 18:36:08 +0000 Subject: [PATCH 285/332] Update `dev` dependencies to their latest versions --- pyproject.toml | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 323b052a..47914c7e 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -54,13 +54,13 @@ Homepage = "https://github.com/skalenetwork/node-cli" [project.optional-dependencies] dev = [ - "ruff==0.5.5", + "ruff==0.14.6", "bumpversion==0.6.0", - "pytest==8.2.2", - "pytest-cov==5.0.0", - "twine==5.1.1", - "mock==5.1.0", - "freezegun==1.5.1", + "pytest==9.0.1", + "pytest-cov==7.0.0", + "twine==6.2.0", + "mock==5.2.0", + "freezegun==1.5.5", ] [tool.setuptools] From 258d9646f99a2779951bbf4f3926889949ce08be Mon Sep 17 00:00:00 2001 From: Alex Sheverdin Date: Fri, 21 Nov 2025 19:15:20 +0000 Subject: [PATCH 286/332] Use builder, slim image and uv --- Dockerfile | 40 +++++++++++++++++++++------------------- 1 
file changed, 21 insertions(+), 19 deletions(-) diff --git a/Dockerfile b/Dockerfile index 1b9162ab..91deca29 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,29 +1,31 @@ -FROM python:3.13-bookworm +FROM python:3.13-slim-bookworm AS builder -ENV DEBIAN_FRONTEND=noninteractive -RUN apt-get update && apt install -y \ +COPY --from=ghcr.io/astral-sh/uv:latest /uv /usr/local/bin/uv + +WORKDIR /app + +COPY pyproject.toml ./ + +RUN uv pip install --system --no-cache ".[dev]" + +FROM python:3.13-slim-bookworm + +RUN apt-get update && \ + apt-get install -y --no-install-recommends \ git \ - build-essential \ - software-properties-common \ - zlib1g-dev \ - libssl-dev \ - libffi-dev \ - swig \ iptables \ nftables \ python3-nftables \ - libxslt-dev \ - kmod - + kmod \ + wget && \ + rm -rf /var/lib/apt/lists/* -RUN mkdir /app WORKDIR /app -COPY . . +COPY --from=builder /usr/local/lib/python3.13/site-packages /usr/local/lib/python3.13/site-packages +COPY --from=builder /usr/local/bin /usr/local/bin -ENV PATH=/app/buildvenv/bin:$PATH -ENV PYTHONPATH="{PYTHONPATH}:/usr/lib/python3/dist-packages" +COPY . . 
-RUN pip install --upgrade pip && \ - pip install wheel setuptools==80.9.0 && \ - pip install -e '.[dev]' +ENV PYTHONPATH="/app:/usr/lib/python3/dist-packages" +ENV COLUMNS=80 From 52e8a60d1f3bd6e30f3bd54cdf801f41c06f79f8 Mon Sep 17 00:00:00 2001 From: Alex Sheverdin Date: Fri, 21 Nov 2025 19:41:24 +0000 Subject: [PATCH 287/332] Install `binutils` --- Dockerfile | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/Dockerfile b/Dockerfile index 91deca29..7fc37545 100644 --- a/Dockerfile +++ b/Dockerfile @@ -17,7 +17,8 @@ RUN apt-get update && \ nftables \ python3-nftables \ kmod \ - wget && \ + wget \ + binutils && \ rm -rf /var/lib/apt/lists/* WORKDIR /app From 3ff9c2faa839a37a74bca5e279fc9e730dbc62b9 Mon Sep 17 00:00:00 2001 From: Alex Sheverdin Date: Mon, 24 Nov 2025 12:57:27 +0000 Subject: [PATCH 288/332] Bump `python-dateutil` version to 2.9.0.post0 --- pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index 47914c7e..970112dd 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -26,7 +26,7 @@ dependencies = [ "distro==1.9.0", "docker==7.1.0", "texttable==1.7.0", - "python-dateutil==2.9.0", + "python-dateutil==2.9.0.post0", "Jinja2==3.1.6", "psutil==7.1.3", "python-dotenv==1.2.1", From bff05d8c20ca88180a4fd12eb8d37aba76e46468 Mon Sep 17 00:00:00 2001 From: Alex Sheverdin Date: Mon, 24 Nov 2025 12:58:15 +0000 Subject: [PATCH 289/332] Set `quote-style` to "single" --- pyproject.toml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/pyproject.toml b/pyproject.toml index 970112dd..608092b3 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -74,3 +74,6 @@ exclude = ["tests"] [tool.ruff] line-length = 100 target-version = "py313" + +[tool.ruff.format] +quote-style = "single" \ No newline at end of file From e791fb8fa87b7d8a81ae52861cbdc106aaa4715e Mon Sep 17 00:00:00 2001 From: Alex Sheverdin Date: Mon, 24 Nov 2025 13:09:06 +0000 Subject: [PATCH 290/332] Use `uv` for installing 
dependencies --- .github/workflows/test.yml | 12 ++++++++++-- 1 file changed, 10 insertions(+), 2 deletions(-) diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 5a2500aa..8f348515 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -20,6 +20,15 @@ jobs: with: python-version: ${{ matrix.python-version }} + - name: Install uv + run: curl -LsSf https://astral.sh/uv/install.sh | sh + + - name: Cache uv + uses: actions/cache@v4 + with: + path: ~/.cache/uv + key: ${{ runner.os }}-${{ matrix.python-version }}-${{ hashFiles('pyproject.toml') }} + - name: Install ubuntu dependencies run: | sudo apt-get update @@ -27,8 +36,7 @@ jobs: - name: Install python dependencies run: | - python -m pip install --upgrade pip - pip install -e ".[dev]" + uv pip install -e ".[dev]" - name: Generate info run: bash ./scripts/generate_info.sh 1.0.0 my-branch skale From 4bd90cc26ce747f7dbb0ed5b47f8bb6e2997d7f8 Mon Sep 17 00:00:00 2001 From: Alex Sheverdin Date: Mon, 24 Nov 2025 13:37:53 +0000 Subject: [PATCH 291/332] Create `venv` --- .github/workflows/test.yml | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 8f348515..b985132b 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -36,6 +36,7 @@ jobs: - name: Install python dependencies run: | + uv venv uv pip install -e ".[dev]" - name: Generate info @@ -43,7 +44,7 @@ jobs: - name: Check with ruff run: | - ruff check + uv run ruff check - name: Build binary - skale run: | @@ -76,8 +77,8 @@ jobs: - name: Run tests run: | export PYTHONPATH=${PYTHONPATH}:/usr/lib/python3/dist-packages/ - bash ./scripts/run_tests.sh + uv run bash ./scripts/run_tests.sh - name: Run nftables tests run: | - scripts/run_nftables_test.sh + uv run scripts/run_nftables_test.sh From 623e821852e6b6b8f70ed31bd146b17ed8ce39db Mon Sep 17 00:00:00 2001 From: Alex Sheverdin Date: Mon, 24 Nov 2025 13:45:35 +0000 Subject: 
[PATCH 292/332] Run `prepare test build` in `venv` --- .github/workflows/test.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index b985132b..e5c1dd8f 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -68,7 +68,7 @@ jobs: - name: Run prepare test build run: | - bash scripts/build.sh test test skale + uv run bash scripts/build.sh test test skale - name: Run redis run: | From c72148b1561f7a87fecdaa0aa0a3e3abf12e47be Mon Sep 17 00:00:00 2001 From: Alex Sheverdin Date: Mon, 24 Nov 2025 16:25:38 +0000 Subject: [PATCH 293/332] Bump Python version to 3.13 --- .github/workflows/publish.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/publish.yml b/.github/workflows/publish.yml index 660ec07d..95797b24 100644 --- a/.github/workflows/publish.yml +++ b/.github/workflows/publish.yml @@ -79,10 +79,10 @@ jobs: with: submodules: true - - name: Set up Python 3.11 + - name: Set up Python 3.13 uses: actions/setup-python@v5 with: - python-version: 3.11 + python-version: 3.13 - name: Install ubuntu dependencies if: matrix.os == 'ubuntu-22.04' From 7f0f2f0bad639d7e573459d7fab4c9b4f9075cf1 Mon Sep 17 00:00:00 2001 From: Alex Sheverdin Date: Mon, 24 Nov 2025 16:26:19 +0000 Subject: [PATCH 294/332] Use importlib.metadata to get package version --- node_cli/cli/__init__.py | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/node_cli/cli/__init__.py b/node_cli/cli/__init__.py index 46419e54..c081741c 100644 --- a/node_cli/cli/__init__.py +++ b/node_cli/cli/__init__.py @@ -1,4 +1,9 @@ -__version__ = '3.2.0' +from importlib.metadata import version, PackageNotFoundError + +try: + __version__ = version("node-cli") +except PackageNotFoundError: + __version__ = "0.0.0-dev" if __name__ == '__main__': print(__version__) From 6c94e9a9fb2a3093cdcb7e5a916e1e39368525f9 Mon Sep 17 00:00:00 2001 From: Alex Sheverdin Date: Mon, 24 Nov 2025 
18:57:02 +0000 Subject: [PATCH 295/332] Get version from 'pyproject.toml' --- scripts/set_versions_ga.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/set_versions_ga.sh b/scripts/set_versions_ga.sh index ddcc7aaa..7333357f 100644 --- a/scripts/set_versions_ga.sh +++ b/scripts/set_versions_ga.sh @@ -7,7 +7,7 @@ echo PROJECT_DIR: $GITHUB_WORKSPACE export BRANCH=${GITHUB_REF##*/} echo "Branch $BRANCH" -export VERSION=$(python setup.py --version) +export VERSION=$(python -c "import tomllib; print(tomllib.load(open('pyproject.toml', 'rb'))['project']['version'])") export VERSION=$(bash ./helper-scripts/calculate_version.sh) echo "VERSION=$VERSION" >> $GITHUB_ENV From 0fdbbdd0dfc137dbb738735fd705d46ea5be1a5e Mon Sep 17 00:00:00 2001 From: Alex Sheverdin Date: Mon, 24 Nov 2025 19:08:46 +0000 Subject: [PATCH 296/332] Modernize and simplify the release workflow --- .github/workflows/publish.yml | 147 ++++++++++------------------------ 1 file changed, 42 insertions(+), 105 deletions(-) diff --git a/.github/workflows/publish.yml b/.github/workflows/publish.yml index 95797b24..c57d3bc1 100644 --- a/.github/workflows/publish.yml +++ b/.github/workflows/publish.yml @@ -1,140 +1,77 @@ -name: Build and publish +name: Build and Publish + on: pull_request: types: [closed] branches: - - master - develop - beta - stable - 'v*.*.*' - - 'fair' - - 'fair-*' jobs: - create_release: + build_and_release: if: github.event.pull_request.merged - name: Create release + name: Build and Create Release runs-on: ubuntu-22.04 - outputs: - upload_url: ${{ steps.create_release.outputs.upload_url }} - version: ${{ steps.export_outputs.outputs.version }} - branch: ${{ steps.export_outputs.outputs.branch }} + permissions: + contents: write steps: - name: Checkout code uses: actions/checkout@v4 with: submodules: true - - name: Checkout submodules - run: git submodule update --init --recursive - - - name: Install ubuntu dependencies - run: | - sudo apt-get update - sudo apt-get 
install python-setuptools - - - name: Set Versions - run: | - bash ./scripts/set_versions_ga.sh - - - name: Set release - run: | - if [[ "$BRANCH" == "stable" ]]; then - export PRERELEASE=false - else - export PRERELEASE=true - fi - echo "PRERELEASE=$PRERELEASE" >> $GITHUB_ENV - - - name: Create Release - id: create_release - uses: actions/create-release@v1 - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - with: - tag_name: ${{ env.VERSION }} - release_name: ${{ env.VERSION }} - draft: false - prerelease: ${{ env.PRERELEASE }} - - - name: Export outputs - id: export_outputs - run: | - echo "::set-output name=version::$VERSION" - echo "::set-output name=branch::$BRANCH" - - build_and_publish: - if: github.event.pull_request.merged - needs: create_release - name: Build and publish ${{ matrix.build_type }} for ${{ matrix.os }} - runs-on: ${{ matrix.os }} - strategy: - matrix: - os: [ubuntu-22.04] - build_type: [skale, fair] - steps: - - name: Checkout code - uses: actions/checkout@v4 - with: - submodules: true - - name: Set up Python 3.13 - uses: actions/setup-python@v5 + uses: actions/setup-python@v5 with: python-version: 3.13 - - name: Install ubuntu dependencies - if: matrix.os == 'ubuntu-22.04' + - name: Calculate Version + id: versioning run: | - sudo apt-get update - - - name: Ensure submodules are updated - run: git submodule update --init --recursive - - - name: Define Asset Name - id: asset_details + bash ./scripts/set_versions_ga.sh + + - name: Determine Prerelease status + id: release_info run: | - ASSET_BASE_NAME="skale-${{ needs.create_release.outputs.version }}-Linux-x86_64" - if [[ "${{ matrix.build_type }}" == "skale" ]]; then - echo "FINAL_ASSET_NAME=${ASSET_BASE_NAME}" >> $GITHUB_OUTPUT + if [[ "${{ env.BRANCH }}" == "stable" ]]; then + echo "prerelease=false" >> $GITHUB_OUTPUT else - echo "FINAL_ASSET_NAME=${ASSET_BASE_NAME}-${{ matrix.build_type }}" >> $GITHUB_OUTPUT + echo "prerelease=true" >> $GITHUB_OUTPUT fi - - name: Build ${{ 
matrix.build_type }} release binary + - name: Build binaries + id: build run: | mkdir -p ${{ github.workspace }}/dist docker build . -t node-cli-builder + docker run --rm -v ${{ github.workspace }}/dist:/app/dist node-cli-builder \ - bash scripts/build.sh ${{ needs.create_release.outputs.version }} ${{ needs.create_release.outputs.branch }} ${{ matrix.build_type }} - echo "Contents of dist directory:" - ls -altr ${{ github.workspace }}/dist/ - docker rm -f $(docker ps -aq) || true - - - name: Save sha512sum for ${{ steps.asset_details.outputs.FINAL_ASSET_NAME }} + bash scripts/build.sh ${{ env.VERSION }} ${{ env.BRANCH }} skale + + docker run --rm -v ${{ github.workspace }}/dist:/app/dist node-cli-builder \ + bash scripts/build.sh ${{ env.VERSION }} ${{ env.BRANCH }} fair + + echo "dist_path=${{ github.workspace }}/dist" >> $GITHUB_OUTPUT + + - name: Generate checksums run: | - cd ${{ github.workspace }}/dist - sha512sum ${{ steps.asset_details.outputs.FINAL_ASSET_NAME }} > ${{ steps.asset_details.outputs.FINAL_ASSET_NAME }}.sha512sum - echo "Checksum file created: ${{ steps.asset_details.outputs.FINAL_ASSET_NAME }}.sha512sum" - cat ${{ steps.asset_details.outputs.FINAL_ASSET_NAME }}.sha512sum - - - name: Upload release binary (${{ matrix.build_type }}) - uses: actions/upload-release-asset@v1 - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + cd ${{ steps.build.outputs.dist_path }} + for file in skale-*; do + sha512sum "$file" > "$file.sha512" + done + echo "Checksums generated:" + ls -l *.sha512 + + - name: Create GitHub Release + uses: softprops/action-gh-release@v2 with: - upload_url: ${{ needs.create_release.outputs.upload_url }} - asset_path: ${{ github.workspace }}/dist/${{ steps.asset_details.outputs.FINAL_ASSET_NAME }} - asset_name: ${{ steps.asset_details.outputs.FINAL_ASSET_NAME }} - asset_content_type: application/octet-stream - - - name: Upload release checksum (${{ matrix.build_type }}) - uses: actions/upload-release-asset@v1 + tag_name: ${{ 
env.VERSION }} + name: Release ${{ env.VERSION }} + draft: false + prerelease: ${{ steps.release_info.outputs.prerelease }} + files: | + ${{ steps.build.outputs.dist_path }}/skale-* env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - with: - upload_url: ${{ needs.create_release.outputs.upload_url }} - asset_path: ${{ github.workspace }}/dist/${{ steps.asset_details.outputs.FINAL_ASSET_NAME }}.sha512sum - asset_name: ${{ steps.asset_details.outputs.FINAL_ASSET_NAME }}.sha512 - asset_content_type: text/plain \ No newline at end of file From 1fc9118e50d8971695d29d16ca17d17b2900f009 Mon Sep 17 00:00:00 2001 From: Alex Sheverdin Date: Mon, 24 Nov 2025 19:14:54 +0000 Subject: [PATCH 297/332] Add new branch for new release workflow testing --- .github/workflows/publish.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/publish.yml b/.github/workflows/publish.yml index c57d3bc1..bc6ef0ce 100644 --- a/.github/workflows/publish.yml +++ b/.github/workflows/publish.yml @@ -5,6 +5,7 @@ on: types: [closed] branches: - develop + - migrate-to-pyproject - beta - stable - 'v*.*.*' From cac1d9b54dfbdba14c0e7fac07c9a91db2707d8d Mon Sep 17 00:00:00 2001 From: Alex Sheverdin Date: Mon, 24 Nov 2025 19:42:07 +0000 Subject: [PATCH 298/332] Generate `generate_release_notes` --- .github/workflows/publish.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/publish.yml b/.github/workflows/publish.yml index bc6ef0ce..37aeff40 100644 --- a/.github/workflows/publish.yml +++ b/.github/workflows/publish.yml @@ -72,6 +72,7 @@ jobs: name: Release ${{ env.VERSION }} draft: false prerelease: ${{ steps.release_info.outputs.prerelease }} + generate_release_notes: true files: | ${{ steps.build.outputs.dist_path }}/skale-* env: From a7d74799002d455330b69e148ff77ab7b3b925a6 Mon Sep 17 00:00:00 2001 From: Alex Sheverdin Date: Tue, 25 Nov 2025 12:21:48 +0000 Subject: [PATCH 299/332] Remove test branch and improve task names --- .github/workflows/publish.yml | 11 
+++++------ 1 file changed, 5 insertions(+), 6 deletions(-) diff --git a/.github/workflows/publish.yml b/.github/workflows/publish.yml index 37aeff40..dd86ba23 100644 --- a/.github/workflows/publish.yml +++ b/.github/workflows/publish.yml @@ -1,11 +1,10 @@ -name: Build and Publish +name: Build and publish on: pull_request: types: [closed] branches: - develop - - migrate-to-pyproject - beta - stable - 'v*.*.*' @@ -13,7 +12,7 @@ on: jobs: build_and_release: if: github.event.pull_request.merged - name: Build and Create Release + name: Build and create release runs-on: ubuntu-22.04 permissions: contents: write @@ -28,12 +27,12 @@ jobs: with: python-version: 3.13 - - name: Calculate Version + - name: Calculate version id: versioning run: | bash ./scripts/set_versions_ga.sh - - name: Determine Prerelease status + - name: Determine prerelease status id: release_info run: | if [[ "${{ env.BRANCH }}" == "stable" ]]; then @@ -65,7 +64,7 @@ jobs: echo "Checksums generated:" ls -l *.sha512 - - name: Create GitHub Release + - name: Create GitHub release uses: softprops/action-gh-release@v2 with: tag_name: ${{ env.VERSION }} From 46baadec937dc2070744f828f70c8a3d6f93d33b Mon Sep 17 00:00:00 2001 From: Alex Sheverdin Date: Tue, 25 Nov 2025 13:11:41 +0000 Subject: [PATCH 300/332] Build `node-cli-builder` only once --- .github/workflows/test.yml | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index e5c1dd8f..5f589d47 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -46,10 +46,12 @@ jobs: run: | uv run ruff check + - name: Build docker image + run: docker build . -t node-cli-builder + - name: Build binary - skale run: | mkdir -p ./dist - docker build . 
-t node-cli-builder docker run -v /home/ubuntu/dist:/app/dist node-cli-builder bash scripts/build.sh test test skale docker rm -f $(docker ps -aq) @@ -59,7 +61,6 @@ jobs: - name: Build binary - fair run: | mkdir -p ./dist - docker build . -t node-cli-builder docker run -v /home/ubuntu/dist:/app/dist node-cli-builder bash scripts/build.sh test test fair docker rm -f $(docker ps -aq) From 94122eabd8cd8a4605d5e3d13c715f8799bded96 Mon Sep 17 00:00:00 2001 From: Alex Sheverdin Date: Tue, 25 Nov 2025 13:52:12 +0000 Subject: [PATCH 301/332] Make PyInstaller spec paths absolute --- main.spec | 9 ++++----- 1 file changed, 4 insertions(+), 5 deletions(-) diff --git a/main.spec b/main.spec index e3844bc1..8b7da407 100644 --- a/main.spec +++ b/main.spec @@ -1,16 +1,15 @@ # -*- mode: python -*- -import importlib.util - +import os block_cipher = None a = Analysis( ['node_cli/main.py'], - pathex=['.'], + pathex=[SPECPATH], datas=[ - ("./text.yml", "data"), - ("./datafiles/skaled-ssl-test", "data/datafiles") + (os.path.join(SPECPATH, "text.yml"), "data"), + (os.path.join(SPECPATH, "datafiles/skaled-ssl-test"), "data/datafiles") ], hiddenimports=[], hookspath=[], From e0b77cd90078c6dd2aaca52e2ac7bf5e93098061 Mon Sep 17 00:00:00 2001 From: Alex Sheverdin Date: Tue, 25 Nov 2025 13:54:03 +0000 Subject: [PATCH 302/332] Remove `PackageNotFoundError` handling --- node_cli/cli/__init__.py | 7 ++----- 1 file changed, 2 insertions(+), 5 deletions(-) diff --git a/node_cli/cli/__init__.py b/node_cli/cli/__init__.py index c081741c..9a2285f7 100644 --- a/node_cli/cli/__init__.py +++ b/node_cli/cli/__init__.py @@ -1,9 +1,6 @@ -from importlib.metadata import version, PackageNotFoundError +from importlib.metadata import version -try: - __version__ = version("node-cli") -except PackageNotFoundError: - __version__ = "0.0.0-dev" +__version__ = version("node-cli") if __name__ == '__main__': print(__version__) From 89a28c4af1bc8bd1281b078acfd5b2082f35fce6 Mon Sep 17 00:00:00 2001 From: Dmytro Date: 
Fri, 26 Dec 2025 20:08:27 +0000 Subject: [PATCH 303/332] Fix skale passive node options --- helper-scripts | 2 +- node_cli/operations/base.py | 2 +- node_cli/utils/docker_utils.py | 5 +---- node_cli/utils/meta.py | 2 +- 4 files changed, 4 insertions(+), 7 deletions(-) diff --git a/helper-scripts b/helper-scripts index 808c768f..c1f67269 160000 --- a/helper-scripts +++ b/helper-scripts @@ -1 +1 @@ -Subproject commit 808c768feebfa99d9148e076b5b6b24b1b340734 +Subproject commit c1f67269955126ac400c2445d2aa81f1a387d964 diff --git a/node_cli/operations/base.py b/node_cli/operations/base.py index cbd0cbc7..94139959 100644 --- a/node_cli/operations/base.py +++ b/node_cli/operations/base.py @@ -343,7 +343,7 @@ def update_passive(env_filepath: str, env: Dict) -> bool: meta_manager.update_meta( VERSION, env['NODE_VERSION'], - env['DOCKER_LVMPY_VERSION'], + None, distro.id(), distro.version(), ) diff --git a/node_cli/utils/docker_utils.py b/node_cli/utils/docker_utils.py index 825886e6..9aa95f4f 100644 --- a/node_cli/utils/docker_utils.py +++ b/node_cli/utils/docker_utils.py @@ -77,10 +77,7 @@ 'boot-api': 'sk_boot_api', } -BASE_PASSIVE_COMPOSE_SERVICES = { - 'admin': 'sk_admin', - 'nginx': 'sk_nginx', -} +BASE_PASSIVE_COMPOSE_SERVICES = {'admin': 'sk_admin', 'nginx': 'sk_nginx', **REDIS_SERVICE_DICT} BASE_PASSIVE_FAIR_COMPOSE_SERVICES = { 'admin': 'sk_admin', diff --git a/node_cli/utils/meta.py b/node_cli/utils/meta.py index bb8ad8af..651efc1e 100644 --- a/node_cli/utils/meta.py +++ b/node_cli/utils/meta.py @@ -26,7 +26,7 @@ def asdict(self) -> dict: @dataclass class CliMeta(CliMetaBase): - docker_lvmpy_version: str = DEFAULT_DOCKER_LVMPY_VERSION + docker_lvmpy_version: str | None = DEFAULT_DOCKER_LVMPY_VERSION def asdict(self) -> dict: return { From 1f953bb0c8b37db54341343c1a6512035a68f08c Mon Sep 17 00:00:00 2001 From: Alex Sheverdin Date: Mon, 29 Dec 2025 17:49:19 +0000 Subject: [PATCH 304/332] Move the configuration from `pytest.ini` to `pyproject.toml` --- pyproject.toml 
| 11 ++++++++++- pytest.ini | 6 ------ 2 files changed, 10 insertions(+), 7 deletions(-) delete mode 100644 pytest.ini diff --git a/pyproject.toml b/pyproject.toml index 608092b3..cf813bfa 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -76,4 +76,13 @@ line-length = 100 target-version = "py313" [tool.ruff.format] -quote-style = "single" \ No newline at end of file +quote-style = "single" + +[tool.pytest.ini_options] +log_cli = false +log_cli_level = "INFO" +log_cli_format = "%(asctime)s [%(levelname)8s] %(message)s (%(filename)s:%(lineno)s)" +log_cli_date_format = "%Y-%m-%d %H:%M:%S" +filterwarnings = [ + "ignore::DeprecationWarning", +] diff --git a/pytest.ini b/pytest.ini deleted file mode 100644 index 5785bf3f..00000000 --- a/pytest.ini +++ /dev/null @@ -1,6 +0,0 @@ -[pytest] -log_cli = 0 -log_cli_level = INFO -log_cli_format = %(asctime)s [%(levelname)8s] %(message)s (%(filename)s:%(lineno)s) -log_cli_date_format=%Y-%m-%d %H:%M:%S -filterwarnings = ignore::DeprecationWarning From 4923b5478a1d8b1ae2104337d4c6fa3c40f9cb75 Mon Sep 17 00:00:00 2001 From: Alex Sheverdin Date: Tue, 30 Dec 2025 20:33:22 +0000 Subject: [PATCH 305/332] Remove PASSIVE_COMPOSE_PATH --- node_cli/configs/__init__.py | 1 - node_cli/utils/docker_utils.py | 5 +---- 2 files changed, 1 insertion(+), 5 deletions(-) diff --git a/node_cli/configs/__init__.py b/node_cli/configs/__init__.py index ad4c2216..35d776d8 100644 --- a/node_cli/configs/__init__.py +++ b/node_cli/configs/__init__.py @@ -55,7 +55,6 @@ SGX_CERTIFICATES_DIR_NAME = 'sgx_certs' COMPOSE_PATH = os.path.join(CONTAINER_CONFIG_PATH, 'docker-compose.yml') -PASSIVE_COMPOSE_PATH = os.path.join(CONTAINER_CONFIG_PATH, 'docker-compose-passive.yml') FAIR_COMPOSE_PATH = os.path.join(CONTAINER_CONFIG_PATH, 'docker-compose-fair.yml') STATIC_PARAMS_FILEPATH = os.path.join(CONTAINER_CONFIG_PATH, 'static_params.yaml') FAIR_STATIC_PARAMS_FILEPATH = os.path.join(CONTAINER_CONFIG_PATH, 'fair_static_params.yaml') diff --git 
a/node_cli/utils/docker_utils.py b/node_cli/utils/docker_utils.py index 9aa95f4f..2bc7b306 100644 --- a/node_cli/utils/docker_utils.py +++ b/node_cli/utils/docker_utils.py @@ -35,7 +35,6 @@ NGINX_CONTAINER_NAME, REMOVED_CONTAINERS_FOLDER_PATH, SGX_CERTIFICATES_DIR_NAME, - PASSIVE_COMPOSE_PATH, ) from node_cli.core.node_options import active_fair, active_skale, passive_fair, passive_skale from node_cli.utils.helper import run_cmd, str_to_bool @@ -292,9 +291,7 @@ def compose_build(env: dict, node_type: NodeType, node_mode: NodeMode): def get_compose_path(node_type: NodeType, node_mode: NodeMode) -> str: - if passive_skale(node_type, node_mode): - return PASSIVE_COMPOSE_PATH - elif active_fair(node_type, node_mode) or passive_fair(node_type, node_mode): + if node_type == NodeType.FAIR: return FAIR_COMPOSE_PATH return COMPOSE_PATH From 45de1fb4c530063753b9de2d5e766d131a38fda2 Mon Sep 17 00:00:00 2001 From: Alex Sheverdin Date: Mon, 5 Jan 2026 20:17:31 +0000 Subject: [PATCH 306/332] Add api and watchdog containers to `BASE_PASSIVE_COMPOSE_SERVICES` --- node_cli/utils/docker_utils.py | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/node_cli/utils/docker_utils.py b/node_cli/utils/docker_utils.py index 2bc7b306..098a61e7 100644 --- a/node_cli/utils/docker_utils.py +++ b/node_cli/utils/docker_utils.py @@ -76,7 +76,13 @@ 'boot-api': 'sk_boot_api', } -BASE_PASSIVE_COMPOSE_SERVICES = {'admin': 'sk_admin', 'nginx': 'sk_nginx', **REDIS_SERVICE_DICT} +BASE_PASSIVE_COMPOSE_SERVICES = { + 'admin': 'sk_admin', + 'nginx': 'sk_nginx', + 'api': 'sk_api', + 'watchdog': 'sk_watchdog', + **REDIS_SERVICE_DICT, +} BASE_PASSIVE_FAIR_COMPOSE_SERVICES = { 'admin': 'sk_admin', From 7b7d589c769e77ca1d5ee68ebd643582073f664c Mon Sep 17 00:00:00 2001 From: Alex Sheverdin Date: Wed, 7 Jan 2026 17:47:15 +0000 Subject: [PATCH 307/332] Set Redis data directory ownership --- node_cli/core/host.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git 
a/node_cli/core/host.py b/node_cli/core/host.py index 8c684455..da4be640 100644 --- a/node_cli/core/host.py +++ b/node_cli/core/host.py @@ -19,7 +19,7 @@ import logging import os -from shutil import copyfile +from shutil import copyfile, chown from urllib.parse import urlparse from node_cli.core.resources import update_resource_allocation @@ -94,6 +94,7 @@ def prepare_host(env_filepath: str, env_type: str, allocation: bool = False) -> try: logger.info('Preparing host started') make_dirs() + chown(REDIS_DATA_PATH, user=999, group=1000) save_env_params(env_filepath) if allocation: From fae6a38a0b9183322aa2e0082a5e45594a399fd8 Mon Sep 17 00:00:00 2001 From: Alex Sheverdin Date: Fri, 9 Jan 2026 19:50:22 +0000 Subject: [PATCH 308/332] Run `run_host_checks` during passive SKALE node init --- node_cli/operations/base.py | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/node_cli/operations/base.py b/node_cli/operations/base.py index 94139959..0429ae37 100644 --- a/node_cli/operations/base.py +++ b/node_cli/operations/base.py @@ -291,6 +291,16 @@ def init_passive( env_filepath, env_type=env['ENV_TYPE'], ) + failed_checks = run_host_checks( + env['BLOCK_DEVICE'], + TYPE, + NodeMode.PASSIVE, + env['ENV_TYPE'], + CONTAINER_CONFIG_PATH, + check_type=CheckType.PREINSTALL + ) + if failed_checks: + print_failed_requirements_checks(failed_checks) set_passive_node_options(archive=archive, indexer=indexer) From 0bc7f3a982ac678a3b0718cef8b8f01d1f2fc51f Mon Sep 17 00:00:00 2001 From: Alex Sheverdin Date: Fri, 9 Jan 2026 19:54:49 +0000 Subject: [PATCH 309/332] Run `run_host_checks` during passive SKALE node update --- node_cli/operations/base.py | 11 +++++++++++ 1 file changed, 11 insertions(+) diff --git a/node_cli/operations/base.py b/node_cli/operations/base.py index 0429ae37..05132995 100644 --- a/node_cli/operations/base.py +++ b/node_cli/operations/base.py @@ -349,6 +349,17 @@ def update_passive(env_filepath: str, env: Dict) -> bool: prepare_host(env_filepath, 
env['ENV_TYPE'], allocation=True) + failed_checks = run_host_checks( + env['BLOCK_DEVICE'], + TYPE, + NodeMode.PASSIVE, + env['ENV_TYPE'], + CONTAINER_CONFIG_PATH, + check_type=CheckType.PREINSTALL + ) + if failed_checks: + print_failed_requirements_checks(failed_checks) + meta_manager = CliMetaManager() meta_manager.update_meta( VERSION, From 67b68617c710eed098d58c2e97c6361ed982aaf7 Mon Sep 17 00:00:00 2001 From: Alex Sheverdin Date: Tue, 13 Jan 2026 12:35:18 +0000 Subject: [PATCH 310/332] Move `init_fair_boot` and `update_fair_boot` functions to `fair.py` --- node_cli/operations/__init__.py | 4 +- node_cli/operations/base.py | 75 --------------------------------- node_cli/operations/fair.py | 35 ++++++++++++++- 3 files changed, 36 insertions(+), 78 deletions(-) diff --git a/node_cli/operations/__init__.py b/node_cli/operations/__init__.py index f2ac1a94..7a72b442 100644 --- a/node_cli/operations/__init__.py +++ b/node_cli/operations/__init__.py @@ -21,8 +21,6 @@ update as update_op, init as init_op, init_passive as init_passive_op, - init_fair_boot as init_fair_boot_op, - update_fair_boot as update_fair_boot_op, update_passive as update_passive_op, turn_off as turn_off_op, turn_on as turn_on_op, @@ -31,7 +29,9 @@ configure_nftables, ) from node_cli.operations.fair import ( # noqa + init_fair_boot as init_fair_boot_op, init as init_fair_op, + update_fair_boot as update_fair_boot_op, update as update_fair_op, FairUpdateType, restore as restore_fair_op, diff --git a/node_cli/operations/base.py b/node_cli/operations/base.py index 05132995..1a64d585 100644 --- a/node_cli/operations/base.py +++ b/node_cli/operations/base.py @@ -159,48 +159,6 @@ def update(env_filepath: str, env: Dict, node_mode: NodeMode) -> bool: return True -@checked_host -def update_fair_boot(env_filepath: str, env: Dict, node_mode: NodeMode = NodeMode.ACTIVE) -> bool: - compose_rm(node_type=NodeType.FAIR, node_mode=node_mode, env=env) - remove_dynamic_containers() - 
cleanup_volume_artifacts(env['BLOCK_DEVICE']) - - sync_skale_node() - ensure_btrfs_kernel_module_autoloaded() - - if env.get('SKIP_DOCKER_CONFIG') != 'True': - configure_docker() - - enable_monitoring = str_to_bool(env.get('MONITORING_CONTAINERS', 'False')) - configure_nftables(enable_monitoring=enable_monitoring) - - generate_nginx_config() - prepare_block_device(env['BLOCK_DEVICE'], force=env['ENFORCE_BTRFS'] == 'True') - - prepare_host(env_filepath, env['ENV_TYPE']) - - meta_manager = FairCliMetaManager() - current_stream = meta_manager.get_meta_info().config_stream - skip_cleanup = env.get('SKIP_DOCKER_CLEANUP') == 'True' - if not skip_cleanup and current_stream != env['NODE_VERSION']: - logger.info( - 'Stream version was changed from %s to %s', - current_stream, - env['NODE_VERSION'], - ) - docker_cleanup() - - meta_manager.update_meta( - VERSION, - env['NODE_VERSION'], - distro.id(), - distro.version(), - ) - update_images(env=env, node_type=NodeType.FAIR, node_mode=NodeMode.ACTIVE) - compose_up(env=env, node_type=NodeType.FAIR, node_mode=NodeMode.ACTIVE, is_fair_boot=True) - return True - - @checked_host def init(env_filepath: str, env: dict, node_mode: NodeMode) -> None: sync_skale_node() @@ -236,39 +194,6 @@ def init(env_filepath: str, env: dict, node_mode: NodeMode) -> None: compose_up(env=env, node_type=NodeType.SKALE, node_mode=node_mode) -@checked_host -def init_fair_boot(env_filepath: str, env: dict, node_mode: NodeMode = NodeMode.ACTIVE) -> None: - sync_skale_node() - cleanup_volume_artifacts(env['BLOCK_DEVICE']) - - ensure_btrfs_kernel_module_autoloaded() - if env.get('SKIP_DOCKER_CONFIG') != 'True': - configure_docker() - - enable_monitoring = str_to_bool(env.get('MONITORING_CONTAINERS', 'False')) - configure_nftables(enable_monitoring=enable_monitoring) - - prepare_host(env_filepath, env_type=env['ENV_TYPE']) - link_env_file() - mark_active_node() - - configure_filebeat() - configure_flask() - generate_nginx_config() - 
prepare_block_device(env['BLOCK_DEVICE'], force=env['ENFORCE_BTRFS'] == 'True') - - meta_manager = FairCliMetaManager() - meta_manager.update_meta( - VERSION, - env['NODE_VERSION'], - distro.id(), - distro.version(), - ) - update_images(env=env, node_type=NodeType.FAIR, node_mode=NodeMode.ACTIVE) - - compose_up(env=env, node_type=NodeType.FAIR, node_mode=NodeMode.ACTIVE, is_fair_boot=True) - - def init_passive( env_filepath: str, env: dict, diff --git a/node_cli/operations/fair.py b/node_cli/operations/fair.py index ca7baef5..f1e62305 100644 --- a/node_cli/operations/fair.py +++ b/node_cli/operations/fair.py @@ -38,7 +38,7 @@ from node_cli.core.nginx import generate_nginx_config from node_cli.core.schains import cleanup_no_lvm_datadir from node_cli.core.static_config import get_fair_chain_name -from node_cli.core.node_options import set_passive_node_options, upsert_node_mode +from node_cli.core.node_options import mark_active_node, set_passive_node_options, upsert_node_mode from node_cli.fair.record.chain_record import ( get_fair_chain_record, migrate_chain_record, @@ -80,6 +80,39 @@ class FairUpdateType(Enum): FROM_BOOT = 'from_boot' +@checked_host +def init_fair_boot(env_filepath: str, env: dict) -> None: + sync_skale_node() + cleanup_volume_artifacts(env['BLOCK_DEVICE']) + + ensure_btrfs_kernel_module_autoloaded() + if env.get('SKIP_DOCKER_CONFIG') != 'True': + configure_docker() + + enable_monitoring = str_to_bool(env.get('MONITORING_CONTAINERS', 'False')) + configure_nftables(enable_monitoring=enable_monitoring) + + prepare_host(env_filepath, env_type=env['ENV_TYPE']) + link_env_file() + mark_active_node() + + configure_filebeat() + configure_flask() + generate_nginx_config() + prepare_block_device(env['BLOCK_DEVICE'], force=env['ENFORCE_BTRFS'] == 'True') + + meta_manager = FairCliMetaManager() + meta_manager.update_meta( + VERSION, + env['NODE_VERSION'], + distro.id(), + distro.version(), + ) + update_images(env=env, node_type=NodeType.FAIR, 
node_mode=NodeMode.ACTIVE) + + compose_up(env=env, node_type=NodeType.FAIR, node_mode=NodeMode.ACTIVE, is_fair_boot=True) + + @checked_host def init( env_filepath: str, From 3b0e92dd31c0dbe1355fbf08b3242be0f88950db Mon Sep 17 00:00:00 2001 From: Alex Sheverdin Date: Tue, 13 Jan 2026 12:36:01 +0000 Subject: [PATCH 311/332] Delete unused constant `BASE_CONTAINERS_AMOUNT` --- node_cli/core/node.py | 1 - 1 file changed, 1 deletion(-) diff --git a/node_cli/core/node.py b/node_cli/core/node.py index 725deee3..43df3c3a 100644 --- a/node_cli/core/node.py +++ b/node_cli/core/node.py @@ -94,7 +94,6 @@ logger = logging.getLogger(__name__) TEXTS = safe_load_texts() -BASE_CONTAINERS_AMOUNT = 5 BLUEPRINT_NAME = 'node' From 05d1251e88e722e6bece8371ca7c719dbfb8e324 Mon Sep 17 00:00:00 2001 From: Alex Sheverdin Date: Tue, 13 Jan 2026 13:03:19 +0000 Subject: [PATCH 312/332] Delete unused parameter `unsafe_ok` --- node_cli/cli/passive_node.py | 2 +- node_cli/core/node.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/node_cli/cli/passive_node.py b/node_cli/cli/passive_node.py index 9c92c767..c1f406da 100644 --- a/node_cli/cli/passive_node.py +++ b/node_cli/cli/passive_node.py @@ -69,7 +69,7 @@ def _init_passive( @click.option('--unsafe', 'unsafe_ok', help='Allow unsafe update', hidden=True, is_flag=True) @click.argument('env_file') @streamed_cmd -def _update_passive(env_file, unsafe_ok): +def _update_passive(env_file): update_passive(env_file) diff --git a/node_cli/core/node.py b/node_cli/core/node.py index 43df3c3a..c1264346 100644 --- a/node_cli/core/node.py +++ b/node_cli/core/node.py @@ -206,7 +206,7 @@ def init_passive( @check_inited @check_user -def update_passive(env_filepath: str, unsafe_ok: bool = False) -> None: +def update_passive(env_filepath: str) -> None: logger.info('Node update started') prev_version = CliMetaManager().get_meta_info().version if (__version__ == 'test' or __version__.startswith('2.6')) and prev_version == '2.5.0': From 
e63a42d9aa86366727e219f48e805a926dd6dff0 Mon Sep 17 00:00:00 2001 From: Alex Sheverdin Date: Tue, 13 Jan 2026 13:59:25 +0000 Subject: [PATCH 313/332] Remove redundant parentheses --- node_cli/core/resources.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/node_cli/core/resources.py b/node_cli/core/resources.py index 68686738..f75a5c1c 100644 --- a/node_cli/core/resources.py +++ b/node_cli/core/resources.py @@ -151,7 +151,7 @@ def get_cpu_alloc(common_config: Dict) -> ResourceAlloc: cpu_proportions = common_config['schain']['cpu'] schain_max_cpu_shares = int(cpu_proportions['skaled'] * MAX_CPU_SHARES) ima_max_cpu_shares = int(cpu_proportions['ima'] * MAX_CPU_SHARES) - return (ResourceAlloc(schain_max_cpu_shares), ResourceAlloc(ima_max_cpu_shares)) + return ResourceAlloc(schain_max_cpu_shares), ResourceAlloc(ima_max_cpu_shares) def verify_disk_size( From 5ba2a8cd111d66289a3754e919ccb6c11fa5d315 Mon Sep 17 00:00:00 2001 From: Alex Sheverdin Date: Tue, 13 Jan 2026 15:24:22 +0000 Subject: [PATCH 314/332] Fix `tail` parameter type --- node_cli/utils/docker_utils.py | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/node_cli/utils/docker_utils.py b/node_cli/utils/docker_utils.py index 098a61e7..96e1c10a 100644 --- a/node_cli/utils/docker_utils.py +++ b/node_cli/utils/docker_utils.py @@ -211,12 +211,11 @@ def remove_schain_container_by_name( def backup_container_logs( container: Container, - head: int = DOCKER_DEFAULT_HEAD_LINES, - tail: int = DOCKER_DEFAULT_TAIL_LINES, + tail: int | str = DOCKER_DEFAULT_TAIL_LINES, ) -> None: logger.info(f'Going to backup container logs: {container.name}') logs_backup_filepath = get_logs_backup_filepath(container) - save_container_logs(container, logs_backup_filepath, tail) + save_container_logs(container, logs_backup_filepath, tail=tail) logger.info(f'Old container logs saved to {logs_backup_filepath}, tail: {tail}') @@ -224,7 +223,7 @@ def save_container_logs( container: Container, log_filepath: 
str, head: int = DOCKER_DEFAULT_HEAD_LINES, - tail: int = DOCKER_DEFAULT_TAIL_LINES, + tail: int | str = DOCKER_DEFAULT_TAIL_LINES, ) -> None: separator = b'=' * 80 + b'\n' tail_lines = container.logs(tail=tail) From 5debc47cff5ce916394a20be420a66c5b26c8232 Mon Sep 17 00:00:00 2001 From: Alex Sheverdin Date: Tue, 13 Jan 2026 15:25:05 +0000 Subject: [PATCH 315/332] Add `node_mode` parameter --- node_cli/core/node.py | 1 + 1 file changed, 1 insertion(+) diff --git a/node_cli/core/node.py b/node_cli/core/node.py index c1264346..2128d43f 100644 --- a/node_cli/core/node.py +++ b/node_cli/core/node.py @@ -262,6 +262,7 @@ def compose_node_env( else: user_config = get_validated_user_config( node_type=node_type, + node_mode=node_mode, env_filepath=INIT_ENV_FILEPATH, is_fair_boot=is_fair_boot, skip_user_conf_validation=skip_user_conf_validation, From afc79eea94288fce7226f83ca66b51f3fb8c8cf8 Mon Sep 17 00:00:00 2001 From: Alex Sheverdin Date: Tue, 13 Jan 2026 15:25:35 +0000 Subject: [PATCH 316/332] Clean code --- node_cli/core/checks.py | 1 - node_cli/core/resources.py | 5 +---- node_cli/fair/staking.py | 1 - 3 files changed, 1 insertion(+), 6 deletions(-) diff --git a/node_cli/core/checks.py b/node_cli/core/checks.py index 9c312476..a4c439c9 100644 --- a/node_cli/core/checks.py +++ b/node_cli/core/checks.py @@ -385,7 +385,6 @@ def docker_compose(self) -> CheckResult: ) output = v_cmd_result.stdout.decode('utf-8').rstrip() if v_cmd_result.returncode != 0: - info = f'Checking docker compose version failed with: {output}' return self._failed(name=name, info=output) actual_version = output.split(',')[0].split()[-1].strip() diff --git a/node_cli/core/resources.py b/node_cli/core/resources.py index f75a5c1c..430eee86 100644 --- a/node_cli/core/resources.py +++ b/node_cli/core/resources.py @@ -154,10 +154,7 @@ def get_cpu_alloc(common_config: Dict) -> ResourceAlloc: return ResourceAlloc(schain_max_cpu_shares), ResourceAlloc(ima_max_cpu_shares) -def verify_disk_size( - disk_device: 
str, - env_configs: dict, -) -> Dict: +def verify_disk_size(disk_device: str, env_configs: dict): disk_size = get_disk_size(disk_device) env_disk_size = env_configs['server']['disk'] check_disk_size(disk_size, env_disk_size) diff --git a/node_cli/fair/staking.py b/node_cli/fair/staking.py index 4b7a4008..95b93adf 100644 --- a/node_cli/fair/staking.py +++ b/node_cli/fair/staking.py @@ -109,7 +109,6 @@ def get_exit_requests(raw: bool = False) -> None: exit_requests = payload.get('exit_requests') if not isinstance(exit_requests, list): error_exit(payload, exit_code=CLIExitCodes.BAD_API_RESPONSE) - return if raw: print(json.dumps(exit_requests, indent=2)) return From 88f6959a6612b95dbcac97e173c2a0656b250d3e Mon Sep 17 00:00:00 2001 From: Alex Sheverdin Date: Tue, 13 Jan 2026 15:47:06 +0000 Subject: [PATCH 317/332] Delete duplicated function `ensure_filestorage_mapping` --- node_cli/operations/docker_lvmpy.py | 6 +----- 1 file changed, 1 insertion(+), 5 deletions(-) diff --git a/node_cli/operations/docker_lvmpy.py b/node_cli/operations/docker_lvmpy.py index fb70e480..c21328f8 100644 --- a/node_cli/operations/docker_lvmpy.py +++ b/node_cli/operations/docker_lvmpy.py @@ -35,6 +35,7 @@ SCHAINS_MNT_DIR_REGULAR, VOLUME_GROUP, ) +from node_cli.operations.volume import ensure_filestorage_mapping from lvmpy.src.install import setup as setup_lvmpy logger = logging.getLogger(__name__) @@ -49,11 +50,6 @@ def update_docker_lvmpy_env(env): return env -def ensure_filestorage_mapping(mapping_dir=FILESTORAGE_MAPPING): - if not os.path.isdir(FILESTORAGE_MAPPING): - os.makedirs(FILESTORAGE_MAPPING) - - def sync_docker_lvmpy_repo(env): if os.path.isdir(DOCKER_LVMPY_PATH): shutil.rmtree(DOCKER_LVMPY_PATH) From 795f1a363037a0701a17710e79e024c47f3a7841 Mon Sep 17 00:00:00 2001 From: Alex Sheverdin Date: Tue, 13 Jan 2026 16:38:56 +0000 Subject: [PATCH 318/332] Clean code --- node_cli/core/nftables.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/node_cli/core/nftables.py 
b/node_cli/core/nftables.py index 042ff2e1..e0059fff 100644 --- a/node_cli/core/nftables.py +++ b/node_cli/core/nftables.py @@ -529,7 +529,6 @@ def add_loopback_rule(self, chain) -> None: def get_base_ruleset(self) -> str: self.nft.set_json_output(False) - output = '' try: cmd = f'list chain {self.family} {self.table} {self.chain}' rc, output, error = self.nft.cmd(cmd) @@ -539,7 +538,6 @@ def get_base_ruleset(self) -> str: finally: self.nft.set_json_output(True) - return output def setup_firewall(self, enable_monitoring: bool = False) -> None: """Setup firewall rules.""" From 6ba32375bad95775dc537e01b85db323e85601ac Mon Sep 17 00:00:00 2001 From: Alex Sheverdin Date: Tue, 13 Jan 2026 19:15:34 +0000 Subject: [PATCH 319/332] Fix `get_containers` and `create_logs_dump` functions --- node_cli/core/logs.py | 2 +- node_cli/utils/docker_utils.py | 3 +-- 2 files changed, 2 insertions(+), 3 deletions(-) diff --git a/node_cli/core/logs.py b/node_cli/core/logs.py index 67472e96..1563ddfc 100644 --- a/node_cli/core/logs.py +++ b/node_cli/core/logs.py @@ -42,7 +42,7 @@ def create_logs_dump(path, filter_container=None): if filter_container: containers = get_containers(filter_container) else: - containers = get_containers('skale') + containers = get_containers('sk_*') for container in containers: log_filepath = os.path.join(containers_logs_path, f'{container.name}.log') diff --git a/node_cli/utils/docker_utils.py b/node_cli/utils/docker_utils.py index 96e1c10a..e30092a8 100644 --- a/node_cli/utils/docker_utils.py +++ b/node_cli/utils/docker_utils.py @@ -118,8 +118,7 @@ def get_sanitized_container_name(container_info: dict) -> str: def get_containers(container_name_filter=None, _all=True) -> list: - return docker_client().containers.list(all=_all) - + return docker_client().containers.list(all=_all, filters={'name': container_name_filter}) def get_all_schain_containers(_all=True) -> list: return docker_client().containers.list(all=_all, filters={'name': 'sk_skaled_*'}) From 
475a2117ae2d47ea65ed49dd943ecff2d5d3d5a2 Mon Sep 17 00:00:00 2001 From: Alex Sheverdin Date: Tue, 13 Jan 2026 19:48:23 +0000 Subject: [PATCH 320/332] Delete unused optional parameter `unsafe_ok` --- node_cli/cli/passive_node.py | 1 - 1 file changed, 1 deletion(-) diff --git a/node_cli/cli/passive_node.py b/node_cli/cli/passive_node.py index c1f406da..f86ba99b 100644 --- a/node_cli/cli/passive_node.py +++ b/node_cli/cli/passive_node.py @@ -66,7 +66,6 @@ def _init_passive( expose_value=False, prompt='Are you sure you want to update SKALE node software?', ) -@click.option('--unsafe', 'unsafe_ok', help='Allow unsafe update', hidden=True, is_flag=True) @click.argument('env_file') @streamed_cmd def _update_passive(env_file): From b8dca6562a119d889c6351a235f8720605f9cc7e Mon Sep 17 00:00:00 2001 From: Alex Sheverdin Date: Tue, 13 Jan 2026 19:50:29 +0000 Subject: [PATCH 321/332] Fix `get_containers` function --- node_cli/utils/docker_utils.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/node_cli/utils/docker_utils.py b/node_cli/utils/docker_utils.py index e30092a8..695a1253 100644 --- a/node_cli/utils/docker_utils.py +++ b/node_cli/utils/docker_utils.py @@ -118,7 +118,11 @@ def get_sanitized_container_name(container_info: dict) -> str: def get_containers(container_name_filter=None, _all=True) -> list: - return docker_client().containers.list(all=_all, filters={'name': container_name_filter}) + filters = {} + if container_name_filter: + filters['name'] = container_name_filter + return docker_client().containers.list(all=_all, filters=filters) + def get_all_schain_containers(_all=True) -> list: return docker_client().containers.list(all=_all, filters={'name': 'sk_skaled_*'}) From d00ec041f5da8dfb7f0337044b7a09c670b3533d Mon Sep 17 00:00:00 2001 From: Alex Sheverdin Date: Sun, 25 Jan 2026 21:48:34 +0000 Subject: [PATCH 322/332] Add `bite` variable --- node_cli/configs/user.py | 2 ++ 1 file changed, 2 insertions(+) diff --git 
a/node_cli/configs/user.py b/node_cli/configs/user.py index 59ecda77..a4f52c2c 100644 --- a/node_cli/configs/user.py +++ b/node_cli/configs/user.py @@ -127,6 +127,7 @@ class SkaleUserConfig(BaseUserConfig): disable_dry_run: str = '' default_gas_limit: str = '' default_gas_price_wei: str = '' + bite: str = '' @dataclass @@ -136,6 +137,7 @@ class PassiveSkaleUserConfig(BaseUserConfig): schain_name: str = '' ima_contracts: str = '' enforce_btrfs: str = '' + bite: str = '' def get_validated_user_config( From c190221f392c52342297b0d20c6145c51e9700f7 Mon Sep 17 00:00:00 2001 From: Dmytro Date: Fri, 13 Feb 2026 18:39:00 +0000 Subject: [PATCH 323/332] update settings structure - wip --- .github/copilot-instructions.md | 9 ++-- .gitignore | 1 + node_cli/cli/node.py | 18 ++++--- node_cli/configs/__init__.py | 11 ++-- node_cli/configs/{user.py => _user.py} | 6 +-- node_cli/core/host.py | 27 +--------- node_cli/core/node.py | 72 +++++--------------------- node_cli/operations/base.py | 10 ++-- node_cli/operations/common.py | 13 ----- node_cli/operations/fair.py | 9 ++-- node_cli/utils/docker_utils.py | 3 -- node_cli/utils/helper.py | 11 ++-- node_cli/utils/node_type.py | 6 +-- node_cli/utils/print_formatters.py | 3 -- node_cli/utils/settings.py | 47 +++++++++++++++++ pyproject.toml | 29 ++++++----- scripts/export_env.sh | 8 +++ scripts/run_tests.sh | 10 +--- tests/cli/node_test.py | 4 +- tests/conftest.py | 2 +- tests/core/core_node_test.py | 11 +--- tests/utils/settings_test.py | 20 +++++++ 22 files changed, 155 insertions(+), 175 deletions(-) rename node_cli/configs/{user.py => _user.py} (98%) create mode 100644 node_cli/utils/settings.py create mode 100644 scripts/export_env.sh create mode 100644 tests/utils/settings_test.py diff --git a/.github/copilot-instructions.md b/.github/copilot-instructions.md index a5c8ebdf..5df55bfe 100644 --- a/.github/copilot-instructions.md +++ b/.github/copilot-instructions.md @@ -11,7 +11,10 @@ - no redundant code - move repeated logic into 
helper functions - use type hints to specify the expected types of function arguments and return values -- check `ruff.toml` for formatting rules -- always lint changes using `ruff check` +- check `pyproject.toml` for formatting rules +- always lint changes using `uv run ruff check` - tests should be placed in `tests/` directory, follow the existing structure and code style -- to run a test always use `bash scripts/run_tests.sh tests/path_to_test.py -k [TEST_NAME]` command \ No newline at end of file +- always use `uv` to run all commands in the repo (e.g., `uv run ruff`, `uv run pytest`, etc.) +- for running tests, export environment variables in the terminal before running the tests: `. ./scripts/export_env.sh` + +- additional external context is located in context directory \ No newline at end of file diff --git a/.gitignore b/.gitignore index 87a0d8f8..65232943 100644 --- a/.gitignore +++ b/.gitignore @@ -125,3 +125,4 @@ tests/.skale/node_data/node_options.json tests/.skale/config/nginx.conf.j2 .zed +uv.lock \ No newline at end of file diff --git a/node_cli/cli/node.py b/node_cli/cli/node.py index 55304356..f13825d9 100644 --- a/node_cli/cli/node.py +++ b/node_cli/cli/node.py @@ -17,8 +17,11 @@ # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see . 
+from typing import get_args + import click +from skale.core.types import EnvType from node_cli.cli.info import TYPE from node_cli.core.node import ( cleanup as cleanup_skale, @@ -38,7 +41,6 @@ run_checks, ) from node_cli.configs import DEFAULT_NODE_BASE_PORT -from node_cli.configs.user import ALLOWED_ENV_TYPES from node_cli.core.node_options import upsert_node_mode from node_cli.utils.decorators import check_inited from node_cli.utils.helper import abort_if_false, streamed_cmd, IP_TYPE @@ -85,10 +87,10 @@ def register_node(name, ip, port, domain): @node.command('init', help='Initialize SKALE node') -@click.argument('env_file') +@click.argument('config_file') @streamed_cmd -def init_node(env_file): - init(env_filepath=env_file, node_type=TYPE) +def init_node(config_file): + init(config_file=config_file, node_type=TYPE) @node.command('update', help='Update node from .env file') @@ -101,12 +103,12 @@ def init_node(env_file): ) @click.option('--pull-config', 'pull_config_for_schain', hidden=True, type=str) @click.option('--unsafe', 'unsafe_ok', help='Allow unsafe update', hidden=True, is_flag=True) -@click.argument('env_file') +@click.argument('config_file') @streamed_cmd -def update_node(env_file, pull_config_for_schain, unsafe_ok): +def update_node(config_file, pull_config_for_schain, unsafe_ok): update( node_mode=NodeMode.ACTIVE, - env_filepath=env_file, + env_filepath=config_file, pull_config_for_schain=pull_config_for_schain, node_type=TYPE, unsafe_ok=unsafe_ok, @@ -227,7 +229,7 @@ def _set_domain_name(domain): @click.option( '--network', '-n', - type=click.Choice(ALLOWED_ENV_TYPES), + type=click.Choice(get_args(EnvType)), default='mainnet', help='Network to check', ) diff --git a/node_cli/configs/__init__.py b/node_cli/configs/__init__.py index 35d776d8..f9fc0aa5 100644 --- a/node_cli/configs/__init__.py +++ b/node_cli/configs/__init__.py @@ -19,6 +19,7 @@ import os import sys +from pathlib import Path from node_cli.utils.global_config import read_g_config @@ 
-43,6 +44,11 @@ NODE_DATA_PATH = os.path.join(SKALE_DIR, 'node_data') SCHAIN_NODE_DATA_PATH = os.path.join(NODE_DATA_PATH, 'schains') NODE_CLI_STATUS_FILENAME = 'node_cli.status' + +SETTINGS_DIR = Path(NODE_DATA_PATH) / 'settings' +NODE_SETTINGS_PATH = SETTINGS_DIR / 'node.toml' +INTERNAL_SETTINGS_PATH = SETTINGS_DIR / 'internal.toml' + NODE_CONFIG_PATH = os.path.join(NODE_DATA_PATH, 'node_config.json') CONTAINER_CONFIG_PATH = os.path.join(SKALE_DIR, 'config') CONTAINER_CONFIG_TMP_PATH = os.path.join(SKALE_TMP_DIR, 'config') @@ -52,8 +58,6 @@ INIT_ENV_FILEPATH = os.path.join(SKALE_DIR, '.env') SKALE_RUN_DIR = '/var/run/skale' -SGX_CERTIFICATES_DIR_NAME = 'sgx_certs' - COMPOSE_PATH = os.path.join(CONTAINER_CONFIG_PATH, 'docker-compose.yml') FAIR_COMPOSE_PATH = os.path.join(CONTAINER_CONFIG_PATH, 'docker-compose-fair.yml') STATIC_PARAMS_FILEPATH = os.path.join(CONTAINER_CONFIG_PATH, 'static_params.yaml') @@ -95,9 +99,6 @@ IPTABLES_RULES_STATE_FILEPATH = os.path.join(IPTABLES_DIR, 'rules.v4') DEFAULT_SSH_PORT = 22 -FLASK_SECRET_KEY_FILENAME = 'flask_db_key.txt' -FLASK_SECRET_KEY_FILE = os.path.join(NODE_DATA_PATH, FLASK_SECRET_KEY_FILENAME) - DOCKER_CONFIG_FILEPATH = '/etc/docker/daemon.json' HIDE_STREAM_LOG = os.getenv('HIDE_STREAM_LOG') diff --git a/node_cli/configs/user.py b/node_cli/configs/_user.py similarity index 98% rename from node_cli/configs/user.py rename to node_cli/configs/_user.py index a4f52c2c..6ab7b6f5 100644 --- a/node_cli/configs/user.py +++ b/node_cli/configs/_user.py @@ -52,6 +52,7 @@ class ValidationResult(NamedTuple): class BaseUserConfig(ABC): node_version: str env_type: str + endpoint: str filebeat_host: str block_device: str @@ -89,7 +90,6 @@ def validate_params(cls, params: Dict) -> ValidationResult: @dataclass class FairUserConfig(BaseUserConfig): fair_contracts: str - boot_endpoint: str sgx_server_url: str enforce_btrfs: str = '' telegraf: str = '' @@ -99,13 +99,11 @@ class FairUserConfig(BaseUserConfig): @dataclass class 
PassiveFairUserConfig(BaseUserConfig): fair_contracts: str - boot_endpoint: str enforce_btrfs: str = '' @dataclass class FairBootUserConfig(BaseUserConfig): - endpoint: str manager_contracts: str ima_contracts: str sgx_server_url: str @@ -114,7 +112,6 @@ class FairBootUserConfig(BaseUserConfig): @dataclass class SkaleUserConfig(BaseUserConfig): - endpoint: str manager_contracts: str ima_contracts: str docker_lvmpy_version: str @@ -132,7 +129,6 @@ class SkaleUserConfig(BaseUserConfig): @dataclass class PassiveSkaleUserConfig(BaseUserConfig): - endpoint: str manager_contracts: str schain_name: str = '' ima_contracts: str = '' diff --git a/node_cli/core/host.py b/node_cli/core/host.py index da4be640..920b651e 100644 --- a/node_cli/core/host.py +++ b/node_cli/core/host.py @@ -39,6 +39,7 @@ SGX_CERTS_PATH, REPORTS_PATH, REDIS_DATA_PATH, + SETTINGS_DIR, SCHAINS_DATA_PATH, LOG_PATH, REMOVED_CONTAINERS_FOLDER_PATH, @@ -50,7 +51,6 @@ NGINX_CONFIG_FILEPATH, ) from node_cli.configs.cli_logger import LOG_DATA_PATH -from node_cli.configs.user import SKALE_DIR_ENV_FILEPATH, CONFIGS_ENV_FILEPATH from node_cli.core.nftables import NFTablesManager from node_cli.utils.helper import safe_mkdir @@ -73,20 +73,6 @@ def fix_url(url): return False -def get_flask_secret_key() -> str: - secret_key_filepath = os.path.join(NODE_DATA_PATH, 'flask_db_key.txt') - - if not os.path.exists(secret_key_filepath): - error_exit(f'Flask secret key file not found at {secret_key_filepath}') - - try: - with open(secret_key_filepath, 'r') as key_file: - secret_key = key_file.read().strip() - return secret_key - except (IOError, OSError) as e: - error_exit(f'Failed to read Flask secret key: {e}') - - def prepare_host(env_filepath: str, env_type: str, allocation: bool = False) -> None: if not env_filepath or not env_type: error_exit('Missing required parameters for host initialization') @@ -121,6 +107,7 @@ def make_dirs(): LOG_PATH, REPORTS_PATH, REDIS_DATA_PATH, + SETTINGS_DIR, SKALE_RUN_DIR, 
SKALE_STATE_DIR, SKALE_TMP_DIR, @@ -128,16 +115,6 @@ def make_dirs(): safe_mkdir(dir_path) -def save_env_params(env_filepath: str) -> None: - copyfile(env_filepath, SKALE_DIR_ENV_FILEPATH) - - -def link_env_file(): - if not (os.path.islink(CONFIGS_ENV_FILEPATH) or os.path.isfile(CONFIGS_ENV_FILEPATH)): - logger.info('Creating symlink %s → %s', SKALE_DIR_ENV_FILEPATH, CONFIGS_ENV_FILEPATH) - os.symlink(SKALE_DIR_ENV_FILEPATH, CONFIGS_ENV_FILEPATH) - - def init_logs_dir(): safe_mkdir(LOG_DATA_PATH) safe_mkdir(REMOVED_CONTAINERS_FOLDER_PATH) diff --git a/node_cli/core/node.py b/node_cli/core/node.py index 2128d43f..191d2077 100644 --- a/node_cli/core/node.py +++ b/node_cli/core/node.py @@ -43,9 +43,8 @@ TM_INIT_TIMEOUT, ) from node_cli.configs.cli_logger import LOG_DATA_PATH as CLI_LOG_DATA_PATH -from node_cli.configs.user import SKALE_DIR_ENV_FILEPATH, get_validated_user_config from node_cli.core.checks import run_checks as run_host_checks -from node_cli.core.host import get_flask_secret_key, is_node_inited, save_env_params +from node_cli.core.host import is_node_inited from node_cli.core.resources import update_resource_allocation from node_cli.core.node_options import ( active_fair, @@ -152,11 +151,11 @@ def register_node(name, p2p_ip, public_ip, port, domain_name): @check_not_inited -def init(env_filepath: str, node_type: NodeType) -> None: +def init(config_file: str, node_type: NodeType) -> None: node_mode = NodeMode.ACTIVE - env = compose_node_env(env_filepath=env_filepath, node_type=node_type, node_mode=node_mode) + env = compose_node_env(node_type=node_type, node_mode=node_mode) - init_op(env_filepath=env_filepath, env=env, node_mode=node_mode) + init_op(env_filepath=config_file, env=env, node_mode=node_mode) logger.info('Waiting for containers initialization') time.sleep(TM_INIT_TIMEOUT) if not is_base_containers_alive(node_type=node_type, node_mode=node_mode): @@ -169,7 +168,7 @@ def init(env_filepath: str, node_type: NodeType) -> None: @check_not_inited def 
restore(backup_path, env_filepath, node_type: NodeType, no_snapshot=False, config_only=False): node_mode = NodeMode.ACTIVE - env = compose_node_env(env_filepath=env_filepath, node_type=node_type, node_mode=node_mode) + env = compose_node_env(node_type=node_type, node_mode=node_mode) if env is None: return save_env_params(env_filepath) @@ -211,7 +210,7 @@ def update_passive(env_filepath: str) -> None: prev_version = CliMetaManager().get_meta_info().version if (__version__ == 'test' or __version__.startswith('2.6')) and prev_version == '2.5.0': migrate_2_6() - env = compose_node_env(env_filepath, node_type=NodeType.SKALE, node_mode=NodeMode.PASSIVE) + env = compose_node_env(node_type=NodeType.SKALE, node_mode=NodeMode.PASSIVE) update_ok = update_passive_op(env_filepath, env) if update_ok: logger.info('Waiting for containers initialization') @@ -227,69 +226,22 @@ def update_passive(env_filepath: str) -> None: @check_user def cleanup(node_mode: NodeMode, prune: bool = False) -> None: node_mode = upsert_node_mode(node_mode=node_mode) - env = compose_node_env( - SKALE_DIR_ENV_FILEPATH, - save=False, - node_type=NodeType.SKALE, - node_mode=node_mode, - skip_user_conf_validation=True, - ) + env = compose_node_env(NodeType.SKALE, node_mode) cleanup_skale_op(node_mode=node_mode, env=env, prune=prune) logger.info('SKALE node was cleaned up, all containers and data removed') -def compose_node_env( - env_filepath: str, - node_type: NodeType, - node_mode: NodeMode, - inited_node: bool = False, - sync_schains: Optional[bool] = None, - pull_config_for_schain: Optional[str] = None, - save: bool = True, - is_fair_boot: bool = False, - skip_user_conf_validation: bool = False, -) -> dict[str, str]: - if env_filepath is not None: - user_config = get_validated_user_config( - node_type=node_type, - node_mode=node_mode, - env_filepath=env_filepath, - is_fair_boot=is_fair_boot, - skip_user_conf_validation=skip_user_conf_validation, - ) - if save: - save_env_params(env_filepath) - else: - 
user_config = get_validated_user_config( - node_type=node_type, - node_mode=node_mode, - env_filepath=INIT_ENV_FILEPATH, - is_fair_boot=is_fair_boot, - skip_user_conf_validation=skip_user_conf_validation, - ) - +def compose_node_env(node_type: NodeType, node_mode: NodeMode) -> dict[str, str]: if node_mode == NodeMode.PASSIVE or node_type == NodeType.FAIR: mnt_dir = SCHAINS_MNT_DIR_SINGLE_CHAIN else: mnt_dir = SCHAINS_MNT_DIR_REGULAR - env = { 'SKALE_DIR': SKALE_DIR, 'SCHAINS_MNT_DIR': mnt_dir, 'FILESTORAGE_MAPPING': FILESTORAGE_MAPPING, 'SKALE_LIB_PATH': SKALE_STATE_DIR, - **user_config.to_env(), } - - if inited_node and not node_mode == NodeMode.PASSIVE: - env['FLASK_SECRET_KEY'] = get_flask_secret_key() - - if sync_schains and not node_mode == NodeMode.PASSIVE: - env['BACKUP_RUN'] = 'True' - - if pull_config_for_schain: - env['PULL_CONFIG_FOR_SCHAIN'] = pull_config_for_schain - return {k: v for k, v in env.items() if v != ''} @@ -313,10 +265,10 @@ def update( migrate_2_6() logger.info('Node update started') env = compose_node_env( - env_filepath, - inited_node=True, - sync_schains=False, - pull_config_for_schain=pull_config_for_schain, + # env_filepath, + # inited_node=True, + # sync_schains=False, + # pull_config_for_schain=pull_config_for_schain, node_type=node_type, node_mode=node_mode, ) diff --git a/node_cli/operations/base.py b/node_cli/operations/base.py index 1a64d585..713a6775 100644 --- a/node_cli/operations/base.py +++ b/node_cli/operations/base.py @@ -53,7 +53,7 @@ cleanup_no_lvm_datadir, update_node_cli_schain_status, ) -from node_cli.operations.common import configure_filebeat, configure_flask, unpack_backup_archive +from node_cli.operations.common import configure_filebeat, unpack_backup_archive from node_cli.operations.config_repo import ( download_skale_node, sync_skale_node, @@ -76,6 +76,7 @@ from node_cli.utils.meta import CliMetaManager, FairCliMetaManager from node_cli.utils.node_type import NodeMode, NodeType from 
node_cli.utils.print_formatters import print_failed_requirements_checks +from node_cli.utils.settings import save_settings logger = logging.getLogger(__name__) @@ -134,6 +135,7 @@ def update(env_filepath: str, env: Dict, node_mode: NodeMode) -> bool: generate_nginx_config() prepare_host(env_filepath, env['ENV_TYPE'], allocation=True) + save_settings(node_type=NodeType.SKALE, node_mode=node_mode) init_shared_space_volume(env['ENV_TYPE']) meta_manager = CliMetaManager() @@ -170,12 +172,12 @@ def init(env_filepath: str, env: dict, node_mode: NodeMode) -> None: configure_nftables(enable_monitoring=enable_monitoring) prepare_host(env_filepath, env_type=env['ENV_TYPE']) + save_settings(node_type=NodeType.SKALE, node_mode=node_mode) link_env_file() mark_active_node() configure_filebeat() - configure_flask() generate_nginx_config() lvmpy_install(env) @@ -222,7 +224,7 @@ def init_passive( NodeMode.PASSIVE, env['ENV_TYPE'], CONTAINER_CONFIG_PATH, - check_type=CheckType.PREINSTALL + check_type=CheckType.PREINSTALL, ) if failed_checks: print_failed_requirements_checks(failed_checks) @@ -280,7 +282,7 @@ def update_passive(env_filepath: str, env: Dict) -> bool: NodeMode.PASSIVE, env['ENV_TYPE'], CONTAINER_CONFIG_PATH, - check_type=CheckType.PREINSTALL + check_type=CheckType.PREINSTALL, ) if failed_checks: print_failed_requirements_checks(failed_checks) diff --git a/node_cli/operations/common.py b/node_cli/operations/common.py index 7c876fa8..c595a3b8 100644 --- a/node_cli/operations/common.py +++ b/node_cli/operations/common.py @@ -19,7 +19,6 @@ import logging import os -import secrets import shutil import stat import tarfile @@ -27,7 +26,6 @@ from node_cli.configs import ( FILEBEAT_CONFIG_PATH, - FLASK_SECRET_KEY_FILE, G_CONF_HOME, SRC_FILEBEAT_CONFIG_PATH, ) @@ -43,17 +41,6 @@ def configure_filebeat(): logger.info('Filebeat configured') -def configure_flask(): - if os.path.isfile(FLASK_SECRET_KEY_FILE): - logger.info('Flask secret key already exists') - else: - 
logger.info('Generating Flask secret key...') - flask_secret_key = secrets.token_urlsafe(16) - with open(FLASK_SECRET_KEY_FILE, 'w') as f: - f.write(flask_secret_key) - logger.info('Flask secret key generated and saved') - - def unpack_backup_archive(backup_path: str) -> None: logger.info('Unpacking backup archive...') with tarfile.open(backup_path) as tar: diff --git a/node_cli/operations/fair.py b/node_cli/operations/fair.py index f1e62305..096d1623 100644 --- a/node_cli/operations/fair.py +++ b/node_cli/operations/fair.py @@ -46,7 +46,7 @@ ) from node_cli.migrations.fair.from_boot import migrate_nftables_from_boot from node_cli.operations.base import checked_host, turn_off -from node_cli.operations.common import configure_filebeat, configure_flask, unpack_backup_archive +from node_cli.operations.common import configure_filebeat, unpack_backup_archive from node_cli.operations.config_repo import ( sync_skale_node, update_images, @@ -70,6 +70,7 @@ from node_cli.utils.meta import FairCliMetaManager from node_cli.utils.print_formatters import print_failed_requirements_checks from node_cli.utils.node_type import NodeMode, NodeType +from node_cli.utils.settings import save_settings logger = logging.getLogger(__name__) @@ -93,11 +94,11 @@ def init_fair_boot(env_filepath: str, env: dict) -> None: configure_nftables(enable_monitoring=enable_monitoring) prepare_host(env_filepath, env_type=env['ENV_TYPE']) + save_settings(node_type=NodeType.FAIR, node_mode=NodeMode.ACTIVE) link_env_file() mark_active_node() configure_filebeat() - configure_flask() generate_nginx_config() prepare_block_device(env['BLOCK_DEVICE'], force=env['ENFORCE_BTRFS'] == 'True') @@ -131,10 +132,10 @@ def init( configure_nftables() configure_filebeat() - configure_flask() generate_nginx_config() prepare_host(env_filepath, env_type=env['ENV_TYPE']) + save_settings(node_type=NodeType.FAIR, node_mode=node_mode) link_env_file() prepare_block_device(env['BLOCK_DEVICE'], force=env['ENFORCE_BTRFS'] == 'True') 
@@ -186,6 +187,7 @@ def update_fair_boot(env_filepath: str, env: dict, node_mode: NodeMode = NodeMod prepare_block_device(env['BLOCK_DEVICE'], force=env['ENFORCE_BTRFS'] == 'True') prepare_host(env_filepath, env['ENV_TYPE']) + save_settings(node_type=NodeType.FAIR, node_mode=NodeMode.ACTIVE) meta_manager = FairCliMetaManager() current_stream = meta_manager.get_meta_info().config_stream @@ -231,6 +233,7 @@ def update( generate_nginx_config() prepare_host(env_filepath, env['ENV_TYPE'], allocation=True) + save_settings(node_type=NodeType.FAIR, node_mode=node_mode) meta_manager = FairCliMetaManager() current_stream = meta_manager.get_meta_info().config_stream skip_cleanup = env.get('SKIP_DOCKER_CLEANUP') == 'True' diff --git a/node_cli/utils/docker_utils.py b/node_cli/utils/docker_utils.py index 695a1253..37e3fab9 100644 --- a/node_cli/utils/docker_utils.py +++ b/node_cli/utils/docker_utils.py @@ -338,9 +338,6 @@ def compose_up( run_cmd(cmd=get_up_compose_cmd(node_type=node_type, node_mode=node_mode), env=env) return - if 'SGX_CERTIFICATES_DIR_NAME' not in env: - env['SGX_CERTIFICATES_DIR_NAME'] = SGX_CERTIFICATES_DIR_NAME - if active_fair(node_type, node_mode): logger.info('Running fair base set of containers') if is_fair_boot: diff --git a/node_cli/utils/helper.py b/node_cli/utils/helper.py index dd1a7151..9d96a3ca 100644 --- a/node_cli/utils/helper.py +++ b/node_cli/utils/helper.py @@ -30,6 +30,7 @@ import urllib.parse import urllib.request import uuid +from pathlib import Path from functools import wraps from logging import Formatter, StreamHandler from typing import Any, NoReturn, Optional @@ -85,13 +86,13 @@ def write_json(path: str, content: dict) -> None: json.dump(content, outfile, indent=4) -def save_json(path: str, content: dict) -> None: +def save_json(path: str | Path, content: dict) -> None: tmp_path = get_tmp_path(path) write_json(tmp_path, content) shutil.move(tmp_path, path) -def init_file(path, content=None): +def init_file(path: str | Path, 
content=None): if not os.path.exists(path): write_json(path, content) @@ -339,7 +340,7 @@ def cleanup_dir_content(folder: str) -> None: shutil.rmtree(file_path) -def safe_mkdir(path: str, print_res: bool = False) -> None: +def safe_mkdir(path: str | Path, print_res: bool = False) -> None: if os.path.exists(path): logger.debug(f'Directory {path} already exists') return @@ -411,8 +412,8 @@ def convert(self, value, param, ctx): IP_TYPE = IpType() -def get_tmp_path(path: str) -> str: - base, ext = os.path.splitext(path) +def get_tmp_path(path: str | Path) -> str: + base, ext = os.path.splitext(str(path)) salt = uuid.uuid4().hex[:5] return base + salt + '.tmp' + ext diff --git a/node_cli/utils/node_type.py b/node_cli/utils/node_type.py index bf3f6d4f..754a1d69 100644 --- a/node_cli/utils/node_type.py +++ b/node_cli/utils/node_type.py @@ -20,9 +20,9 @@ from enum import Enum -class NodeType(Enum): - SKALE = 0 - FAIR = 1 +class NodeType(str, Enum): + SKALE = 'skale' + FAIR = 'fair' class NodeMode(str, Enum): diff --git a/node_cli/utils/print_formatters.py b/node_cli/utils/print_formatters.py index 7edec113..1da07d51 100644 --- a/node_cli/utils/print_formatters.py +++ b/node_cli/utils/print_formatters.py @@ -339,9 +339,6 @@ def format_timestamp(value): return str(value) -1 - - def print_chain_record(record): print( inspect.cleandoc(f""" diff --git a/node_cli/utils/settings.py b/node_cli/utils/settings.py new file mode 100644 index 00000000..c0b3aa76 --- /dev/null +++ b/node_cli/utils/settings.py @@ -0,0 +1,47 @@ +# -*- coding: utf-8 -*- +# +# This file is part of node-cli +# +# Copyright (C) 2026 SKALE Labs +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. 
+# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Affero General Public License for more details. +# +# You should have received a copy of the GNU Affero General Public License +# along with this program. If not, see . + +from skale.core.settings import ( + SETTINGS_MAP, + write_node_settings_file, + write_internal_settings_file, + InternalSettings, + SkaleSettings, + SkalePassiveSettings, + FairSettings, + FairBaseSettings, +) + +from node_cli.configs import NODE_SETTINGS_PATH, INTERNAL_SETTINGS_PATH + +from node_cli.utils.node_type import NodeMode, NodeType + +InternalSettings.model_config['toml_file'] = INTERNAL_SETTINGS_PATH +SkaleSettings.model_config['toml_file'] = NODE_SETTINGS_PATH +SkalePassiveSettings.model_config['toml_file'] = NODE_SETTINGS_PATH +FairSettings.model_config['toml_file'] = NODE_SETTINGS_PATH +FairBaseSettings.model_config['toml_file'] = NODE_SETTINGS_PATH + + +def save_settings(node_type: NodeType, node_mode: NodeMode) -> None: + write_internal_settings_file(path=INTERNAL_SETTINGS_PATH, data={}) # todof: fix + settings_type = SETTINGS_MAP[(node_type.value, node_mode.value)] + write_node_settings_file( + path=NODE_SETTINGS_PATH, settings_type=settings_type, data={} + ) # todof: fix diff --git a/pyproject.toml b/pyproject.toml index cf813bfa..3f2e25d1 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -10,15 +10,13 @@ readme = "README.md" requires-python = ">=3.13" license = { file = "LICENSE" } keywords = ["skale", "cli"] -authors = [ - { name = "SKALE Labs", email = "support@skalelabs.com" } -] +authors = [{ name = "SKALE Labs", email = "support@skalelabs.com" }] classifiers = [ - "Development Status :: 5 - Production/Stable", - "Intended Audience :: Developers", - "License :: OSI Approved :: GNU Affero General Public License v3", - "Natural Language :: English", - "Programming 
Language :: Python :: 3.13", + "Development Status :: 5 - Production/Stable", + "Intended Audience :: Developers", + "License :: OSI Approved :: GNU Affero General Public License v3", + "Natural Language :: English", + "Programming Language :: Python :: 3.13", ] dependencies = [ @@ -40,13 +38,14 @@ dependencies = [ "MarkupSafe==3.0.3", "Flask==3.1.2", "itsdangerous==2.2.0", - "cryptography==46.0.3", + "cryptography==46.0.5", "filelock==3.20.0", "sh==2.2.2", "python-crontab==3.3.0", "requests-mock==1.12.1", - "redis==7.1.0", - "PyInstaller==6.16.0", + "redis==7.1.1", + "PyInstaller==6.18.0", + "skale.py==7.12dev2", ] [project.urls] @@ -78,11 +77,13 @@ target-version = "py313" [tool.ruff.format] quote-style = "single" +[tool.uv] +prerelease = "allow" + + [tool.pytest.ini_options] log_cli = false log_cli_level = "INFO" log_cli_format = "%(asctime)s [%(levelname)8s] %(message)s (%(filename)s:%(lineno)s)" log_cli_date_format = "%Y-%m-%d %H:%M:%S" -filterwarnings = [ - "ignore::DeprecationWarning", -] +filterwarnings = ["ignore::DeprecationWarning"] diff --git a/scripts/export_env.sh b/scripts/export_env.sh new file mode 100644 index 00000000..af30b4ab --- /dev/null +++ b/scripts/export_env.sh @@ -0,0 +1,8 @@ +DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )" +PROJECT_DIR=$(dirname $DIR) + +export LVMPY_LOG_DIR="$PROJECT_DIR/tests/" +export HIDE_STREAM_LOG=true +export TEST_HOME_DIR="$PROJECT_DIR/tests/" +export GLOBAL_SKALE_DIR="$PROJECT_DIR/tests/etc/skale" +export DOTENV_FILEPATH='tests/test-env' \ No newline at end of file diff --git a/scripts/run_tests.sh b/scripts/run_tests.sh index efc72c6c..23592396 100755 --- a/scripts/run_tests.sh +++ b/scripts/run_tests.sh @@ -1,11 +1,3 @@ #!/usr/bin/env bash -DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )" -PROJECT_DIR=$(dirname $DIR) - -LVMPY_LOG_DIR="$PROJECT_DIR/tests/" \ - HIDE_STREAM_LOG=true \ - TEST_HOME_DIR="$PROJECT_DIR/tests/" \ - 
GLOBAL_SKALE_DIR="$PROJECT_DIR/tests/etc/skale" \ - DOTENV_FILEPATH='tests/test-env' \ - py.test --cov=$PROJECT_DIR/ --ignore=tests/core/nftables_test.py --ignore=tests/core/migration_test.py tests/ $@ +py.test --cov=$PROJECT_DIR/ --ignore=tests/core/nftables_test.py --ignore=tests/core/migration_test.py tests/ $@ diff --git a/tests/cli/node_test.py b/tests/cli/node_test.py index b31cddc0..bb5aa6d0 100644 --- a/tests/cli/node_test.py +++ b/tests/cli/node_test.py @@ -422,7 +422,6 @@ def test_turn_on_maintenance_off(mocked_g_config, regular_user_conf, active_node resp_mock = response_mock(requests.codes.ok, {'status': 'ok', 'payload': None}) with ( mock.patch('subprocess.run', new=subprocess_run_mock), - mock.patch('node_cli.core.node.get_flask_secret_key'), mock.patch('node_cli.core.node.turn_on_op'), mock.patch('node_cli.core.node.is_base_containers_alive'), mock.patch('node_cli.utils.decorators.is_node_inited', return_value=True), @@ -475,6 +474,7 @@ def test_node_version(meta_file_v2): == "{'version': '0.1.1', 'config_stream': 'develop', 'docker_lvmpy_version': '1.1.2'}\n" ) + def test_cleanup_node(mocked_g_config): pathlib.Path(SKALE_DIR).mkdir(parents=True, exist_ok=True) @@ -493,4 +493,4 @@ def test_cleanup_node(mocked_g_config): ): result = run_command(cleanup_node, ['--yes']) assert result.exit_code == 0 - cleanup_mock.assert_called_once_with(node_mode=NodeMode.ACTIVE, prune=False, env={}) \ No newline at end of file + cleanup_mock.assert_called_once_with(node_mode=NodeMode.ACTIVE, prune=False, env={}) diff --git a/tests/conftest.py b/tests/conftest.py index 5434e697..cc5a5fa3 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -380,7 +380,7 @@ def fair_user_conf(tmp_path): test_env_path = pathlib.Path(tmp_path / 'test-env') try: test_env = """ - BOOT_ENDPOINT=http://localhost:8545 + ENDPOINT=http://localhost:8545 NODE_VERSION='main' FILEBEAT_HOST=127.0.0.1:3010 SGX_SERVER_URL=http://127.0.0.1 diff --git a/tests/core/core_node_test.py 
b/tests/core/core_node_test.py index 953e7ca5..0fe113b2 100644 --- a/tests/core/core_node_test.py +++ b/tests/core/core_node_test.py @@ -243,7 +243,6 @@ def test_compose_node_env( inited_node, sync_schains, expected_mnt_dir, - expect_flask_key, expect_backup_run, ): user_config_path = request.getfixturevalue(test_user_conf) @@ -251,7 +250,6 @@ def test_compose_node_env( with ( mock.patch('node_cli.configs.user.validate_alias_or_address'), mock.patch('node_cli.core.node.save_env_params'), - mock.patch('node_cli.core.node.get_flask_secret_key', return_value='mock_secret'), ): result_env = compose_node_env( env_filepath=user_config_path.as_posix(), @@ -264,11 +262,6 @@ def test_compose_node_env( ) assert result_env['SCHAINS_MNT_DIR'] == expected_mnt_dir - assert ( - 'FLASK_SECRET_KEY' in result_env and result_env['FLASK_SECRET_KEY'] is not None - ) == expect_flask_key - if expect_flask_key: - assert result_env['FLASK_SECRET_KEY'] == 'mock_secret' should_have_backup = sync_schains and node_mode != NodeMode.PASSIVE assert ('BACKUP_RUN' in result_env and result_env['BACKUP_RUN'] == 'True') == should_have_backup @@ -358,7 +351,6 @@ def test_update_node(regular_user_conf, mocked_g_config, resource_file, inited_n with ( mock.patch('subprocess.run', new=subprocess_run_mock), mock.patch('node_cli.core.node.update_op'), - mock.patch('node_cli.core.node.get_flask_secret_key'), mock.patch('node_cli.core.node.save_env_params'), mock.patch('node_cli.operations.base.configure_nftables'), mock.patch('node_cli.core.host.prepare_host'), @@ -500,4 +492,5 @@ def test_cleanup_success( skip_user_conf_validation=True, ) mock_cleanup_skale_op.assert_called_once_with( - node_mode=NodeMode.ACTIVE, env=mock_env, prune=False) \ No newline at end of file + node_mode=NodeMode.ACTIVE, env=mock_env, prune=False + ) diff --git a/tests/utils/settings_test.py b/tests/utils/settings_test.py new file mode 100644 index 00000000..1d9deeb1 --- /dev/null +++ b/tests/utils/settings_test.py @@ -0,0 +1,20 @@ 
+# -*- coding: utf-8 -*- +# +# This file is part of node-cli +# +# Copyright (C) 2026 SKALE Labs +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Affero General Public License for more details. +# +# You should have received a copy of the GNU Affero General Public License +# along with this program. If not, see . + +from node_cli.utils.node_type import NodeMode, NodeType From 6e9c17318e3080358d281dbfe7efe7f421cd472b Mon Sep 17 00:00:00 2001 From: Dmytro Date: Mon, 16 Feb 2026 11:09:42 +0000 Subject: [PATCH 324/332] new settings structure --- node_cli/cli/fair_boot.py | 2 +- node_cli/cli/fair_node.py | 30 +-- node_cli/cli/node.py | 4 +- node_cli/cli/passive_fair_node.py | 18 +- node_cli/cli/schains.py | 16 +- node_cli/configs/__init__.py | 4 - node_cli/configs/_user.py | 212 ----------------- node_cli/core/checks.py | 4 +- node_cli/core/host.py | 10 +- node_cli/core/node.py | 101 ++++---- node_cli/core/resources.py | 18 +- node_cli/core/schains.py | 14 +- node_cli/core/static_config.py | 8 +- node_cli/fair/active.py | 35 ++- node_cli/fair/boot.py | 30 +-- node_cli/fair/common.py | 72 ++---- node_cli/fair/record/chain_record.py | 19 +- node_cli/operations/base.py | 254 ++++++++++++--------- node_cli/operations/config_repo.py | 11 +- node_cli/operations/docker_lvmpy.py | 6 +- node_cli/operations/fair.py | 205 ++++++++++------- node_cli/utils/docker_utils.py | 10 +- node_cli/utils/helper.py | 10 - node_cli/utils/settings.py | 54 ++++- tests/cli/fair_cli_test.py | 6 +- tests/cli/node_test.py | 4 +- tests/configs/configs_env_validate_test.py | 72 ------ 
tests/conftest.py | 6 + tests/core/core_node_test.py | 88 ++----- tests/fair/fair_node_test.py | 13 +- tests/fixtures/__init__.py | 0 tests/fixtures/settings.py | 131 +++++++++++ tests/utils/settings_test.py | 1 - 33 files changed, 683 insertions(+), 785 deletions(-) delete mode 100644 node_cli/configs/_user.py create mode 100644 tests/fixtures/__init__.py create mode 100644 tests/fixtures/settings.py diff --git a/node_cli/cli/fair_boot.py b/node_cli/cli/fair_boot.py index f5dce9b1..d5ff6039 100644 --- a/node_cli/cli/fair_boot.py +++ b/node_cli/cli/fair_boot.py @@ -88,6 +88,6 @@ def signature_boot(validator_id): @streamed_cmd def update_node(env_file, pull_config_for_schain): update( - env_filepath=env_file, + config_file=env_file, pull_config_for_schain=pull_config_for_schain, ) diff --git a/node_cli/cli/fair_node.py b/node_cli/cli/fair_node.py index 56ab9d75..efdf3149 100644 --- a/node_cli/cli/fair_node.py +++ b/node_cli/cli/fair_node.py @@ -57,10 +57,10 @@ def fair_node_info(format): @node.command('init', help='Initialize regular Fair node') -@click.argument('env_filepath') +@click.argument('config_file') @streamed_cmd -def init_node(env_filepath: str): - init_fair(node_mode=NodeMode.ACTIVE, env_filepath=env_filepath) +def init_node(config_file: str): + init_fair(node_mode=NodeMode.ACTIVE, config_file=config_file) @node.command('register', help=TEXTS['fair']['node']['register']['help']) @@ -70,7 +70,7 @@ def register(ip: str) -> None: @node.command('update', help='Update Fair node') -@click.argument('env_filepath') +@click.argument('config_file') @click.option( '--yes', is_flag=True, @@ -88,10 +88,10 @@ def register(ip: str) -> None: is_flag=True, ) @streamed_cmd -def update_node(env_filepath: str, pull_config_for_schain, force_skaled_start: bool): +def update_node(config_file: str, pull_config_for_schain, force_skaled_start: bool): update_fair( node_mode=NodeMode.ACTIVE, - env_filepath=env_filepath, + config_file=config_file, 
pull_config_for_schain=pull_config_for_schain, force_skaled_start=force_skaled_start, ) @@ -106,7 +106,7 @@ def backup_node(backup_folder_path): @node.command('restore', help='Restore Fair node from a backup file.') @click.argument('backup_path') -@click.argument('env_file') +@click.argument('config_file') @click.option( '--config-only', help='Only restore configuration files in .skale and artifacts', @@ -114,12 +114,12 @@ def backup_node(backup_folder_path): hidden=True, ) @streamed_cmd -def restore_node(backup_path, env_file, config_only): - restore_fair(backup_path, env_file, config_only) +def restore_node(backup_path, config_file, config_only): + restore_fair(backup_path, config_file, config_only) @node.command('migrate', help='Switch from boot to regular Fair node.') -@click.argument('env_filepath') +@click.argument('config_file') @click.option( '--yes', is_flag=True, @@ -128,8 +128,8 @@ def restore_node(backup_path, env_file, config_only): prompt='Are you sure you want to migrate to regular Fair node? 
The action cannot be undone', ) @streamed_cmd -def migrate_node(env_filepath: str) -> None: - migrate_from_boot(env_filepath=env_filepath) +def migrate_node(config_file: str) -> None: + migrate_from_boot(config_file=config_file) @node.command('repair', help='Toggle fair chain repair mode') @@ -221,7 +221,7 @@ def turn_off_node() -> None: expose_value=False, prompt='Are you sure you want to turn on the node?', ) -@click.argument('env_filepath') +@click.argument('config_file') @streamed_cmd -def turn_on_node(env_filepath: str) -> None: - turn_on_fair(env_file=env_filepath, node_type=TYPE) +def turn_on_node(config_file: str) -> None: + turn_on_fair(env_file=config_file, node_type=TYPE) diff --git a/node_cli/cli/node.py b/node_cli/cli/node.py index f13825d9..cb798a1c 100644 --- a/node_cli/cli/node.py +++ b/node_cli/cli/node.py @@ -108,7 +108,7 @@ def init_node(config_file): def update_node(config_file, pull_config_for_schain, unsafe_ok): update( node_mode=NodeMode.ACTIVE, - env_filepath=config_file, + config_file=config_file, pull_config_for_schain=pull_config_for_schain, node_type=TYPE, unsafe_ok=unsafe_ok, @@ -145,7 +145,7 @@ def backup_node(backup_folder_path): def restore_node(backup_path, env_file, no_snapshot, config_only): restore( backup_path=backup_path, - env_filepath=env_file, + config_file=env_file, no_snapshot=no_snapshot, config_only=config_only, node_type=TYPE, diff --git a/node_cli/cli/passive_fair_node.py b/node_cli/cli/passive_fair_node.py index 83eb1bd1..7bac3581 100644 --- a/node_cli/cli/passive_fair_node.py +++ b/node_cli/cli/passive_fair_node.py @@ -49,7 +49,7 @@ def passive_node(): @passive_node.command('init', help='Initialize a passive Fair node') -@click.argument('env_filepath') +@click.argument('config_file') @click.option('--id', required=True, type=int, help=TEXTS['fair']['node']['setup']['id']) @click.option('--indexer', help=TEXTS['passive_node']['init']['indexer'], is_flag=True) @click.option('--archive', 
help=TEXTS['passive_node']['init']['archive'], is_flag=True) @@ -61,7 +61,7 @@ def passive_node(): ) @streamed_cmd def init_passive_node( - env_filepath: str, id: int, indexer: bool, archive: bool, snapshot: str | None + config_file: str, id: int, indexer: bool, archive: bool, snapshot: str | None ): if indexer and archive: error_exit('Cannot use both --indexer and --archive options') @@ -69,7 +69,7 @@ def init_passive_node( error_exit('Cannot use any for indexer/archive node') init_fair( node_mode=NodeMode.PASSIVE, - env_filepath=env_filepath, + config_file=config_file, node_id=id, indexer=indexer, archive=archive, @@ -78,7 +78,7 @@ def init_passive_node( @passive_node.command('update', help='Update Fair node') -@click.argument('env_filepath') +@click.argument('config_file') @click.option( '--yes', is_flag=True, @@ -96,10 +96,10 @@ def init_passive_node( is_flag=True, ) @streamed_cmd -def update_node(env_filepath: str, pull_config_for_schain, force_skaled_start: bool): +def update_node(config_file: str, pull_config_for_schain, force_skaled_start: bool): update_fair( node_mode=NodeMode.PASSIVE, - env_filepath=env_filepath, + config_file=config_file, pull_config_for_schain=pull_config_for_schain, force_skaled_start=force_skaled_start, ) @@ -146,7 +146,7 @@ def turn_off_node() -> None: expose_value=False, prompt='Are you sure you want to turn on the node?', ) -@click.argument('env_filepath') +@click.argument('config_file') @streamed_cmd -def turn_on_node(env_filepath: str) -> None: - turn_on_fair(env_file=env_filepath, node_type=TYPE) +def turn_on_node(config_file: str) -> None: + turn_on_fair(env_file=config_file, node_type=TYPE) diff --git a/node_cli/cli/schains.py b/node_cli/cli/schains.py index 803ea754..40f1ab0c 100644 --- a/node_cli/cli/schains.py +++ b/node_cli/cli/schains.py @@ -21,6 +21,8 @@ import click +from skale.core.settings import get_settings + from node_cli.utils.helper import abort_if_false, URL_TYPE from node_cli.core.schains import ( describe, @@ 
-104,8 +106,12 @@ def info_(schain_name: str, json_format: bool) -> None: @click.argument('schain_name') @click.argument('snapshot_path') @click.option('--schain-type', default='medium') -@click.option('--env-type', default=None) -def restore( - schain_name: str, snapshot_path: str, schain_type: str, env_type: Optional[str] -) -> None: - restore_schain_from_snapshot(schain_name, snapshot_path, node_type=TYPE) +def restore(schain_name: str, snapshot_path: str, schain_type: str) -> None: + settings = get_settings() + restore_schain_from_snapshot( + schain_name, + snapshot_path, + node_type=TYPE, + env_type=settings.env_type, + schain_type=schain_type, + ) diff --git a/node_cli/configs/__init__.py b/node_cli/configs/__init__.py index f9fc0aa5..7ed6afee 100644 --- a/node_cli/configs/__init__.py +++ b/node_cli/configs/__init__.py @@ -55,7 +55,6 @@ CONTRACTS_PATH = os.path.join(SKALE_DIR, 'contracts_info') REPORTS_PATH = os.path.join(SKALE_DIR, 'reports') BACKUP_CONTRACTS_PATH = os.path.join(SKALE_DIR, '.old_contracts_info') -INIT_ENV_FILEPATH = os.path.join(SKALE_DIR, '.env') SKALE_RUN_DIR = '/var/run/skale' COMPOSE_PATH = os.path.join(CONTAINER_CONFIG_PATH, 'docker-compose.yml') @@ -77,9 +76,6 @@ SGX_CERTS_PATH = os.path.join(NODE_DATA_PATH, 'sgx_certs') SCHAINS_DATA_PATH = os.path.join(NODE_DATA_PATH, 'schains') -CURRENT_FILE_LOCATION = os.path.dirname(os.path.realpath(__file__)) -DOTENV_FILEPATH = os.path.join(os.path.dirname(CURRENT_FILE_LOCATION), '.env') - SRC_FILEBEAT_CONFIG_PATH = os.path.join(CONTAINER_CONFIG_PATH, 'filebeat.yml') FILEBEAT_CONFIG_PATH = os.path.join(NODE_DATA_PATH, 'filebeat.yml') diff --git a/node_cli/configs/_user.py b/node_cli/configs/_user.py deleted file mode 100644 index 6ab7b6f5..00000000 --- a/node_cli/configs/_user.py +++ /dev/null @@ -1,212 +0,0 @@ -# -*- coding: utf-8 -*- -# -# This file is part of node-cli -# -# Copyright (C) 2019-Present SKALE Labs -# -# This program is free software: you can redistribute it and/or modify -# it 
under the terms of the GNU Affero General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU Affero General Public License for more details. -# -# You should have received a copy of the GNU Affero General Public License -# along with this program. If not, see . - -import inspect -import os -from abc import ABC -from dataclasses import dataclass -from typing import Dict, NamedTuple - -from dotenv.main import DotEnv - -from node_cli.configs import CONTAINER_CONFIG_PATH, SKALE_DIR -from node_cli.configs.alias_address_validation import ContractType, validate_alias_or_address -from node_cli.utils.helper import error_exit -from node_cli.utils.node_type import NodeMode, NodeType -from node_cli.core.node_options import ( - active_fair, - active_skale, - passive_skale, - passive_fair, -) - -SKALE_DIR_ENV_FILEPATH = os.path.join(SKALE_DIR, '.env') -CONFIGS_ENV_FILEPATH = os.path.join(CONTAINER_CONFIG_PATH, '.env') - -ALLOWED_ENV_TYPES = ['mainnet', 'testnet', 'qanet', 'devnet'] - - -class ValidationResult(NamedTuple): - result: bool - missing: set - extra: set - - -@dataclass(kw_only=True) -class BaseUserConfig(ABC): - node_version: str - env_type: str - endpoint: str - filebeat_host: str - block_device: str - - container_configs_dir: str = '' - skip_docker_config: str = '' - skip_docker_cleanup: str = '' - - def to_env(self) -> Dict[str, str]: - result = {} - for field_name, field_value in self.__dict__.items(): - upper_key = field_name.upper() - result[upper_key] = str(field_value) if field_value is not None else '' - return result - - @classmethod - def validate_params(cls, params: Dict) -> ValidationResult: - parameters = inspect.signature(cls.__init__).parameters - missing = [] - 
keys = params.keys() - expected_keys = { - name.upper() - for name, value in parameters.items() - if name != 'self' and value.default == inspect._empty - } - optional_keys = { - name.upper() - for name, value in parameters.items() - if name != 'self' and value.default != inspect._empty - } - missing = expected_keys - keys - extra = keys - expected_keys - optional_keys - return ValidationResult(missing == set() and extra == set(), missing, extra) - - -@dataclass -class FairUserConfig(BaseUserConfig): - fair_contracts: str - sgx_server_url: str - enforce_btrfs: str = '' - telegraf: str = '' - influx_url: str = '' - - -@dataclass -class PassiveFairUserConfig(BaseUserConfig): - fair_contracts: str - enforce_btrfs: str = '' - - -@dataclass -class FairBootUserConfig(BaseUserConfig): - manager_contracts: str - ima_contracts: str - sgx_server_url: str - enforce_btrfs: str = '' - - -@dataclass -class SkaleUserConfig(BaseUserConfig): - manager_contracts: str - ima_contracts: str - docker_lvmpy_version: str - sgx_server_url: str - monitoring_containers: str = '' - telegraf: str = '' - influx_url: str = '' - tg_api_key: str = '' - tg_chat_id: str = '' - disable_dry_run: str = '' - default_gas_limit: str = '' - default_gas_price_wei: str = '' - bite: str = '' - - -@dataclass -class PassiveSkaleUserConfig(BaseUserConfig): - manager_contracts: str - schain_name: str = '' - ima_contracts: str = '' - enforce_btrfs: str = '' - bite: str = '' - - -def get_validated_user_config( - node_type: NodeType, - node_mode: NodeMode, - env_filepath: str = SKALE_DIR_ENV_FILEPATH, - is_fair_boot: bool = False, - skip_user_conf_validation: bool = False, -) -> BaseUserConfig: - params = parse_env_file(env_filepath) - user_config_class = get_user_config_class( - node_type=node_type, - node_mode=node_mode, - is_fair_boot=is_fair_boot, - ) - _, missing_params, extra_params = user_config_class.validate_params(params) - - if len(missing_params) > 0: - error_exit(f'Missing required parameters: 
{missing_params}') - - if len(extra_params) > 0: - error_exit(f'Extra parameters: {extra_params}') - - params = to_lower_keys(params) - user_config = user_config_class(**params) - if not skip_user_conf_validation: - validate_user_config(user_config) - - return user_config - - -def validate_user_config(user_config: BaseUserConfig) -> None: - validate_env_type(env_type=user_config.env_type) - - if not isinstance(user_config, FairUserConfig) and not isinstance( - user_config, PassiveFairUserConfig - ): - validate_alias_or_address( - user_config.manager_contracts, ContractType.MANAGER, user_config.endpoint - ) - - if isinstance(user_config, (SkaleUserConfig, FairBootUserConfig)): - validate_alias_or_address(user_config.ima_contracts, ContractType.IMA, user_config.endpoint) - - -def to_lower_keys(params: Dict[str, str]) -> Dict[str, str]: - return {key.lower(): value for key, value in params.items()} - - -def parse_env_file(env_filepath: str) -> Dict: - if not os.path.isfile(env_filepath): - error_exit(f'Failed to load environment from {env_filepath}') - return DotEnv(env_filepath).dict() - - -def get_user_config_class( - node_type: NodeType, - node_mode: NodeMode, - is_fair_boot: bool, -) -> type[BaseUserConfig]: - if node_type == NodeType.FAIR and is_fair_boot: - user_config_class = FairBootUserConfig - elif passive_fair(node_type, node_mode): - user_config_class = PassiveFairUserConfig - elif active_fair(node_type, node_mode): - user_config_class = FairUserConfig - elif passive_skale(node_type, node_mode): - user_config_class = PassiveSkaleUserConfig - elif active_skale(node_type, node_mode): - user_config_class = SkaleUserConfig - return user_config_class - - -def validate_env_type(env_type: str) -> None: - if env_type not in ALLOWED_ENV_TYPES: - error_exit(f'Allowed ENV_TYPE values are {ALLOWED_ENV_TYPES}. 
Actual: "{env_type}"') diff --git a/node_cli/core/checks.py b/node_cli/core/checks.py index a4c439c9..e7677847 100644 --- a/node_cli/core/checks.py +++ b/node_cli/core/checks.py @@ -48,6 +48,8 @@ from debian import debian_support from packaging.version import parse as version_parse +from skale.core.types import EnvType + from node_cli.configs import ( CHECK_REPORT_PATH, CONTAINER_CONFIG_PATH, @@ -471,7 +473,7 @@ def run_checks( disk: str, node_type: NodeType, node_mode: NodeMode, - env_type: str = 'mainnet', + env_type: EnvType = 'mainnet', config_path: str = CONTAINER_CONFIG_PATH, check_type: CheckType = CheckType.ALL, ) -> ResultList: diff --git a/node_cli/core/host.py b/node_cli/core/host.py index 920b651e..3a4b036a 100644 --- a/node_cli/core/host.py +++ b/node_cli/core/host.py @@ -19,9 +19,11 @@ import logging import os -from shutil import copyfile, chown +from shutil import chown from urllib.parse import urlparse +from skale.core.types import EnvType + from node_cli.core.resources import update_resource_allocation from node_cli.utils.helper import error_exit @@ -73,15 +75,11 @@ def fix_url(url): return False -def prepare_host(env_filepath: str, env_type: str, allocation: bool = False) -> None: - if not env_filepath or not env_type: - error_exit('Missing required parameters for host initialization') - +def prepare_host(env_type: EnvType, allocation: bool = False) -> None: try: logger.info('Preparing host started') make_dirs() chown(REDIS_DATA_PATH, user=999, group=1000) - save_env_params(env_filepath) if allocation: update_resource_allocation(env_type) diff --git a/node_cli/core/node.py b/node_cli/core/node.py index 191d2077..8bef033c 100644 --- a/node_cli/core/node.py +++ b/node_cli/core/node.py @@ -33,7 +33,6 @@ BACKUP_ARCHIVE_NAME, CONTAINER_CONFIG_PATH, FILESTORAGE_MAPPING, - INIT_ENV_FILEPATH, LOG_PATH, RESTORE_SLEEP_TIMEOUT, SCHAINS_MNT_DIR_REGULAR, @@ -88,6 +87,8 @@ print_node_cmd_error, print_node_info, ) +from node_cli.utils.settings import 
validate_and_save_node_settings +from skale.core.settings import get_settings from node_cli.utils.texts import safe_load_texts logger = logging.getLogger(__name__) @@ -153,49 +154,62 @@ def register_node(name, p2p_ip, public_ip, port, domain_name): @check_not_inited def init(config_file: str, node_type: NodeType) -> None: node_mode = NodeMode.ACTIVE - env = compose_node_env(node_type=node_type, node_mode=node_mode) + settings = validate_and_save_node_settings(config_file, node_type, node_mode) + compose_env = compose_node_env(node_type=node_type, node_mode=node_mode) - init_op(env_filepath=config_file, env=env, node_mode=node_mode) + init_op(settings=settings, compose_env=compose_env, node_mode=node_mode) logger.info('Waiting for containers initialization') time.sleep(TM_INIT_TIMEOUT) if not is_base_containers_alive(node_type=node_type, node_mode=node_mode): error_exit('Containers are not running', exit_code=CLIExitCodes.OPERATION_EXECUTION_ERROR) logger.info('Generating resource allocation file ...') - update_resource_allocation(env['ENV_TYPE']) + update_resource_allocation(settings.env_type) logger.info('Init procedure finished') @check_not_inited -def restore(backup_path, env_filepath, node_type: NodeType, no_snapshot=False, config_only=False): +def restore( + backup_path: str, + config_file: str, + node_type: NodeType, + no_snapshot: bool = False, + config_only: bool = False, +): node_mode = NodeMode.ACTIVE - env = compose_node_env(node_type=node_type, node_mode=node_mode) - if env is None: - return - save_env_params(env_filepath) - env['SKALE_DIR'] = SKALE_DIR - - if not no_snapshot: - logger.info('Adding BACKUP_RUN to env ...') - env['BACKUP_RUN'] = 'True' # should be str + settings = validate_and_save_node_settings(config_file, node_type, node_mode) + compose_env = compose_node_env(node_type=node_type, node_mode=node_mode) - restored_ok = restore_op(env, backup_path, node_type=node_type, config_only=config_only) + restored_ok = restore_op( + 
settings=settings, + compose_env=compose_env, + backup_path=backup_path, + node_type=node_type, + config_only=config_only, + backup_run=not no_snapshot, + ) if not restored_ok: error_exit('Restore operation failed', exit_code=CLIExitCodes.OPERATION_EXECUTION_ERROR) time.sleep(RESTORE_SLEEP_TIMEOUT) logger.info('Generating resource allocation file ...') - update_resource_allocation(env['ENV_TYPE']) + update_resource_allocation(settings.env_type) print('Node is restored from backup') @check_not_inited def init_passive( - env_filepath: str, indexer: bool, archive: bool, snapshot: bool, snapshot_from: Optional[str] + config_file: str, indexer: bool, archive: bool, snapshot: bool, snapshot_from: Optional[str] ) -> None: node_mode = NodeMode.PASSIVE - env = compose_node_env(env_filepath, node_type=NodeType.SKALE, node_mode=node_mode) - if env is None: - return - init_passive_op(env_filepath, env, indexer, archive, snapshot, snapshot_from) + settings = validate_and_save_node_settings(config_file, NodeType.SKALE, node_mode) + compose_env = compose_node_env(node_type=NodeType.SKALE, node_mode=node_mode) + init_passive_op( + settings=settings, + compose_env=compose_env, + indexer=indexer, + archive=archive, + snapshot=snapshot, + snapshot_from=snapshot_from, + ) logger.info('Waiting for containers initialization') time.sleep(TM_INIT_TIMEOUT) if not is_base_containers_alive(node_type=NodeType.SKALE, node_mode=node_mode): @@ -205,13 +219,14 @@ def init_passive( @check_inited @check_user -def update_passive(env_filepath: str) -> None: +def update_passive(config_file: str) -> None: logger.info('Node update started') prev_version = CliMetaManager().get_meta_info().version if (__version__ == 'test' or __version__.startswith('2.6')) and prev_version == '2.5.0': migrate_2_6() - env = compose_node_env(node_type=NodeType.SKALE, node_mode=NodeMode.PASSIVE) - update_ok = update_passive_op(env_filepath, env) + settings = validate_and_save_node_settings(config_file, NodeType.SKALE, 
NodeMode.PASSIVE) + compose_env = compose_node_env(node_type=NodeType.SKALE, node_mode=NodeMode.PASSIVE) + update_ok = update_passive_op(settings=settings, compose_env=compose_env) if update_ok: logger.info('Waiting for containers initialization') time.sleep(TM_INIT_TIMEOUT) @@ -248,7 +263,7 @@ def compose_node_env(node_type: NodeType, node_mode: NodeMode) -> dict[str, str] @check_inited @check_user def update( - env_filepath: str, + config_file: str, pull_config_for_schain: Optional[str], node_type: NodeType, node_mode: NodeMode, @@ -264,15 +279,9 @@ def update( if (__version__ == 'test' or __version__.startswith('2.6')) and prev_version == '2.5.0': migrate_2_6() logger.info('Node update started') - env = compose_node_env( - # env_filepath, - # inited_node=True, - # sync_schains=False, - # pull_config_for_schain=pull_config_for_schain, - node_type=node_type, - node_mode=node_mode, - ) - update_ok = update_op(env_filepath, env, node_mode=node_mode) + settings = validate_and_save_node_settings(config_file, node_type, node_mode) + compose_env = compose_node_env(node_type=node_type, node_mode=node_mode) + update_ok = update_op(settings=settings, compose_env=compose_env, node_mode=node_mode) if update_ok: logger.info('Waiting for containers initialization') time.sleep(TM_INIT_TIMEOUT) @@ -385,24 +394,24 @@ def turn_off(node_type: NodeType, maintenance_on: bool = False, unsafe_ok: bool error_exit(error_msg, exit_code=CLIExitCodes.UNSAFE_UPDATE) if maintenance_on: set_maintenance_mode_on() - env = compose_node_env( - SKALE_DIR_ENV_FILEPATH, save=False, node_type=node_type, node_mode=node_mode - ) - turn_off_op(node_type=node_type, node_mode=node_mode, env=env) + compose_env = compose_node_env(node_type=node_type, node_mode=node_mode) + turn_off_op(compose_env=compose_env, node_type=node_type, node_mode=node_mode) @check_inited @check_user -def turn_on(maintenance_off, sync_schains, env_file, node_type: NodeType) -> None: +def turn_on(maintenance_off: bool, sync_schains: 
bool, env_file: str, node_type: NodeType) -> None: node_mode = upsert_node_mode() - env = compose_node_env( - env_file, - inited_node=True, - sync_schains=sync_schains, + settings = validate_and_save_node_settings(env_file, node_type, node_mode) + compose_env = compose_node_env(node_type=node_type, node_mode=node_mode) + backup_run = sync_schains and node_mode != NodeMode.PASSIVE + turn_on_op( + settings=settings, + compose_env=compose_env, node_type=node_type, node_mode=node_mode, + backup_run=backup_run, ) - turn_on_op(env=env, node_type=node_type, node_mode=node_mode) logger.info('Waiting for containers initialization') time.sleep(TM_INIT_TIMEOUT) if not is_base_containers_alive(node_type=node_type, node_mode=node_mode): @@ -497,8 +506,8 @@ def run_checks( return if disk is None: - env_config = get_validated_user_config(node_type=node_type, node_mode=node_mode) - disk = env_config.block_device + settings = get_settings() + disk = settings.block_device failed_checks = run_host_checks(disk, node_type, node_mode, network, container_config_path) if not failed_checks: print('Requirements checking successfully finished!') diff --git a/node_cli/core/resources.py b/node_cli/core/resources.py index 430eee86..9a02d0b9 100644 --- a/node_cli/core/resources.py +++ b/node_cli/core/resources.py @@ -24,7 +24,9 @@ import psutil -from node_cli.configs.user import get_validated_user_config +from skale.core.types import EnvType + +from node_cli.utils.settings import validate_and_save_node_settings from node_cli.utils.docker_utils import ensure_volume from node_cli.utils.schain_types import SchainTypes from node_cli.utils.helper import write_json, read_json, run_cmd, safe_load_yml @@ -73,7 +75,7 @@ def get_resource_allocation_info(): return None -def compose_resource_allocation_config(env_type: str, params_by_env_type: Dict = None) -> Dict: +def compose_resource_allocation_config(env_type: EnvType, params_by_env_type: Dict = None) -> Dict: params_by_env_type = params_by_env_type or 
safe_load_yml(STATIC_PARAMS_FILEPATH) common_config = params_by_env_type['common'] schain_cpu_alloc, ima_cpu_alloc = get_cpu_alloc(common_config) @@ -93,22 +95,20 @@ def compose_resource_allocation_config(env_type: str, params_by_env_type: Dict = def generate_resource_allocation_config( - env_file, + env_file: str, node_type: NodeType, node_mode: NodeMode, - force=False, + force: bool = False, ) -> None: if not force and os.path.isfile(RESOURCE_ALLOCATION_FILEPATH): msg = 'Resource allocation file already exists' logger.debug(msg) print(msg) return - user_config = get_validated_user_config( - node_type=node_type, node_mode=node_mode, env_filepath=env_file - ) + settings = validate_and_save_node_settings(env_file, node_type, node_mode) logger.info('Generating resource allocation file ...') try: - update_resource_allocation(user_config.env_type) + update_resource_allocation(settings.env_type) except Exception as e: logger.exception(e) print("Can't generate resource allocation file, check out CLI logs") @@ -116,7 +116,7 @@ def generate_resource_allocation_config( print(f'Resource allocation file generated: {RESOURCE_ALLOCATION_FILEPATH}') -def update_resource_allocation(env_type: str) -> None: +def update_resource_allocation(env_type: EnvType) -> None: resource_allocation_config = compose_resource_allocation_config(env_type) write_json(RESOURCE_ALLOCATION_FILEPATH, resource_allocation_config) diff --git a/node_cli/core/schains.py b/node_cli/core/schains.py index 64c16031..174430e7 100644 --- a/node_cli/core/schains.py +++ b/node_cli/core/schains.py @@ -26,6 +26,8 @@ from pathlib import Path from typing import Dict, Optional +from skale.core.types import EnvType + from lvmpy.src.core import mount, volume_mountpoint from node_cli.configs import ( ALLOCATION_FILEPATH, @@ -34,7 +36,6 @@ SCHAIN_NODE_DATA_PATH, SCHAINS_MNT_DIR_SINGLE_CHAIN, ) -from node_cli.configs.user import get_validated_user_config from node_cli.utils.docker_utils import ensure_volume, is_volume_exists 
from node_cli.utils.exit_codes import CLIExitCodes from node_cli.utils.helper import ( @@ -214,12 +215,9 @@ def restore_schain_from_snapshot( schain: str, snapshot_path: str, node_type: NodeType, - env_type: Optional[str] = None, + env_type: EnvType, schain_type: str = 'medium', ) -> None: - if env_type is None: - user_config = get_validated_user_config(node_type=node_type) - env_type = user_config.env_type ensure_schain_volume(schain, schain_type, env_type) block_number = get_block_number_from_path(snapshot_path) if block_number == -1: @@ -242,12 +240,12 @@ def get_schains_by_artifacts() -> str: return '\n'.join(os.listdir(SCHAIN_NODE_DATA_PATH)) -def get_schain_volume_size(schain_type: str, env_type: str) -> int: +def get_schain_volume_size(schain_type: str, env_type: EnvType) -> int: alloc = safe_load_yml(ALLOCATION_FILEPATH) return alloc[env_type]['disk'][schain_type] -def ensure_schain_volume(schain: str, schain_type: str, env_type: str) -> None: +def ensure_schain_volume(schain: str, schain_type: str, env_type: EnvType) -> None: if not is_volume_exists(schain): size = get_schain_volume_size(schain_type, env_type) ensure_volume(schain, size) @@ -313,4 +311,4 @@ def cleanup_lvm_datadir(): cleanup_dir_content('/mnt/') logger.info('Removing LVM volume group "schains"...') run_cmd(['sudo', 'lvremove', '-f', 'schains'], check_code=False) - logger.info('Active node cleanup finished.') \ No newline at end of file + logger.info('Active node cleanup finished.') diff --git a/node_cli/core/static_config.py b/node_cli/core/static_config.py index a93a7a60..c2b90641 100644 --- a/node_cli/core/static_config.py +++ b/node_cli/core/static_config.py @@ -28,10 +28,12 @@ ) from node_cli.utils.node_type import NodeType +from skale.core.types import EnvType + def get_static_params( node_type: NodeType, - env_type: str = 'mainnet', + env_type: EnvType = 'mainnet', config_path: str = CONTAINER_CONFIG_PATH, ) -> dict: if node_type == NodeType.FAIR: @@ -46,7 +48,7 @@ def 
get_static_params( return ydata['envs'][env_type] -def get_fair_chain_name(env: dict) -> str: +def get_fair_chain_name(env_type: EnvType) -> str: node_type = NodeType.FAIR - params = get_static_params(node_type, env['ENV_TYPE']) + params = get_static_params(node_type, env_type) return params['info']['chain_name'] diff --git a/node_cli/fair/active.py b/node_cli/fair/active.py index 963686fe..2ec42bde 100644 --- a/node_cli/fair/active.py +++ b/node_cli/fair/active.py @@ -22,8 +22,8 @@ import time from typing import cast -from node_cli.configs import DEFAULT_SKALED_BASE_PORT, RESTORE_SLEEP_TIMEOUT, SKALE_DIR -from node_cli.core.host import is_node_inited, save_env_params +from node_cli.configs import DEFAULT_SKALED_BASE_PORT, RESTORE_SLEEP_TIMEOUT +from node_cli.core.host import is_node_inited from node_cli.core.node import compose_node_env, is_base_containers_alive from node_cli.operations import ( FairUpdateType, @@ -35,6 +35,7 @@ from node_cli.utils.helper import error_exit, get_request, post_request from node_cli.utils.node_type import NodeMode, NodeType from node_cli.utils.print_formatters import print_node_cmd_error, print_node_info_fair +from node_cli.utils.settings import validate_and_save_node_settings from node_cli.utils.texts import safe_load_texts logger = logging.getLogger(__name__) @@ -63,19 +64,14 @@ def get_node_info(format): @check_inited @check_user def migrate_from_boot( - env_filepath: str, + config_file: str, ) -> None: logger.info('Migrating from boot to fair node...') - env = compose_node_env( - env_filepath, - inited_node=True, - sync_schains=False, - node_type=NodeType.FAIR, - node_mode=NodeMode.ACTIVE, - ) + settings = validate_and_save_node_settings(config_file, NodeType.FAIR, NodeMode.ACTIVE) + compose_env = compose_node_env(node_type=NodeType.FAIR, node_mode=NodeMode.ACTIVE) migrate_ok = update_fair_op( - env_filepath, - env, + settings=settings, + compose_env=compose_env, node_mode=NodeMode.ACTIVE, update_type=FairUpdateType.FROM_BOOT, 
force_skaled_start=False, @@ -145,16 +141,17 @@ def exit() -> None: @check_not_inited -def restore(backup_path, env_filepath, config_only=False): +def restore(backup_path: str, config_file: str, config_only: bool = False): node_mode = NodeMode.ACTIVE - env = compose_node_env(env_filepath, node_type=NodeType.FAIR, node_mode=node_mode) - if env is None: - return - save_env_params(env_filepath) - env['SKALE_DIR'] = SKALE_DIR + settings = validate_and_save_node_settings(config_file, NodeType.FAIR, node_mode) + compose_env = compose_node_env(node_type=NodeType.FAIR, node_mode=node_mode) restored_ok = restore_fair_op( - node_mode=node_mode, env=env, backup_path=backup_path, config_only=config_only + node_mode=node_mode, + settings=settings, + compose_env=compose_env, + backup_path=backup_path, + config_only=config_only, ) if not restored_ok: error_exit('Restore operation failed', exit_code=CLIExitCodes.OPERATION_EXECUTION_ERROR) diff --git a/node_cli/fair/boot.py b/node_cli/fair/boot.py index a6415f75..d9da98fa 100644 --- a/node_cli/fair/boot.py +++ b/node_cli/fair/boot.py @@ -30,22 +30,19 @@ from node_cli.utils.helper import error_exit from node_cli.utils.node_type import NodeMode, NodeType from node_cli.utils.print_formatters import print_node_cmd_error +from node_cli.utils.settings import validate_and_save_node_settings logger = logging.getLogger(__name__) @check_not_inited -def init(env_filepath: str) -> None: +def init(config_file: str) -> None: node_mode = NodeMode.ACTIVE node_type = NodeType.FAIR - env = compose_node_env( - env_filepath, - node_type=node_type, - node_mode=node_mode, - is_fair_boot=True, - ) + settings = validate_and_save_node_settings(config_file, node_type, node_mode) + compose_env = compose_node_env(node_type=node_type, node_mode=node_mode) - init_fair_boot_op(env_filepath, env, node_mode) + init_fair_boot_op(settings=settings, compose_env=compose_env, node_mode=node_mode) logger.info('Waiting for fair containers initialization') 
time.sleep(TM_INIT_TIMEOUT) if not is_base_containers_alive(node_type=node_type, node_mode=node_mode, is_fair_boot=True): @@ -55,19 +52,16 @@ def init(env_filepath: str) -> None: @check_inited @check_user -def update(env_filepath: str, pull_config_for_schain: str) -> None: +def update(config_file: str, pull_config_for_schain: str) -> None: logger.info('Fair boot node update started') node_mode = upsert_node_mode(node_mode=NodeMode.ACTIVE) - env = compose_node_env( - env_filepath, - inited_node=True, - sync_schains=False, - pull_config_for_schain=pull_config_for_schain, - node_type=NodeType.FAIR, - node_mode=node_mode, - is_fair_boot=True, + settings = validate_and_save_node_settings(config_file, NodeType.FAIR, node_mode) + compose_env = compose_node_env(node_type=NodeType.FAIR, node_mode=node_mode) + migrate_ok = update_fair_boot_op( + settings=settings, + compose_env=compose_env, + node_mode=NodeMode.ACTIVE, ) - migrate_ok = update_fair_boot_op(env_filepath, env, node_mode=NodeMode.ACTIVE) if migrate_ok: logger.info('Waiting for containers initialization') time.sleep(TM_INIT_TIMEOUT) diff --git a/node_cli/fair/common.py b/node_cli/fair/common.py index 968891d4..3b506c74 100644 --- a/node_cli/fair/common.py +++ b/node_cli/fair/common.py @@ -20,9 +20,7 @@ import logging import time -from node_cli.configs import INIT_TIMEOUT, SKALE_DIR, TM_INIT_TIMEOUT -from node_cli.configs.user import SKALE_DIR_ENV_FILEPATH -from node_cli.core.host import save_env_params +from node_cli.configs import INIT_TIMEOUT, TM_INIT_TIMEOUT from node_cli.core.node import compose_node_env, is_base_containers_alive from node_cli.core.node_options import upsert_node_mode from node_cli.fair.passive import setup_fair_passive @@ -40,7 +38,9 @@ from node_cli.utils.helper import error_exit from node_cli.utils.node_type import NodeMode, NodeType from node_cli.utils.print_formatters import print_node_cmd_error +from node_cli.utils.settings import validate_and_save_node_settings from 
node_cli.utils.texts import safe_load_texts +from skale.core.settings import get_settings logger = logging.getLogger(__name__) TEXTS = safe_load_texts() @@ -49,21 +49,18 @@ @check_not_inited def init( node_mode: NodeMode, - env_filepath: str, + config_file: str, node_id: int | None = None, indexer: bool = False, archive: bool = False, snapshot: str | None = None, ) -> None: - env = compose_node_env(env_filepath, node_type=NodeType.FAIR, node_mode=node_mode) - if env is None: - return - save_env_params(env_filepath) - env['SKALE_DIR'] = SKALE_DIR + settings = validate_and_save_node_settings(config_file, NodeType.FAIR, node_mode) + compose_env = compose_node_env(node_type=NodeType.FAIR, node_mode=node_mode) init_ok = init_fair_op( - env_filepath, - env, + settings=settings, + compose_env=compose_env, node_mode=node_mode, indexer=indexer, archive=archive, @@ -82,14 +79,8 @@ def init( @check_user def cleanup(node_mode: NodeMode, prune: bool = False) -> None: node_mode = upsert_node_mode(node_mode=node_mode) - env = compose_node_env( - SKALE_DIR_ENV_FILEPATH, - save=False, - node_type=NodeType.FAIR, - node_mode=node_mode, - skip_user_conf_validation=True, - ) - cleanup_fair_op(node_mode=node_mode, env=env, prune=prune) + compose_env = compose_node_env(node_type=NodeType.FAIR, node_mode=node_mode) + cleanup_fair_op(node_mode=node_mode, compose_env=compose_env, prune=prune) logger.info('Fair node was cleaned up, all containers and data removed') @@ -97,29 +88,23 @@ def cleanup(node_mode: NodeMode, prune: bool = False) -> None: @check_user def update( node_mode: NodeMode, - env_filepath: str, + config_file: str, pull_config_for_schain: str | None = None, force_skaled_start: bool = False, ) -> None: logger.info( 'Updating fair node: %s, pull_config_for_schain: %s, force_skaled_start: %s', - env_filepath, + config_file, pull_config_for_schain, force_skaled_start, ) node_mode = upsert_node_mode(node_mode=node_mode) - env = compose_node_env( - env_filepath, - inited_node=True, 
- sync_schains=False, - node_type=NodeType.FAIR, - node_mode=node_mode, - pull_config_for_schain=pull_config_for_schain, - ) + settings = validate_and_save_node_settings(config_file, NodeType.FAIR, node_mode) + compose_env = compose_node_env(node_type=NodeType.FAIR, node_mode=node_mode) update_ok = update_fair_op( - env_filepath, - env, + settings=settings, + compose_env=compose_env, node_mode=node_mode, update_type=FairUpdateType.REGULAR, force_skaled_start=force_skaled_start, @@ -133,34 +118,25 @@ def update( def repair_chain(snapshot_from: str = 'any') -> None: - node_mode = upsert_node_mode() - env = compose_node_env( - SKALE_DIR_ENV_FILEPATH, save=False, node_type=NodeType.FAIR, node_mode=node_mode - ) - repair_fair_op(env=env, snapshot_from=snapshot_from) + settings = get_settings() + repair_fair_op(env_type=settings.env_type, snapshot_from=snapshot_from) @check_inited @check_user def turn_off(node_type: NodeType) -> None: node_mode = upsert_node_mode() - env = compose_node_env( - SKALE_DIR_ENV_FILEPATH, save=False, node_type=node_type, node_mode=node_mode - ) - turn_off_op(node_type=node_type, node_mode=node_mode, env=env) + compose_env = compose_node_env(node_type=node_type, node_mode=node_mode) + turn_off_op(compose_env=compose_env, node_type=node_type, node_mode=node_mode) @check_inited @check_user -def turn_on(env_file, node_type: NodeType) -> None: +def turn_on(env_file: str, node_type: NodeType) -> None: node_mode = upsert_node_mode() - env = compose_node_env( - env_file, - inited_node=True, - node_type=node_type, - node_mode=node_mode, - ) - turn_on_op(env=env, node_type=node_type, node_mode=node_mode) + settings = validate_and_save_node_settings(env_file, node_type, node_mode) + compose_env = compose_node_env(node_type=node_type, node_mode=node_mode) + turn_on_op(settings=settings, compose_env=compose_env, node_type=node_type, node_mode=node_mode) logger.info('Waiting for containers initialization') time.sleep(TM_INIT_TIMEOUT) if not 
is_base_containers_alive(node_type=node_type, node_mode=node_mode): diff --git a/node_cli/fair/record/chain_record.py b/node_cli/fair/record/chain_record.py index d4ac740c..16ff8a72 100644 --- a/node_cli/fair/record/chain_record.py +++ b/node_cli/fair/record/chain_record.py @@ -22,6 +22,8 @@ from typing import cast from datetime import datetime +from skale.core.types import EnvType + from node_cli.core.static_config import get_fair_chain_name from node_cli.fair.record.redis_record import FlatRedisRecord, FieldInfo @@ -78,18 +80,17 @@ def set_force_skaled_start(self, value: bool) -> None: self._set_field('force_skaled_start', value) -def get_fair_chain_record(env: dict) -> ChainRecord: - return ChainRecord(get_fair_chain_name(env)) +def get_fair_chain_record(env_type: EnvType) -> ChainRecord: + return ChainRecord(get_fair_chain_name(env_type)) -def migrate_chain_record(env: dict) -> None: - version = env['NODE_VERSION'] - logger.info('Migrating fair chain record, setting config version to %s', version) - record = get_fair_chain_record(env) - record.set_config_version(version) +def migrate_chain_record(env_type: EnvType, node_version: str) -> None: + logger.info('Migrating fair chain record, setting config version to %s', node_version) + record = get_fair_chain_record(env_type) + record.set_config_version(node_version) -def update_chain_record(env: dict, force_skaled_start: bool) -> None: - record = get_fair_chain_record(env) +def update_chain_record(env_type: EnvType, force_skaled_start: bool) -> None: + record = get_fair_chain_record(env_type) record.set_force_skaled_start(force_skaled_start) logger.info('Updated fair chain record with force_skaled_start=%s', force_skaled_start) diff --git a/node_cli/operations/base.py b/node_cli/operations/base.py index 713a6775..c164b11a 100644 --- a/node_cli/operations/base.py +++ b/node_cli/operations/base.py @@ -20,10 +20,12 @@ import functools import logging import time -from typing import Dict, Optional +from typing import 
Optional import distro +from skale.core.settings import BaseNodeSettings, SkalePassiveSettings, SkaleSettings, get_settings + from node_cli.cli.info import TYPE, VERSION from node_cli.configs import ( CONTAINER_CONFIG_PATH, @@ -37,7 +39,6 @@ from node_cli.core.docker_config import cleanup_docker_configuration, configure_docker from node_cli.core.host import ( ensure_btrfs_kernel_module_autoloaded, - link_env_file, prepare_host, ) from node_cli.core.nftables import configure_nftables @@ -72,24 +73,26 @@ remove_dynamic_containers, system_prune, ) -from node_cli.utils.helper import cleanup_dir_content, rm_dir, str_to_bool +from node_cli.utils.helper import cleanup_dir_content, rm_dir from node_cli.utils.meta import CliMetaManager, FairCliMetaManager from node_cli.utils.node_type import NodeMode, NodeType from node_cli.utils.print_formatters import print_failed_requirements_checks -from node_cli.utils.settings import save_settings +from node_cli.utils.settings import save_internal_settings logger = logging.getLogger(__name__) def checked_host(func): @functools.wraps(func) - def wrapper(env_filepath: str, env: Dict, node_mode: NodeMode, *args, **kwargs): - download_skale_node(env.get('NODE_VERSION'), env.get('CONTAINER_CONFIGS_DIR')) + def wrapper( + settings: BaseNodeSettings, compose_env: dict, node_mode: NodeMode, *args, **kwargs + ): + download_skale_node(settings.node_version, settings.container_configs_dir or None) failed_checks = run_host_checks( - env['BLOCK_DEVICE'], + settings.block_device, TYPE, node_mode, - env['ENV_TYPE'], + settings.env_type, CONTAINER_CONFIG_TMP_PATH, check_type=CheckType.PREINSTALL, ) @@ -97,15 +100,15 @@ def wrapper(env_filepath: str, env: Dict, node_mode: NodeMode, *args, **kwargs): print_failed_requirements_checks(failed_checks) return False - result = func(env_filepath, env, node_mode, *args, **kwargs) + result = func(settings, compose_env, node_mode, *args, **kwargs) if not result: return result failed_checks = run_host_checks( - 
env['BLOCK_DEVICE'], + settings.block_device, TYPE, node_mode, - env['ENV_TYPE'], + settings.env_type, CONTAINER_CONFIG_PATH, check_type=CheckType.POSTINSTALL, ) @@ -118,111 +121,116 @@ def wrapper(env_filepath: str, env: Dict, node_mode: NodeMode, *args, **kwargs): @checked_host -def update(env_filepath: str, env: Dict, node_mode: NodeMode) -> bool: - compose_rm(node_type=NodeType.SKALE, node_mode=node_mode, env=env) +def update(settings: BaseNodeSettings, compose_env: dict, node_mode: NodeMode) -> bool: + compose_rm(node_type=NodeType.SKALE, node_mode=node_mode, env=compose_env) remove_dynamic_containers() sync_skale_node() ensure_btrfs_kernel_module_autoloaded() - if env.get('SKIP_DOCKER_CONFIG') != 'True': + if not settings.skip_docker_config: configure_docker() - enable_monitoring = str_to_bool(env.get('MONITORING_CONTAINERS', 'False')) - configure_nftables(enable_monitoring=enable_monitoring) + configure_nftables(enable_monitoring=settings.monitoring_containers) - lvmpy_install(env) + lvmpy_install(settings.block_device) generate_nginx_config() - prepare_host(env_filepath, env['ENV_TYPE'], allocation=True) - save_settings(node_type=NodeType.SKALE, node_mode=node_mode) - init_shared_space_volume(env['ENV_TYPE']) + prepare_host(settings.env_type, allocation=True) + save_internal_settings(node_type=NodeType.SKALE, node_mode=node_mode) + init_shared_space_volume(settings.env_type) meta_manager = CliMetaManager() current_stream = meta_manager.get_meta_info().config_stream - skip_cleanup = env.get('SKIP_DOCKER_CLEANUP') == 'True' - if not skip_cleanup and current_stream != env['NODE_VERSION']: + if not settings.skip_docker_cleanup and current_stream != settings.node_version: logger.info( 'Stream version was changed from %s to %s', current_stream, - env['NODE_VERSION'], + settings.node_version, ) docker_cleanup() + skale_settings = get_settings(SkaleSettings) meta_manager.update_meta( VERSION, - env['NODE_VERSION'], - env['DOCKER_LVMPY_VERSION'], + 
settings.node_version, + skale_settings.docker_lvmpy_version, distro.id(), distro.version(), ) - update_images(env=env, node_type=NodeType.SKALE, node_mode=node_mode) - compose_up(env=env, node_type=NodeType.SKALE, node_mode=node_mode) + update_images( + compose_env=compose_env, + container_configs_dir=settings.container_configs_dir, + node_type=NodeType.SKALE, + node_mode=node_mode, + ) + compose_up(env=compose_env, settings=settings, node_type=NodeType.SKALE, node_mode=node_mode) return True @checked_host -def init(env_filepath: str, env: dict, node_mode: NodeMode) -> None: +def init(settings: BaseNodeSettings, compose_env: dict, node_mode: NodeMode) -> None: sync_skale_node() ensure_btrfs_kernel_module_autoloaded() - if env.get('SKIP_DOCKER_CONFIG') != 'True': + if not settings.skip_docker_config: configure_docker() - enable_monitoring = str_to_bool(env.get('MONITORING_CONTAINERS', 'False')) - configure_nftables(enable_monitoring=enable_monitoring) + configure_nftables(enable_monitoring=settings.monitoring_containers) - prepare_host(env_filepath, env_type=env['ENV_TYPE']) - save_settings(node_type=NodeType.SKALE, node_mode=node_mode) - link_env_file() + prepare_host(env_type=settings.env_type) + save_internal_settings(node_type=NodeType.SKALE, node_mode=node_mode) mark_active_node() configure_filebeat() generate_nginx_config() - lvmpy_install(env) - init_shared_space_volume(env['ENV_TYPE']) + lvmpy_install(settings.block_device) + init_shared_space_volume(settings.env_type) + skale_settings = get_settings(SkaleSettings) meta_manager = CliMetaManager() meta_manager.update_meta( VERSION, - env['NODE_VERSION'], - env['DOCKER_LVMPY_VERSION'], + settings.node_version, + skale_settings.docker_lvmpy_version, distro.id(), distro.version(), ) - update_resource_allocation(env_type=env['ENV_TYPE']) - update_images(env=env, node_type=NodeType.SKALE, node_mode=node_mode) - compose_up(env=env, node_type=NodeType.SKALE, node_mode=node_mode) + 
update_resource_allocation(env_type=settings.env_type) + update_images( + compose_env=compose_env, + container_configs_dir=settings.container_configs_dir, + node_type=NodeType.SKALE, + node_mode=node_mode, + ) + compose_up(env=compose_env, settings=settings, node_type=NodeType.SKALE, node_mode=node_mode) def init_passive( - env_filepath: str, - env: dict, + settings: BaseNodeSettings, + compose_env: dict, indexer: bool, archive: bool, snapshot: bool, snapshot_from: Optional[str], ) -> None: - cleanup_volume_artifacts(env['BLOCK_DEVICE']) - download_skale_node(env.get('NODE_VERSION'), env.get('CONTAINER_CONFIGS_DIR')) + cleanup_volume_artifacts(settings.block_device) + download_skale_node(settings.node_version, settings.container_configs_dir or None) sync_skale_node() - if env.get('SKIP_DOCKER_CONFIG') != 'True': + if not settings.skip_docker_config: configure_docker() - enable_monitoring = str_to_bool(env.get('MONITORING_CONTAINERS', 'False')) - configure_nftables(enable_monitoring=enable_monitoring) + configure_nftables(enable_monitoring=settings.monitoring_containers) - prepare_host( - env_filepath, - env_type=env['ENV_TYPE'], - ) + prepare_host(env_type=settings.env_type) + save_internal_settings(node_type=NodeType.SKALE, node_mode=NodeMode.PASSIVE) failed_checks = run_host_checks( - env['BLOCK_DEVICE'], + settings.block_device, TYPE, NodeMode.PASSIVE, - env['ENV_TYPE'], + settings.env_type, CONTAINER_CONFIG_PATH, check_type=CheckType.PREINSTALL, ) @@ -232,55 +240,64 @@ def init_passive( set_passive_node_options(archive=archive, indexer=indexer) ensure_filestorage_mapping() - link_env_file() generate_nginx_config() - prepare_block_device(env['BLOCK_DEVICE'], force=env['ENFORCE_BTRFS'] == 'True') + passive_settings = get_settings(SkalePassiveSettings) + prepare_block_device(settings.block_device, force=passive_settings.enforce_btrfs) meta_manager = CliMetaManager() meta_manager.update_meta( VERSION, - env['NODE_VERSION'], + settings.node_version, None, 
distro.id(), distro.version(), ) - update_resource_allocation(env_type=env['ENV_TYPE']) + update_resource_allocation(env_type=settings.env_type) - schain_name = env['SCHAIN_NAME'] - if snapshot or snapshot_from: + if passive_settings.schain_name and (snapshot or snapshot_from): ts = int(time.time()) - update_node_cli_schain_status(schain_name, repair_ts=ts, snapshot_from=snapshot_from) + update_node_cli_schain_status( + passive_settings.schain_name, repair_ts=ts, snapshot_from=snapshot_from + ) - update_images(env=env, node_type=NodeType.SKALE, node_mode=NodeMode.PASSIVE) - compose_up(env=env, node_type=NodeType.SKALE, node_mode=NodeMode.PASSIVE) + update_images( + compose_env=compose_env, + container_configs_dir=settings.container_configs_dir, + node_type=NodeType.SKALE, + node_mode=NodeMode.PASSIVE, + ) + compose_up( + env=compose_env, settings=settings, node_type=NodeType.SKALE, node_mode=NodeMode.PASSIVE + ) -def update_passive(env_filepath: str, env: Dict) -> bool: - compose_rm(env=env, node_type=NodeType.SKALE, node_mode=NodeMode.PASSIVE) +def update_passive(settings: BaseNodeSettings, compose_env: dict) -> bool: + compose_rm(env=compose_env, node_type=NodeType.SKALE, node_mode=NodeMode.PASSIVE) remove_dynamic_containers() - cleanup_volume_artifacts(env['BLOCK_DEVICE']) - download_skale_node(env['NODE_VERSION'], env.get('CONTAINER_CONFIGS_DIR')) + cleanup_volume_artifacts(settings.block_device) + download_skale_node(settings.node_version, settings.container_configs_dir or None) sync_skale_node() - if env.get('SKIP_DOCKER_CONFIG') != 'True': + if not settings.skip_docker_config: configure_docker() - enable_monitoring = str_to_bool(env.get('MONITORING_CONTAINERS', 'False')) - configure_nftables(enable_monitoring=enable_monitoring) + configure_nftables(enable_monitoring=settings.monitoring_containers) ensure_filestorage_mapping() - prepare_block_device(env['BLOCK_DEVICE'], force=env['ENFORCE_BTRFS'] == 'True') + passive_settings = 
get_settings(SkalePassiveSettings) + prepare_block_device(settings.block_device, force=passive_settings.enforce_btrfs) generate_nginx_config() - prepare_host(env_filepath, env['ENV_TYPE'], allocation=True) + prepare_host(settings.env_type, allocation=True) + save_internal_settings(node_type=NodeType.SKALE, node_mode=NodeMode.PASSIVE) failed_checks = run_host_checks( - env['BLOCK_DEVICE'], + settings.block_device, TYPE, NodeMode.PASSIVE, - env['ENV_TYPE'], + settings.env_type, CONTAINER_CONFIG_PATH, check_type=CheckType.PREINSTALL, ) @@ -290,56 +307,82 @@ def update_passive(env_filepath: str, env: Dict) -> bool: meta_manager = CliMetaManager() meta_manager.update_meta( VERSION, - env['NODE_VERSION'], + settings.node_version, None, distro.id(), distro.version(), ) - update_images(env=env, node_type=NodeType.SKALE, node_mode=NodeMode.PASSIVE) - compose_up(env=env, node_type=NodeType.SKALE, node_mode=NodeMode.PASSIVE) + update_images( + compose_env=compose_env, + container_configs_dir=settings.container_configs_dir, + node_type=NodeType.SKALE, + node_mode=NodeMode.PASSIVE, + ) + compose_up( + env=compose_env, settings=settings, node_type=NodeType.SKALE, node_mode=NodeMode.PASSIVE + ) return True -def turn_off(env: dict, node_type: NodeType, node_mode: NodeMode) -> None: +def turn_off(compose_env: dict, node_type: NodeType, node_mode: NodeMode) -> None: logger.info('Turning off the node...') - compose_rm(env=env, node_type=node_type, node_mode=node_mode) + compose_rm(env=compose_env, node_type=node_type, node_mode=node_mode) remove_dynamic_containers() logger.info('Node was successfully turned off') -def turn_on(env: dict, node_type: NodeType, node_mode: NodeMode) -> None: +def turn_on( + settings: BaseNodeSettings, + compose_env: dict, + node_type: NodeType, + node_mode: NodeMode, + backup_run: bool = False, +) -> None: logger.info('Turning on the node...') if node_type == NodeType.FAIR: meta_manager = FairCliMetaManager() meta_manager.update_meta( VERSION, - 
env['NODE_VERSION'], + settings.node_version, distro.id(), distro.version(), ) else: + skale_settings = get_settings((SkaleSettings, SkalePassiveSettings)) + docker_lvmpy_version = ( + skale_settings.docker_lvmpy_version + if isinstance(skale_settings, SkaleSettings) + else None + ) meta_manager = CliMetaManager() meta_manager.update_meta( - VERSION, env['NODE_VERSION'], env['DOCKER_LVMPY_VERSION'], distro.id(), distro.version() + VERSION, settings.node_version, docker_lvmpy_version, distro.id(), distro.version() ) - if env.get('SKIP_DOCKER_CONFIG') != 'True': + if not settings.skip_docker_config: configure_docker() - enable_monitoring = str_to_bool(env.get('MONITORING_CONTAINERS', 'False')) - configure_nftables(enable_monitoring=enable_monitoring) + configure_nftables(enable_monitoring=settings.monitoring_containers) + save_internal_settings(node_type=node_type, node_mode=node_mode, backup_run=backup_run) logger.info('Launching containers on the node...') - compose_up(env=env, node_type=node_type, node_mode=node_mode) + compose_up(env=compose_env, settings=settings, node_type=node_type, node_mode=node_mode) -def restore(env, backup_path, node_type: NodeType, config_only=False): +def restore( + settings: BaseNodeSettings, + compose_env: dict, + backup_path: str, + node_type: NodeType, + config_only: bool = False, + backup_run: bool = False, +) -> bool: node_mode = upsert_node_mode(node_mode=NodeMode.ACTIVE) unpack_backup_archive(backup_path) failed_checks = run_host_checks( - env['BLOCK_DEVICE'], + settings.block_device, TYPE, node_mode, - env['ENV_TYPE'], + settings.env_type, CONTAINER_CONFIG_PATH, check_type=CheckType.PREINSTALL, ) @@ -349,32 +392,32 @@ def restore(env, backup_path, node_type: NodeType, config_only=False): ensure_btrfs_kernel_module_autoloaded() - if env.get('SKIP_DOCKER_CONFIG') != 'True': + if not settings.skip_docker_config: configure_docker() - enable_monitoring = str_to_bool(env.get('MONITORING_CONTAINERS', 'False')) - 
configure_nftables(enable_monitoring=enable_monitoring) + configure_nftables(enable_monitoring=settings.monitoring_containers) - link_env_file() - lvmpy_install(env) - init_shared_space_volume(env['ENV_TYPE']) + lvmpy_install(settings.block_device) + init_shared_space_volume(settings.env_type) + skale_settings = get_settings(SkaleSettings) meta_manager = CliMetaManager() meta_manager.update_meta( VERSION, - env['NODE_VERSION'], - env['DOCKER_LVMPY_VERSION'], + settings.node_version, + skale_settings.docker_lvmpy_version, distro.id(), distro.version(), ) + save_internal_settings(node_type=node_type, node_mode=node_mode, backup_run=backup_run) if not config_only: - compose_up(env=env, node_type=node_type, node_mode=node_mode) + compose_up(env=compose_env, settings=settings, node_type=node_type, node_mode=node_mode) failed_checks = run_host_checks( - env['BLOCK_DEVICE'], + settings.block_device, TYPE, node_mode, - env['ENV_TYPE'], + settings.env_type, CONTAINER_CONFIG_PATH, check_type=CheckType.POSTINSTALL, ) @@ -384,19 +427,20 @@ def restore(env, backup_path, node_type: NodeType, config_only=False): return True -def cleanup_passive(env, schain_name: str) -> None: - turn_off(env, node_type=NodeType.SKALE, node_mode=NodeMode.PASSIVE) +def cleanup_passive(compose_env: dict, schain_name: str) -> None: + turn_off(compose_env, node_type=NodeType.SKALE, node_mode=NodeMode.PASSIVE) cleanup_no_lvm_datadir(chain_name=schain_name) rm_dir(GLOBAL_SKALE_DIR) rm_dir(SKALE_DIR) -def cleanup(node_mode: NodeMode, env: dict, prune: bool = False) -> None: - turn_off(env, node_type=NodeType.SKALE, node_mode=node_mode) +def cleanup( + node_mode: NodeMode, compose_env: dict, schain_name: Optional[str] = None, prune: bool = False +) -> None: + turn_off(compose_env, node_type=NodeType.SKALE, node_mode=node_mode) if prune: system_prune() if node_mode == NodeMode.PASSIVE: - schain_name = env['SCHAIN_NAME'] cleanup_no_lvm_datadir(chain_name=schain_name) else: cleanup_lvm_datadir() diff --git 
a/node_cli/operations/config_repo.py b/node_cli/operations/config_repo.py index b8456db6..19cf284a 100644 --- a/node_cli/operations/config_repo.py +++ b/node_cli/operations/config_repo.py @@ -33,12 +33,13 @@ logger = logging.getLogger(__name__) -def update_images(env: dict, node_type: NodeType, node_mode: NodeMode) -> None: - local = env.get('CONTAINER_CONFIGS_DIR') != '' - if local: - compose_build(env=env, node_type=node_type, node_mode=node_mode) +def update_images( + compose_env: dict, container_configs_dir: str, node_type: NodeType, node_mode: NodeMode +) -> None: + if container_configs_dir: + compose_build(env=compose_env, node_type=node_type, node_mode=node_mode) else: - compose_pull(env=env, node_type=node_type, node_mode=node_mode) + compose_pull(env=compose_env, node_type=node_type, node_mode=node_mode) def download_skale_node(stream: Optional[str] = None, src: Optional[str] = None) -> None: diff --git a/node_cli/operations/docker_lvmpy.py b/node_cli/operations/docker_lvmpy.py index c21328f8..eb307d55 100644 --- a/node_cli/operations/docker_lvmpy.py +++ b/node_cli/operations/docker_lvmpy.py @@ -56,12 +56,10 @@ def sync_docker_lvmpy_repo(env): sync_repo(DOCKER_LVMPY_REPO_URL, DOCKER_LVMPY_PATH, env['DOCKER_LVMPY_VERSION']) -def lvmpy_install(env): +def lvmpy_install(block_device: str) -> None: ensure_filestorage_mapping() logging.info('Configuring and starting lvmpy') - setup_lvmpy( - block_device=env['BLOCK_DEVICE'], volume_group=VOLUME_GROUP, exec_start=LVMPY_RUN_CMD - ) + setup_lvmpy(block_device=block_device, volume_group=VOLUME_GROUP, exec_start=LVMPY_RUN_CMD) init_healing_cron() logger.info('docker-lvmpy is configured and started') diff --git a/node_cli/operations/fair.py b/node_cli/operations/fair.py index 096d1623..015abfe0 100644 --- a/node_cli/operations/fair.py +++ b/node_cli/operations/fair.py @@ -23,6 +23,9 @@ import distro +from skale.core.settings import BaseNodeSettings, FairBaseSettings, FairSettings, get_settings +from skale.core.types 
import EnvType + from node_cli.cli.info import TYPE, VERSION from node_cli.configs import ( CONTAINER_CONFIG_PATH, @@ -33,7 +36,7 @@ from node_cli.core.checks import CheckType from node_cli.core.checks import run_checks as run_host_checks from node_cli.core.docker_config import cleanup_docker_configuration, configure_docker -from node_cli.core.host import ensure_btrfs_kernel_module_autoloaded, link_env_file, prepare_host +from node_cli.core.host import ensure_btrfs_kernel_module_autoloaded, prepare_host from node_cli.core.nftables import configure_nftables from node_cli.core.nginx import generate_nginx_config from node_cli.core.schains import cleanup_no_lvm_datadir @@ -66,11 +69,11 @@ system_prune, wait_for_container, ) -from node_cli.utils.helper import cleanup_dir_content, rm_dir, str_to_bool +from node_cli.utils.helper import cleanup_dir_content, rm_dir from node_cli.utils.meta import FairCliMetaManager from node_cli.utils.print_formatters import print_failed_requirements_checks from node_cli.utils.node_type import NodeMode, NodeType -from node_cli.utils.settings import save_settings +from node_cli.utils.settings import save_internal_settings logger = logging.getLogger(__name__) @@ -82,42 +85,56 @@ class FairUpdateType(Enum): @checked_host -def init_fair_boot(env_filepath: str, env: dict) -> None: +def init_fair_boot( + settings: BaseNodeSettings, + compose_env: dict, + node_mode: NodeMode, +) -> None: sync_skale_node() - cleanup_volume_artifacts(env['BLOCK_DEVICE']) + cleanup_volume_artifacts(settings.block_device) ensure_btrfs_kernel_module_autoloaded() - if env.get('SKIP_DOCKER_CONFIG') != 'True': + if not settings.skip_docker_config: configure_docker() - enable_monitoring = str_to_bool(env.get('MONITORING_CONTAINERS', 'False')) - configure_nftables(enable_monitoring=enable_monitoring) + configure_nftables(enable_monitoring=settings.monitoring_containers) - prepare_host(env_filepath, env_type=env['ENV_TYPE']) - save_settings(node_type=NodeType.FAIR, 
node_mode=NodeMode.ACTIVE) - link_env_file() + prepare_host(env_type=settings.env_type) + save_internal_settings(node_type=NodeType.FAIR, node_mode=NodeMode.ACTIVE) mark_active_node() configure_filebeat() generate_nginx_config() - prepare_block_device(env['BLOCK_DEVICE'], force=env['ENFORCE_BTRFS'] == 'True') + fair_settings = get_settings((FairSettings, FairBaseSettings)) + prepare_block_device(settings.block_device, force=fair_settings.enforce_btrfs) meta_manager = FairCliMetaManager() meta_manager.update_meta( VERSION, - env['NODE_VERSION'], + settings.node_version, distro.id(), distro.version(), ) - update_images(env=env, node_type=NodeType.FAIR, node_mode=NodeMode.ACTIVE) + update_images( + compose_env=compose_env, + container_configs_dir=settings.container_configs_dir, + node_type=NodeType.FAIR, + node_mode=NodeMode.ACTIVE, + ) - compose_up(env=env, node_type=NodeType.FAIR, node_mode=NodeMode.ACTIVE, is_fair_boot=True) + compose_up( + env=compose_env, + settings=settings, + node_type=NodeType.FAIR, + node_mode=NodeMode.ACTIVE, + is_fair_boot=True, + ) @checked_host def init( - env_filepath: str, - env: dict, + settings: BaseNodeSettings, + compose_env: dict, node_mode: NodeMode, indexer: bool, archive: bool, @@ -125,24 +142,33 @@ def init( ) -> bool: sync_skale_node() ensure_btrfs_kernel_module_autoloaded() - cleanup_volume_artifacts(env['BLOCK_DEVICE']) + cleanup_volume_artifacts(settings.block_device) - if env.get('SKIP_DOCKER_CONFIG') != 'True': + if not settings.skip_docker_config: configure_docker() configure_nftables() configure_filebeat() generate_nginx_config() - prepare_host(env_filepath, env_type=env['ENV_TYPE']) - save_settings(node_type=NodeType.FAIR, node_mode=node_mode) - link_env_file() + prepare_host(env_type=settings.env_type) + save_internal_settings(node_type=NodeType.FAIR, node_mode=node_mode) - prepare_block_device(env['BLOCK_DEVICE'], force=env['ENFORCE_BTRFS'] == 'True') + fair_settings = get_settings((FairSettings, FairBaseSettings)) + 
prepare_block_device(settings.block_device, force=fair_settings.enforce_btrfs) - update_images(env=env, node_type=NodeType.FAIR, node_mode=node_mode) + update_images( + compose_env=compose_env, + container_configs_dir=settings.container_configs_dir, + node_type=NodeType.FAIR, + node_mode=node_mode, + ) compose_up( - env=env, node_type=NodeType.FAIR, node_mode=node_mode, services=list(REDIS_SERVICE_DICT) + env=compose_env, + settings=settings, + node_type=NodeType.FAIR, + node_mode=node_mode, + services=list(REDIS_SERVICE_DICT), ) upsert_node_mode(node_mode=node_mode) @@ -152,131 +178,159 @@ def init( if snapshot: logger.info('Waiting %s seconds for redis to start', REDIS_START_TIMEOUT) time.sleep(REDIS_START_TIMEOUT) - trigger_skaled_snapshot_mode(env=env, snapshot_from=snapshot) + trigger_skaled_snapshot_mode(env_type=settings.env_type, snapshot_from=snapshot) meta_manager = FairCliMetaManager() meta_manager.update_meta( VERSION, - env['NODE_VERSION'], + settings.node_version, distro.id(), distro.version(), ) - compose_up(env=env, node_type=NodeType.FAIR, node_mode=node_mode) + compose_up(env=compose_env, settings=settings, node_type=NodeType.FAIR, node_mode=node_mode) wait_for_container(BASE_PASSIVE_FAIR_COMPOSE_SERVICES['api']) time.sleep(REDIS_START_TIMEOUT) return True @checked_host -def update_fair_boot(env_filepath: str, env: dict, node_mode: NodeMode = NodeMode.ACTIVE) -> bool: - compose_rm(node_type=NodeType.FAIR, node_mode=node_mode, env=env) +def update_fair_boot( + settings: BaseNodeSettings, + compose_env: dict, + node_mode: NodeMode = NodeMode.ACTIVE, +) -> bool: + compose_rm(node_type=NodeType.FAIR, node_mode=node_mode, env=compose_env) remove_dynamic_containers() - cleanup_volume_artifacts(env['BLOCK_DEVICE']) + cleanup_volume_artifacts(settings.block_device) sync_skale_node() ensure_btrfs_kernel_module_autoloaded() - if env.get('SKIP_DOCKER_CONFIG') != 'True': + if not settings.skip_docker_config: configure_docker() - enable_monitoring = 
str_to_bool(env.get('MONITORING_CONTAINERS', 'False')) - configure_nftables(enable_monitoring=enable_monitoring) + configure_nftables(enable_monitoring=settings.monitoring_containers) generate_nginx_config() - prepare_block_device(env['BLOCK_DEVICE'], force=env['ENFORCE_BTRFS'] == 'True') + fair_settings = get_settings((FairSettings, FairBaseSettings)) + prepare_block_device(settings.block_device, force=fair_settings.enforce_btrfs) - prepare_host(env_filepath, env['ENV_TYPE']) - save_settings(node_type=NodeType.FAIR, node_mode=NodeMode.ACTIVE) + prepare_host(settings.env_type) + save_internal_settings(node_type=NodeType.FAIR, node_mode=NodeMode.ACTIVE) meta_manager = FairCliMetaManager() current_stream = meta_manager.get_meta_info().config_stream - skip_cleanup = env.get('SKIP_DOCKER_CLEANUP') == 'True' - if not skip_cleanup and current_stream != env['NODE_VERSION']: + if not settings.skip_docker_cleanup and current_stream != settings.node_version: logger.info( 'Stream version was changed from %s to %s', current_stream, - env['NODE_VERSION'], + settings.node_version, ) docker_cleanup() meta_manager.update_meta( VERSION, - env['NODE_VERSION'], + settings.node_version, distro.id(), distro.version(), ) - update_images(env=env, node_type=NodeType.FAIR, node_mode=NodeMode.ACTIVE) - compose_up(env=env, node_type=NodeType.FAIR, node_mode=NodeMode.ACTIVE, is_fair_boot=True) + update_images( + compose_env=compose_env, + container_configs_dir=settings.container_configs_dir, + node_type=NodeType.FAIR, + node_mode=NodeMode.ACTIVE, + ) + compose_up( + env=compose_env, + settings=settings, + node_type=NodeType.FAIR, + node_mode=NodeMode.ACTIVE, + is_fair_boot=True, + ) return True @checked_host def update( - env_filepath: str, - env: dict, + settings: BaseNodeSettings, + compose_env: dict, node_mode: NodeMode, update_type: FairUpdateType, force_skaled_start: bool, ) -> bool: - compose_rm(node_type=NodeType.FAIR, node_mode=node_mode, env=env) + compose_rm(node_type=NodeType.FAIR, 
node_mode=node_mode, env=compose_env) if update_type not in (FairUpdateType.INFRA_ONLY, FairUpdateType.FROM_BOOT): remove_dynamic_containers() sync_skale_node() ensure_btrfs_kernel_module_autoloaded() - if env.get('SKIP_DOCKER_CONFIG') != 'True': + if not settings.skip_docker_config: configure_docker() configure_nftables() generate_nginx_config() - prepare_host(env_filepath, env['ENV_TYPE'], allocation=True) - save_settings(node_type=NodeType.FAIR, node_mode=node_mode) + prepare_host(settings.env_type, allocation=True) + save_internal_settings(node_type=NodeType.FAIR, node_mode=node_mode) meta_manager = FairCliMetaManager() current_stream = meta_manager.get_meta_info().config_stream - skip_cleanup = env.get('SKIP_DOCKER_CLEANUP') == 'True' - if not skip_cleanup and current_stream != env['NODE_VERSION']: + if not settings.skip_docker_cleanup and current_stream != settings.node_version: logger.info( 'Stream version was changed from %s to %s', current_stream, - env['NODE_VERSION'], + settings.node_version, ) docker_cleanup() meta_manager.update_meta( VERSION, - env['NODE_VERSION'], + settings.node_version, distro.id(), distro.version(), ) - fair_chain_name = get_fair_chain_name(env) + fair_chain_name = get_fair_chain_name(settings.env_type) if update_type == FairUpdateType.FROM_BOOT: migrate_nftables_from_boot(chain_name=fair_chain_name) - update_images(env=env, node_type=NodeType.FAIR, node_mode=node_mode) + update_images( + compose_env=compose_env, + container_configs_dir=settings.container_configs_dir, + node_type=NodeType.FAIR, + node_mode=node_mode, + ) compose_up( - env=env, node_type=NodeType.FAIR, node_mode=node_mode, services=list(REDIS_SERVICE_DICT) + env=compose_env, + settings=settings, + node_type=NodeType.FAIR, + node_mode=node_mode, + services=list(REDIS_SERVICE_DICT), ) wait_for_container(REDIS_SERVICE_DICT['redis']) time.sleep(REDIS_START_TIMEOUT) if update_type == FairUpdateType.FROM_BOOT: - migrate_chain_record(env) - update_chain_record(env, 
force_skaled_start=force_skaled_start) - compose_up(env=env, node_type=NodeType.FAIR, node_mode=node_mode) + migrate_chain_record(settings.env_type, settings.node_version) + update_chain_record(settings.env_type, force_skaled_start=force_skaled_start) + compose_up(env=compose_env, settings=settings, node_type=NodeType.FAIR, node_mode=node_mode) return True -def restore(node_mode: NodeMode, env, backup_path, config_only=False): +def restore( + node_mode: NodeMode, + settings: BaseNodeSettings, + compose_env: dict, + backup_path: str, + config_only: bool = False, +) -> bool: unpack_backup_archive(backup_path) failed_checks = run_host_checks( - env['BLOCK_DEVICE'], + settings.block_device, TYPE, node_mode, - env['ENV_TYPE'], + settings.env_type, CONTAINER_CONFIG_PATH, check_type=CheckType.PREINSTALL, ) @@ -286,30 +340,27 @@ def restore(node_mode: NodeMode, env, backup_path, config_only=False): ensure_btrfs_kernel_module_autoloaded() - if env.get('SKIP_DOCKER_CONFIG') != 'True': + if not settings.skip_docker_config: configure_docker() - enable_monitoring = str_to_bool(env.get('MONITORING_CONTAINERS', 'False')) - configure_nftables(enable_monitoring=enable_monitoring) - - link_env_file() + configure_nftables(enable_monitoring=settings.monitoring_containers) meta_manager = FairCliMetaManager() meta_manager.update_meta( VERSION, - env['NODE_VERSION'], + settings.node_version, distro.id(), distro.version(), ) if not config_only: - compose_up(env=env, node_type=NodeType.FAIR, node_mode=node_mode) + compose_up(env=compose_env, settings=settings, node_type=NodeType.FAIR, node_mode=node_mode) failed_checks = run_host_checks( - env['BLOCK_DEVICE'], + settings.block_device, TYPE, node_mode, - env['ENV_TYPE'], + settings.env_type, CONTAINER_CONFIG_PATH, check_type=CheckType.POSTINSTALL, ) @@ -319,8 +370,8 @@ def restore(node_mode: NodeMode, env, backup_path, config_only=False): return True -def cleanup(node_mode: NodeMode, env: dict, prune: bool = False) -> None: - turn_off(env, 
node_type=NodeType.FAIR, node_mode=node_mode) +def cleanup(node_mode: NodeMode, compose_env: dict, prune: bool = False) -> None: + turn_off(compose_env, node_type=NodeType.FAIR, node_mode=node_mode) if prune: system_prune() cleanup_no_lvm_datadir() @@ -330,15 +381,15 @@ def cleanup(node_mode: NodeMode, env: dict, prune: bool = False) -> None: cleanup_docker_configuration() -def trigger_skaled_snapshot_mode(env: dict, snapshot_from: str = 'any') -> None: - record = get_fair_chain_record(env) +def trigger_skaled_snapshot_mode(env_type: EnvType, snapshot_from: str = 'any') -> None: + record = get_fair_chain_record(env_type) if not snapshot_from: snapshot_from = 'any' logger.info('Triggering skaled snapshot mode, snapshot_from: %s', snapshot_from) record.set_snapshot_from(snapshot_from) -def repair(env: dict, snapshot_from: str = 'any') -> None: +def repair(env_type: EnvType, snapshot_from: str = 'any') -> None: logger.info('Starting fair node repair') container_name = 'sk_admin' if is_admin_running(): @@ -349,7 +400,7 @@ def repair(env: dict, snapshot_from: str = 'any') -> None: logger.info('Cleaning up datadir') cleanup_no_lvm_datadir() logger.info('Requesting fair node repair') - trigger_skaled_snapshot_mode(env=env, snapshot_from=snapshot_from) + trigger_skaled_snapshot_mode(env_type=env_type, snapshot_from=snapshot_from) logger.info('Starting admin') start_container_by_name(container_name=container_name) logger.info('Fair node repair completed successfully') diff --git a/node_cli/utils/docker_utils.py b/node_cli/utils/docker_utils.py index 37e3fab9..8f0e1bb1 100644 --- a/node_cli/utils/docker_utils.py +++ b/node_cli/utils/docker_utils.py @@ -29,15 +29,16 @@ from docker.errors import NotFound from docker.models.containers import Container +from skale.core.settings import BaseNodeSettings + from node_cli.configs import ( COMPOSE_PATH, FAIR_COMPOSE_PATH, NGINX_CONTAINER_NAME, REMOVED_CONTAINERS_FOLDER_PATH, - SGX_CERTIFICATES_DIR_NAME, ) from 
node_cli.core.node_options import active_fair, active_skale, passive_fair, passive_skale -from node_cli.utils.helper import run_cmd, str_to_bool +from node_cli.utils.helper import run_cmd from node_cli.utils.node_type import NodeMode, NodeType logger = logging.getLogger(__name__) @@ -327,6 +328,7 @@ def get_up_compose_cmd( def compose_up( env, + settings: BaseNodeSettings, node_type: NodeType, node_mode: NodeMode, is_fair_boot: bool = False, @@ -365,7 +367,7 @@ def compose_up( logger.debug('Launching skale node containers with env %s', env) run_cmd(cmd=get_up_compose_cmd(node_type=node_type, node_mode=node_mode), env=env) - if 'TG_API_KEY' in env and 'TG_CHAT_ID' in env: + if settings.tg_api_key and settings.tg_chat_id: logger.info('Running containers for Telegram notifications') run_cmd( cmd=get_up_compose_cmd( @@ -376,7 +378,7 @@ def compose_up( env=env, ) - if str_to_bool(env.get('MONITORING_CONTAINERS', 'False')): + if settings.monitoring_containers: logger.info('Running monitoring containers') run_cmd( cmd=get_up_compose_cmd( diff --git a/node_cli/utils/helper.py b/node_cli/utils/helper.py index 9d96a3ca..d2c598fe 100644 --- a/node_cli/utils/helper.py +++ b/node_cli/utils/helper.py @@ -144,16 +144,6 @@ def get_username(): return os.environ.get('USERNAME') or os.environ.get('USER') -def str_to_bool(val: str) -> bool: - val = val.lower() - if val in ('y', 'yes', 't', 'true', 'on', '1'): - return True - elif val in ('n', 'no', 'f', 'false', 'off', '0'): - return False - else: - raise ValueError(f'Invalid truth value {val!r}') - - def error_exit(error_payload: Any, exit_code: CLIExitCodes = CLIExitCodes.FAILURE) -> NoReturn: """Print error message and exit the program with specified exit code. 
diff --git a/node_cli/utils/settings.py b/node_cli/utils/settings.py index c0b3aa76..e10e68ce 100644 --- a/node_cli/utils/settings.py +++ b/node_cli/utils/settings.py @@ -17,19 +17,23 @@ # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see . +import tomllib + +from dotenv.main import DotEnv + from skale.core.settings import ( SETTINGS_MAP, - write_node_settings_file, - write_internal_settings_file, + BaseNodeSettings, + FairBaseSettings, + FairSettings, InternalSettings, - SkaleSettings, SkalePassiveSettings, - FairSettings, - FairBaseSettings, + SkaleSettings, + write_internal_settings_file, + write_node_settings_file, ) -from node_cli.configs import NODE_SETTINGS_PATH, INTERNAL_SETTINGS_PATH - +from node_cli.configs import INTERNAL_SETTINGS_PATH, NODE_SETTINGS_PATH, SKALE_DIR from node_cli.utils.node_type import NodeMode, NodeType InternalSettings.model_config['toml_file'] = INTERNAL_SETTINGS_PATH @@ -39,9 +43,35 @@ FairBaseSettings.model_config['toml_file'] = NODE_SETTINGS_PATH -def save_settings(node_type: NodeType, node_mode: NodeMode) -> None: - write_internal_settings_file(path=INTERNAL_SETTINGS_PATH, data={}) # todof: fix +def load_config_file(filepath: str) -> dict: + if filepath.endswith('.toml'): + with open(filepath, 'rb') as f: + return tomllib.load(f) + return {k.lower(): v for k, v in DotEnv(filepath).dict().items()} + + +def validate_and_save_node_settings( + config_filepath: str, + node_type: NodeType, + node_mode: NodeMode, +) -> BaseNodeSettings: + data = load_config_file(config_filepath) settings_type = SETTINGS_MAP[(node_type.value, node_mode.value)] - write_node_settings_file( - path=NODE_SETTINGS_PATH, settings_type=settings_type, data={} - ) # todof: fix + write_node_settings_file(path=NODE_SETTINGS_PATH, settings_type=settings_type, data=data) + return settings_type() + + +def save_internal_settings( + node_type: NodeType, + node_mode: NodeMode, + backup_run: bool = False, + 
pull_config_for_schain: str | None = None, +) -> None: + data = { + 'node_type': node_type.value, + 'node_mode': node_mode.value, + 'skale_dir_host': str(SKALE_DIR), + 'backup_run': backup_run, + 'pull_config_for_schain': pull_config_for_schain, + } + write_internal_settings_file(path=INTERNAL_SETTINGS_PATH, data=data) diff --git a/tests/cli/fair_cli_test.py b/tests/cli/fair_cli_test.py index 3c6b9ef5..2f93d1a7 100644 --- a/tests/cli/fair_cli_test.py +++ b/tests/cli/fair_cli_test.py @@ -20,6 +20,7 @@ from tests.helper import run_command, subprocess_run_mock from tests.resources_test import BIG_DISK_SIZE + @mock.patch('node_cli.cli.fair_node.restore_fair') def test_fair_node_restore(mock_restore_core, valid_env_file, tmp_path): runner = CliRunner() @@ -105,7 +106,7 @@ def test_fair_node_migrate(mock_migrate_core, valid_env_file): result = runner.invoke(migrate_node, ['--yes', valid_env_file]) assert result.exit_code == 0, f'Output: {result.output}\nException: {result.exception}' - mock_migrate_core.assert_called_once_with(env_filepath=valid_env_file) + mock_migrate_core.assert_called_once_with(config_file=valid_env_file) @mock.patch('node_cli.cli.fair_node.exit_fair') @@ -136,4 +137,5 @@ def test_cleanup_node(mocked_g_config, inited_node): result = run_command(cleanup_node, ['--yes']) assert result.exit_code == 0 cleanup_mock.assert_called_once_with( - node_mode=NodeMode.ACTIVE, prune=False, env={'SCHAIN_NAME': 'test'}) + node_mode=NodeMode.ACTIVE, prune=False, env={'SCHAIN_NAME': 'test'} + ) diff --git a/tests/cli/node_test.py b/tests/cli/node_test.py index bb5aa6d0..63e96259 100644 --- a/tests/cli/node_test.py +++ b/tests/cli/node_test.py @@ -353,12 +353,12 @@ def test_restore(request, node_type, node_mode, test_user_conf, mocked_g_config, result = run_command(restore_node, [backup_path, user_conf_path]) assert result.exit_code == 0 assert 'Node is restored from backup\n' in result.output # noqa - assert mock_restore_op.call_args[0][0].get('BACKUP_RUN') == 'True' 
+ assert mock_restore_op.call_args.kwargs.get('backup_run') is True result = run_command(restore_node, [backup_path, user_conf_path, '--no-snapshot']) assert result.exit_code == 0 assert 'Node is restored from backup\n' in result.output # noqa - assert mock_restore_op.call_args[0][0].get('BACKUP_RUN') is None + assert mock_restore_op.call_args.kwargs.get('backup_run') is False def test_maintenance_on(): diff --git a/tests/configs/configs_env_validate_test.py b/tests/configs/configs_env_validate_test.py index 2db057ff..52622f73 100644 --- a/tests/configs/configs_env_validate_test.py +++ b/tests/configs/configs_env_validate_test.py @@ -1,4 +1,3 @@ -import os from typing import Optional import pytest @@ -12,18 +11,6 @@ validate_contract_address, validate_contract_alias, ) -from node_cli.configs.user import ( - ALLOWED_ENV_TYPES, - FairBootUserConfig, - FairUserConfig, - PassiveFairUserConfig, - SkaleUserConfig, - PassiveSkaleUserConfig, - get_user_config_class, - get_validated_user_config, - validate_env_type, -) -from node_cli.utils.node_type import NodeType, NodeMode ENDPOINT = 'http://localhost:8545' @@ -37,44 +24,6 @@ def json(self): return self._json_data -@pytest.mark.parametrize( - 'node_type, node_mode, is_fair_boot, expected_type', - [ - (NodeType.SKALE, NodeMode.ACTIVE, False, SkaleUserConfig), - (NodeType.SKALE, NodeMode.PASSIVE, False, PassiveSkaleUserConfig), - (NodeType.FAIR, NodeMode.ACTIVE, True, FairBootUserConfig), - (NodeType.FAIR, NodeMode.ACTIVE, False, FairUserConfig), - (NodeType.FAIR, NodeMode.PASSIVE, False, PassiveFairUserConfig), - ], - ids=['skale_active', 'skale_passive', 'fair_boot', 'fair_active', 'fair_passive'], -) -def test_build_env_params_keys(node_type, node_mode, is_fair_boot, expected_type): - env_type = get_user_config_class( - node_type=node_type, node_mode=node_mode, is_fair_boot=is_fair_boot - ) - assert env_type == expected_type - - -@pytest.mark.parametrize( - 'env_types, should_fail', - [ - (ALLOWED_ENV_TYPES, False), - 
(['invalid'], True), - ], - ids=[ - 'correct_env', - 'invalid_env', - ], -) -def test_env_types(env_types, should_fail): - for env_type in env_types: - if should_fail: - with pytest.raises(SystemExit): - validate_env_type(env_type=env_type) - else: - validate_env_type(env_type=env_type) - - def test_get_chain_id_success(monkeypatch): fake_response = FakeResponse(200, {'result': '0x1'}) @@ -166,24 +115,3 @@ def test_validate_env_alias_or_address_with_alias(requests_mock): alias_url = 'https://raw.githubusercontent.com/skalenetwork/skale-contracts/refs/heads/deployments/mainnet/mainnet-ima/test-alias.json' requests_mock.get(alias_url, status_code=200) validate_alias_or_address('test-alias', ContractType.IMA, ENDPOINT) - - -def test_get_validated_env_config_missing_file(): - with pytest.raises(SystemExit): - get_validated_user_config( - env_filepath='nonexistent.env', node_type=NodeType.SKALE, node_mode=NodeMode.ACTIVE - ) - - -def test_get_validated_env_config_unreadable_file(tmp_path): - env_file = tmp_path / 'unreadable.env' - env_file.touch() - original_mode = env_file.stat().st_mode - try: - os.chmod(env_file, 0o000) - with pytest.raises(PermissionError): - get_validated_user_config( - env_filepath=str(env_file), node_type=NodeType.SKALE, node_mode=NodeMode.ACTIVE - ) - finally: - os.chmod(env_file, original_mode) diff --git a/tests/conftest.py b/tests/conftest.py index cc5a5fa3..e9d4b6cc 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -48,6 +48,12 @@ from node_cli.utils.docker_utils import docker_client from node_cli.utils.global_config import generate_g_config_file from node_cli.utils.node_type import NodeMode +from tests.fixtures.settings import ( # noqa: F401 + fair_active_settings, + fair_passive_settings, + skale_active_settings, + skale_passive_settings, +) from tests.helper import TEST_META_V1, TEST_META_V2, TEST_META_V3, TEST_SCHAINS_MNT_DIR_SINGLE_CHAIN diff --git a/tests/core/core_node_test.py b/tests/core/core_node_test.py index 
0fe113b2..ead55528 100644 --- a/tests/core/core_node_test.py +++ b/tests/core/core_node_test.py @@ -10,9 +10,14 @@ import pytest import requests -from node_cli.configs import NODE_DATA_PATH, SCHAINS_MNT_DIR_REGULAR, SCHAINS_MNT_DIR_SINGLE_CHAIN +from node_cli.configs import ( + NODE_DATA_PATH, + SCHAINS_MNT_DIR_REGULAR, + SCHAINS_MNT_DIR_SINGLE_CHAIN, + SKALE_DIR, +) from node_cli.configs.resource_allocation import RESOURCE_ALLOCATION_FILEPATH -from node_cli.configs.user import SKALE_DIR_ENV_FILEPATH + from node_cli.core.node import ( cleanup, compose_node_env, @@ -33,6 +38,8 @@ ALPINE_IMAGE_NAME = 'alpine:3.12' CMD = 'sleep 60' +SKALE_DIR_ENV_FILEPATH = os.path.join(SKALE_DIR, '.env') + WRONG_CONTAINERS = [ 'WRONG_CONTAINER_1', 'skale_WRONG_CONTAINER_4', @@ -165,105 +172,42 @@ def test_is_base_containers_alive_empty(node_type, node_mode, is_boot): @pytest.mark.parametrize( - ( - 'node_type, node_mode, test_user_conf, is_boot, inited_node, sync_schains,' - 'expected_mnt_dir, expect_flask_key, expect_backup_run' - ), + 'node_type, node_mode, expected_mnt_dir', [ ( NodeType.SKALE, NodeMode.ACTIVE, - 'regular_user_conf', - False, - True, - False, SCHAINS_MNT_DIR_REGULAR, - True, - False, - ), - ( - NodeType.SKALE, - NodeMode.ACTIVE, - 'regular_user_conf', - False, - True, - True, - SCHAINS_MNT_DIR_REGULAR, - True, - True, ), ( NodeType.SKALE, NodeMode.PASSIVE, - 'passive_user_conf', - False, - False, - False, SCHAINS_MNT_DIR_SINGLE_CHAIN, - False, - False, ), ( NodeType.FAIR, NodeMode.ACTIVE, - 'fair_boot_user_conf', - True, - True, - False, SCHAINS_MNT_DIR_SINGLE_CHAIN, - True, - False, - ), - ( - NodeType.FAIR, - NodeMode.ACTIVE, - 'fair_user_conf', - False, - True, - False, - SCHAINS_MNT_DIR_SINGLE_CHAIN, - True, - False, ), ], ids=[ 'regular', - 'regular_passive_flag', 'passive', - 'fair_boot', - 'fair_regular', + 'fair', ], ) def test_compose_node_env( - request, node_type, node_mode, - test_user_conf, - is_boot, - inited_node, - sync_schains, expected_mnt_dir, - 
expect_backup_run, ): - user_config_path = request.getfixturevalue(test_user_conf) - - with ( - mock.patch('node_cli.configs.user.validate_alias_or_address'), - mock.patch('node_cli.core.node.save_env_params'), - ): - result_env = compose_node_env( - env_filepath=user_config_path.as_posix(), - inited_node=inited_node, - sync_schains=sync_schains, - node_type=node_type, - node_mode=node_mode, - is_fair_boot=is_boot, - save=True, - ) + result_env = compose_node_env( + node_type=node_type, + node_mode=node_mode, + ) assert result_env['SCHAINS_MNT_DIR'] == expected_mnt_dir - should_have_backup = sync_schains and node_mode != NodeMode.PASSIVE - assert ('BACKUP_RUN' in result_env and result_env['BACKUP_RUN'] == 'True') == should_have_backup + assert 'BACKUP_RUN' not in result_env @pytest.fixture diff --git a/tests/fair/fair_node_test.py b/tests/fair/fair_node_test.py index a0408caa..62fcf271 100644 --- a/tests/fair/fair_node_test.py +++ b/tests/fair/fair_node_test.py @@ -1,9 +1,9 @@ +import os from unittest import mock import pytest from node_cli.configs import SKALE_DIR -from node_cli.configs.user import SKALE_DIR_ENV_FILEPATH from node_cli.fair.boot import init as init_boot from node_cli.fair.boot import update from node_cli.fair.common import cleanup @@ -11,6 +11,8 @@ from node_cli.operations.fair import FairUpdateType from node_cli.utils.node_type import NodeMode, NodeType +SKALE_DIR_ENV_FILEPATH = os.path.join(SKALE_DIR, '.env') + @mock.patch('node_cli.fair.active.time.sleep') @mock.patch('node_cli.fair.active.restore_fair_op') @@ -182,7 +184,8 @@ def test_cleanup_success( skip_user_conf_validation=True, ) mock_cleanup_fair_op.assert_called_once_with( - node_mode=NodeMode.ACTIVE, env=mock_env, prune=False) + node_mode=NodeMode.ACTIVE, env=mock_env, prune=False + ) @mock.patch('node_cli.utils.decorators.is_user_valid', return_value=True) @@ -214,7 +217,8 @@ def test_cleanup_calls_operations_in_correct_order( save=False, node_type=mock.ANY, node_mode=NodeMode.ACTIVE, 
- skip_user_conf_validation=True), + skip_user_conf_validation=True, + ), mock.call.cleanup_fair_op(node_mode=NodeMode.ACTIVE, env=mock_env, prune=False), ] manager.assert_has_calls(expected_calls, any_order=False) @@ -240,7 +244,8 @@ def test_cleanup_continues_after_fair_op_error( mock_compose_env.assert_called_once() mock_cleanup_fair_op.assert_called_once_with( - node_mode=NodeMode.ACTIVE, env=mock_env, prune=False) + node_mode=NodeMode.ACTIVE, env=mock_env, prune=False + ) @mock.patch('node_cli.utils.decorators.is_user_valid', return_value=False) diff --git a/tests/fixtures/__init__.py b/tests/fixtures/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/tests/fixtures/settings.py b/tests/fixtures/settings.py new file mode 100644 index 00000000..11e53ede --- /dev/null +++ b/tests/fixtures/settings.py @@ -0,0 +1,131 @@ +# -*- coding: utf-8 -*- +# +# This file is part of node-cli +# +# Copyright (C) 2026-Present SKALE Labs +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Affero General Public License for more details. +# +# You should have received a copy of the GNU Affero General Public License +# along with this program. If not, see . 
+ +import pytest +import tomli_w + +from skale.core.settings import get_internal_settings + +from node_cli.configs import INTERNAL_SETTINGS_PATH, NODE_SETTINGS_PATH + +SKALE_DIR_HOST = './skale-data/' + +INTERNAL_SKALE_ACTIVE = { + 'node_type': 'skale', + 'node_mode': 'active', + 'skale_dir_host': SKALE_DIR_HOST, +} + +INTERNAL_SKALE_PASSIVE = { + 'node_type': 'skale', + 'node_mode': 'passive', + 'skale_dir_host': SKALE_DIR_HOST, +} + +INTERNAL_FAIR_ACTIVE = { + 'node_type': 'fair', + 'node_mode': 'active', + 'skale_dir_host': SKALE_DIR_HOST, +} + +INTERNAL_FAIR_PASSIVE = { + 'node_type': 'fair', + 'node_mode': 'passive', + 'skale_dir_host': SKALE_DIR_HOST, +} + +_BASE_NODE = { + 'env_type': 'devnet', + 'endpoint': 'http://127.0.0.1:8545', + 'container_stop_timeout': 1, + 'tg_api_key': '123', + 'tg_chat_id': '-1231232', + 'node_version': '0.0.0', + 'block_device': '/dev/sda', +} + +NODE_SKALE_ACTIVE = { + **_BASE_NODE, + 'sgx_url': 'https://localhost:1026', + 'docker_lvmpy_version': '0.0.0', + 'manager_contracts': 'test-manager', + 'ima_contracts': 'test-ima', +} + +NODE_SKALE_PASSIVE = { + **_BASE_NODE, + 'manager_contracts': 'test-manager', + 'ima_contracts': 'test-ima', + 'schain_name': 'test-schain', + 'enforce_btrfs': False, +} + +NODE_FAIR_ACTIVE = { + **_BASE_NODE, + 'sgx_url': 'https://localhost:1026', + 'fair_contracts': 'test-fair', + 'enforce_btrfs': False, +} + +NODE_FAIR_PASSIVE = { + **_BASE_NODE, + 'fair_contracts': 'test-fair', + 'enforce_btrfs': False, +} + + +def _write_settings(internal: dict, node: dict) -> None: + INTERNAL_SETTINGS_PATH.parent.mkdir(parents=True, exist_ok=True) + INTERNAL_SETTINGS_PATH.write_bytes(tomli_w.dumps(internal).encode()) + NODE_SETTINGS_PATH.write_bytes(tomli_w.dumps(node).encode()) + get_internal_settings.cache_clear() + + +def _cleanup_settings() -> None: + INTERNAL_SETTINGS_PATH.unlink(missing_ok=True) + NODE_SETTINGS_PATH.unlink(missing_ok=True) + get_internal_settings.cache_clear() + + +@pytest.fixture +def 
skale_active_settings(): + _write_settings(INTERNAL_SKALE_ACTIVE, NODE_SKALE_ACTIVE) + yield + _cleanup_settings() + + +@pytest.fixture +def skale_passive_settings(): + _write_settings(INTERNAL_SKALE_PASSIVE, NODE_SKALE_PASSIVE) + yield + _cleanup_settings() + + +@pytest.fixture +def fair_active_settings(): + _write_settings(INTERNAL_FAIR_ACTIVE, NODE_FAIR_ACTIVE) + yield + _cleanup_settings() + + +@pytest.fixture +def fair_passive_settings(): + _write_settings(INTERNAL_FAIR_PASSIVE, NODE_FAIR_PASSIVE) + yield + _cleanup_settings() diff --git a/tests/utils/settings_test.py b/tests/utils/settings_test.py index 1d9deeb1..e7b3f4be 100644 --- a/tests/utils/settings_test.py +++ b/tests/utils/settings_test.py @@ -17,4 +17,3 @@ # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see . -from node_cli.utils.node_type import NodeMode, NodeType From 070111caad2797fb794ad95aa1515154a09bb350 Mon Sep 17 00:00:00 2001 From: Dmytro Date: Mon, 16 Feb 2026 13:15:06 +0000 Subject: [PATCH 325/332] Update node-cli to use skale core package --- node_cli/cli/node.py | 2 +- node_cli/cli/schains.py | 2 +- node_cli/core/checks.py | 2 +- node_cli/core/host.py | 2 +- node_cli/core/node.py | 4 +- node_cli/core/resources.py | 2 +- node_cli/core/schains.py | 2 +- node_cli/core/static_config.py | 2 +- node_cli/fair/common.py | 2 +- node_cli/fair/record/chain_record.py | 2 +- node_cli/operations/base.py | 2 +- node_cli/operations/fair.py | 4 +- node_cli/utils/docker_utils.py | 2 +- node_cli/utils/settings.py | 2 +- pyproject.toml | 2 +- tests/cli/fair_cli_test.py | 2 +- tests/cli/node_test.py | 6 +-- tests/cli/passive_node_test.py | 6 +-- tests/core/core_node_test.py | 18 ++------ tests/fair/fair_node_test.py | 63 ++++++++-------------------- tests/fixtures/settings.py | 2 +- 21 files changed, 43 insertions(+), 88 deletions(-) diff --git a/node_cli/cli/node.py b/node_cli/cli/node.py index cb798a1c..b34fd155 100644 --- a/node_cli/cli/node.py 
+++ b/node_cli/cli/node.py @@ -21,7 +21,7 @@ import click -from skale.core.types import EnvType +from skale_core.types import EnvType from node_cli.cli.info import TYPE from node_cli.core.node import ( cleanup as cleanup_skale, diff --git a/node_cli/cli/schains.py b/node_cli/cli/schains.py index 40f1ab0c..6bb1fce2 100644 --- a/node_cli/cli/schains.py +++ b/node_cli/cli/schains.py @@ -21,7 +21,7 @@ import click -from skale.core.settings import get_settings +from skale_core.settings import get_settings from node_cli.utils.helper import abort_if_false, URL_TYPE from node_cli.core.schains import ( diff --git a/node_cli/core/checks.py b/node_cli/core/checks.py index e7677847..0424577f 100644 --- a/node_cli/core/checks.py +++ b/node_cli/core/checks.py @@ -48,7 +48,7 @@ from debian import debian_support from packaging.version import parse as version_parse -from skale.core.types import EnvType +from skale_core.types import EnvType from node_cli.configs import ( CHECK_REPORT_PATH, diff --git a/node_cli/core/host.py b/node_cli/core/host.py index 3a4b036a..a040b8c4 100644 --- a/node_cli/core/host.py +++ b/node_cli/core/host.py @@ -22,7 +22,7 @@ from shutil import chown from urllib.parse import urlparse -from skale.core.types import EnvType +from skale_core.types import EnvType from node_cli.core.resources import update_resource_allocation from node_cli.utils.helper import error_exit diff --git a/node_cli/core/node.py b/node_cli/core/node.py index 8bef033c..0398010f 100644 --- a/node_cli/core/node.py +++ b/node_cli/core/node.py @@ -88,7 +88,7 @@ print_node_info, ) from node_cli.utils.settings import validate_and_save_node_settings -from skale.core.settings import get_settings +from skale_core.settings import get_settings from node_cli.utils.texts import safe_load_texts logger = logging.getLogger(__name__) @@ -242,7 +242,7 @@ def update_passive(config_file: str) -> None: def cleanup(node_mode: NodeMode, prune: bool = False) -> None: node_mode = 
upsert_node_mode(node_mode=node_mode) env = compose_node_env(NodeType.SKALE, node_mode) - cleanup_skale_op(node_mode=node_mode, env=env, prune=prune) + cleanup_skale_op(node_mode=node_mode, compose_env=env, prune=prune) logger.info('SKALE node was cleaned up, all containers and data removed') diff --git a/node_cli/core/resources.py b/node_cli/core/resources.py index 9a02d0b9..34f4cac5 100644 --- a/node_cli/core/resources.py +++ b/node_cli/core/resources.py @@ -24,7 +24,7 @@ import psutil -from skale.core.types import EnvType +from skale_core.types import EnvType from node_cli.utils.settings import validate_and_save_node_settings from node_cli.utils.docker_utils import ensure_volume diff --git a/node_cli/core/schains.py b/node_cli/core/schains.py index 174430e7..8aac42f1 100644 --- a/node_cli/core/schains.py +++ b/node_cli/core/schains.py @@ -26,7 +26,7 @@ from pathlib import Path from typing import Dict, Optional -from skale.core.types import EnvType +from skale_core.types import EnvType from lvmpy.src.core import mount, volume_mountpoint from node_cli.configs import ( diff --git a/node_cli/core/static_config.py b/node_cli/core/static_config.py index c2b90641..06e0e7f4 100644 --- a/node_cli/core/static_config.py +++ b/node_cli/core/static_config.py @@ -28,7 +28,7 @@ ) from node_cli.utils.node_type import NodeType -from skale.core.types import EnvType +from skale_core.types import EnvType def get_static_params( diff --git a/node_cli/fair/common.py b/node_cli/fair/common.py index 3b506c74..37fdba67 100644 --- a/node_cli/fair/common.py +++ b/node_cli/fair/common.py @@ -40,7 +40,7 @@ from node_cli.utils.print_formatters import print_node_cmd_error from node_cli.utils.settings import validate_and_save_node_settings from node_cli.utils.texts import safe_load_texts -from skale.core.settings import get_settings +from skale_core.settings import get_settings logger = logging.getLogger(__name__) TEXTS = safe_load_texts() diff --git a/node_cli/fair/record/chain_record.py 
b/node_cli/fair/record/chain_record.py index 16ff8a72..b6d48886 100644 --- a/node_cli/fair/record/chain_record.py +++ b/node_cli/fair/record/chain_record.py @@ -22,7 +22,7 @@ from typing import cast from datetime import datetime -from skale.core.types import EnvType +from skale_core.types import EnvType from node_cli.core.static_config import get_fair_chain_name from node_cli.fair.record.redis_record import FlatRedisRecord, FieldInfo diff --git a/node_cli/operations/base.py b/node_cli/operations/base.py index c164b11a..803fe299 100644 --- a/node_cli/operations/base.py +++ b/node_cli/operations/base.py @@ -24,7 +24,7 @@ import distro -from skale.core.settings import BaseNodeSettings, SkalePassiveSettings, SkaleSettings, get_settings +from skale_core.settings import BaseNodeSettings, SkalePassiveSettings, SkaleSettings, get_settings from node_cli.cli.info import TYPE, VERSION from node_cli.configs import ( diff --git a/node_cli/operations/fair.py b/node_cli/operations/fair.py index 015abfe0..3a48fdbd 100644 --- a/node_cli/operations/fair.py +++ b/node_cli/operations/fair.py @@ -23,8 +23,8 @@ import distro -from skale.core.settings import BaseNodeSettings, FairBaseSettings, FairSettings, get_settings -from skale.core.types import EnvType +from skale_core.settings import BaseNodeSettings, FairBaseSettings, FairSettings, get_settings +from skale_core.types import EnvType from node_cli.cli.info import TYPE, VERSION from node_cli.configs import ( diff --git a/node_cli/utils/docker_utils.py b/node_cli/utils/docker_utils.py index 8f0e1bb1..98a4946c 100644 --- a/node_cli/utils/docker_utils.py +++ b/node_cli/utils/docker_utils.py @@ -29,7 +29,7 @@ from docker.errors import NotFound from docker.models.containers import Container -from skale.core.settings import BaseNodeSettings +from skale_core.settings import BaseNodeSettings from node_cli.configs import ( COMPOSE_PATH, diff --git a/node_cli/utils/settings.py b/node_cli/utils/settings.py index e10e68ce..8e7f957c 100644 --- 
a/node_cli/utils/settings.py +++ b/node_cli/utils/settings.py @@ -21,7 +21,7 @@ from dotenv.main import DotEnv -from skale.core.settings import ( +from skale_core.settings import ( SETTINGS_MAP, BaseNodeSettings, FairBaseSettings, diff --git a/pyproject.toml b/pyproject.toml index 3f2e25d1..4ab3258d 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -45,7 +45,7 @@ dependencies = [ "requests-mock==1.12.1", "redis==7.1.1", "PyInstaller==6.18.0", - "skale.py==7.12dev2", + "skale.py-core==7.13.dev1", ] [project.urls] diff --git a/tests/cli/fair_cli_test.py b/tests/cli/fair_cli_test.py index 2f93d1a7..14e2aee9 100644 --- a/tests/cli/fair_cli_test.py +++ b/tests/cli/fair_cli_test.py @@ -137,5 +137,5 @@ def test_cleanup_node(mocked_g_config, inited_node): result = run_command(cleanup_node, ['--yes']) assert result.exit_code == 0 cleanup_mock.assert_called_once_with( - node_mode=NodeMode.ACTIVE, prune=False, env={'SCHAIN_NAME': 'test'} + node_mode=NodeMode.ACTIVE, prune=False, compose_env={'SCHAIN_NAME': 'test'} ) diff --git a/tests/cli/node_test.py b/tests/cli/node_test.py index 63e96259..04317f98 100644 --- a/tests/cli/node_test.py +++ b/tests/cli/node_test.py @@ -347,7 +347,6 @@ def test_restore(request, node_type, node_mode, test_user_conf, mocked_g_config, return_value=CliMeta(version='2.4.0', config_stream='3.0.2'), ), patch('node_cli.operations.base.configure_nftables'), - patch('node_cli.configs.user.validate_alias_or_address'), ): user_conf_path = request.getfixturevalue(test_user_conf).as_posix() result = run_command(restore_node, [backup_path, user_conf_path]) @@ -389,10 +388,8 @@ def test_turn_off_maintenance_on(mocked_g_config, regular_user_conf, active_node resp_mock = response_mock(requests.codes.ok, {'status': 'ok', 'payload': None}) with ( mock.patch('subprocess.run', new=subprocess_run_mock), - mock.patch('node_cli.core.node.SKALE_DIR_ENV_FILEPATH', regular_user_conf.as_posix()), mock.patch('node_cli.core.node.turn_off_op'), 
mock.patch('node_cli.utils.decorators.is_node_inited', return_value=True), - mock.patch('node_cli.configs.user.validate_alias_or_address'), mock.patch('node_cli.cli.node.TYPE', NodeType.SKALE), ): result = run_command_mock( @@ -425,7 +422,6 @@ def test_turn_on_maintenance_off(mocked_g_config, regular_user_conf, active_node mock.patch('node_cli.core.node.turn_on_op'), mock.patch('node_cli.core.node.is_base_containers_alive'), mock.patch('node_cli.utils.decorators.is_node_inited', return_value=True), - mock.patch('node_cli.configs.user.validate_alias_or_address'), mock.patch('node_cli.cli.node.TYPE', NodeType.SKALE), ): result = run_command_mock( @@ -493,4 +489,4 @@ def test_cleanup_node(mocked_g_config): ): result = run_command(cleanup_node, ['--yes']) assert result.exit_code == 0 - cleanup_mock.assert_called_once_with(node_mode=NodeMode.ACTIVE, prune=False, env={}) + cleanup_mock.assert_called_once_with(node_mode=NodeMode.ACTIVE, prune=False, compose_env={}) diff --git a/tests/cli/passive_node_test.py b/tests/cli/passive_node_test.py index c9ea9527..504c5876 100644 --- a/tests/cli/passive_node_test.py +++ b/tests/cli/passive_node_test.py @@ -45,7 +45,6 @@ def test_init_passive(mocked_g_config, clean_node_options, passive_user_conf): mock.patch('node_cli.core.resources.get_disk_size', return_value=BIG_DISK_SIZE), mock.patch('node_cli.operations.base.configure_nftables'), mock.patch('node_cli.utils.decorators.is_node_inited', return_value=False), - mock.patch('node_cli.configs.user.validate_alias_or_address'), ): result = run_command(_init_passive, [passive_user_conf.as_posix()]) @@ -77,7 +76,6 @@ def test_init_passive_archive(mocked_g_config, clean_node_options, passive_user_ mock.patch('node_cli.core.resources.get_disk_size', return_value=BIG_DISK_SIZE), mock.patch('node_cli.operations.base.configure_nftables'), mock.patch('node_cli.utils.decorators.is_node_inited', return_value=False), - mock.patch('node_cli.configs.user.validate_alias_or_address'), 
mock.patch('node_cli.cli.node.TYPE', NodeType.SKALE), ): result = run_command(_init_passive, [passive_user_conf.as_posix(), '--archive']) @@ -121,7 +119,6 @@ def test_update_passive(passive_user_conf, mocked_g_config): 'node_cli.core.node.CliMetaManager.get_meta_info', return_value=CliMeta(version='2.6.0', config_stream='3.0.2'), ), - mock.patch('node_cli.configs.user.validate_alias_or_address'), ): result = run_command(_update_passive, [passive_user_conf.as_posix(), '--yes']) assert result.exit_code == 0 @@ -146,4 +143,5 @@ def test_cleanup_node(mocked_g_config): result = run_command(cleanup_node, ['--yes']) assert result.exit_code == 0 cleanup_mock.assert_called_once_with( - node_mode=NodeMode.PASSIVE, prune=False, env={'SCHAIN_NAME': 'test'}) + node_mode=NodeMode.PASSIVE, prune=False, compose_env={'SCHAIN_NAME': 'test'} + ) diff --git a/tests/core/core_node_test.py b/tests/core/core_node_test.py index ead55528..b9d66960 100644 --- a/tests/core/core_node_test.py +++ b/tests/core/core_node_test.py @@ -14,7 +14,6 @@ NODE_DATA_PATH, SCHAINS_MNT_DIR_REGULAR, SCHAINS_MNT_DIR_SINGLE_CHAIN, - SKALE_DIR, ) from node_cli.configs.resource_allocation import RESOURCE_ALLOCATION_FILEPATH @@ -38,8 +37,6 @@ ALPINE_IMAGE_NAME = 'alpine:3.12' CMD = 'sleep 60' -SKALE_DIR_ENV_FILEPATH = os.path.join(SKALE_DIR, '.env') - WRONG_CONTAINERS = [ 'WRONG_CONTAINER_1', 'skale_WRONG_CONTAINER_4', @@ -283,9 +280,8 @@ def test_init_node(regular_user_conf, no_resource_file): # todo: write new init mock.patch('node_cli.core.node.init_op'), mock.patch('node_cli.core.node.is_base_containers_alive', return_value=True), mock.patch('node_cli.utils.helper.post_request', resp_mock), - mock.patch('node_cli.configs.user.validate_alias_or_address'), ): - init(env_filepath=regular_user_conf.as_posix(), node_type=NodeType.SKALE) + init(config_file=regular_user_conf.as_posix(), node_type=NodeType.SKALE) assert os.path.isfile(RESOURCE_ALLOCATION_FILEPATH) @@ -295,7 +291,6 @@ def 
test_update_node(regular_user_conf, mocked_g_config, resource_file, inited_n with ( mock.patch('subprocess.run', new=subprocess_run_mock), mock.patch('node_cli.core.node.update_op'), - mock.patch('node_cli.core.node.save_env_params'), mock.patch('node_cli.operations.base.configure_nftables'), mock.patch('node_cli.core.host.prepare_host'), mock.patch('node_cli.core.node.is_base_containers_alive', return_value=True), @@ -306,7 +301,6 @@ def test_update_node(regular_user_conf, mocked_g_config, resource_file, inited_n 'node_cli.core.node.CliMetaManager.get_meta_info', return_value=CliMeta(version='2.6.0', config_stream='3.0.2'), ), - mock.patch('node_cli.configs.user.validate_alias_or_address'), ): with mock.patch( 'node_cli.utils.helper.requests.get', return_value=safe_update_api_response() @@ -428,13 +422,7 @@ def test_cleanup_success( cleanup(node_mode=NodeMode.ACTIVE) - mock_compose_env.assert_called_once_with( - SKALE_DIR_ENV_FILEPATH, - save=False, - node_type=NodeType.SKALE, - node_mode=NodeMode.ACTIVE, - skip_user_conf_validation=True, - ) + mock_compose_env.assert_called_once_with(NodeType.SKALE, NodeMode.ACTIVE) mock_cleanup_skale_op.assert_called_once_with( - node_mode=NodeMode.ACTIVE, env=mock_env, prune=False + node_mode=NodeMode.ACTIVE, compose_env=mock_env, prune=False ) diff --git a/tests/fair/fair_node_test.py b/tests/fair/fair_node_test.py index 62fcf271..d1908ef6 100644 --- a/tests/fair/fair_node_test.py +++ b/tests/fair/fair_node_test.py @@ -1,9 +1,7 @@ -import os from unittest import mock import pytest -from node_cli.configs import SKALE_DIR from node_cli.fair.boot import init as init_boot from node_cli.fair.boot import update from node_cli.fair.common import cleanup @@ -11,16 +9,12 @@ from node_cli.operations.fair import FairUpdateType from node_cli.utils.node_type import NodeMode, NodeType -SKALE_DIR_ENV_FILEPATH = os.path.join(SKALE_DIR, '.env') - @mock.patch('node_cli.fair.active.time.sleep') @mock.patch('node_cli.fair.active.restore_fair_op') 
-@mock.patch('node_cli.fair.active.save_env_params') @mock.patch('node_cli.fair.active.compose_node_env') def test_restore_fair( mock_compose_env, - mock_save_env, mock_restore_op, mock_sleep, valid_env_file, @@ -35,13 +29,12 @@ def test_restore_fair( restore(backup_path, valid_env_file) mock_compose_env.assert_called_once_with( - valid_env_file, node_type=NodeType.FAIR, node_mode=NodeMode.ACTIVE + node_type=NodeType.FAIR, node_mode=NodeMode.ACTIVE ) - mock_save_env.assert_called_once_with(valid_env_file) - expected_env = {**mock_env, 'SKALE_DIR': SKALE_DIR} mock_restore_op.assert_called_once_with( node_mode=NodeMode.ACTIVE, - env=expected_env, + settings=mock.ANY, + compose_env=mock_env, backup_path=backup_path, config_only=False, ) @@ -66,15 +59,12 @@ def test_init_fair_boot( init_boot(valid_env_file) mock_compose_env.assert_called_once_with( - valid_env_file, - node_type=NodeType.FAIR, - node_mode=NodeMode.ACTIVE, - is_fair_boot=True, + node_type=NodeType.FAIR, node_mode=NodeMode.ACTIVE ) mock_init_op.assert_called_once_with( - valid_env_file, - mock_env, - NodeMode.ACTIVE, + settings=mock.ANY, + compose_env=mock_env, + node_mode=NodeMode.ACTIVE, ) mock_sleep.assert_called_once() mock_is_alive.assert_called_once_with( @@ -106,17 +96,11 @@ def test_update_fair_boot( update(valid_env_file, pull_config_for_schain) mock_compose_env.assert_called_once_with( - valid_env_file, - inited_node=True, - sync_schains=False, - pull_config_for_schain=pull_config_for_schain, - node_type=NodeType.FAIR, - node_mode=NodeMode.ACTIVE, - is_fair_boot=True, + node_type=NodeType.FAIR, node_mode=NodeMode.ACTIVE ) mock_update_op.assert_called_once_with( - valid_env_file, - mock_env, + settings=mock.ANY, + compose_env=mock_env, node_mode=NodeMode.ACTIVE, ) mock_sleep.assert_called_once() @@ -144,15 +128,11 @@ def test_migrate_from_boot( migrate_from_boot(valid_env_file) mock_compose_env.assert_called_once_with( - valid_env_file, - inited_node=True, - sync_schains=False, - 
node_type=NodeType.FAIR, - node_mode=NodeMode.ACTIVE, + node_type=NodeType.FAIR, node_mode=NodeMode.ACTIVE ) mock_migrate_op.assert_called_once_with( - valid_env_file, - mock_env, + settings=mock.ANY, + compose_env=mock_env, node_mode=NodeMode.ACTIVE, update_type=FairUpdateType.FROM_BOOT, force_skaled_start=False, @@ -177,14 +157,10 @@ def test_cleanup_success( cleanup(node_mode=NodeMode.ACTIVE) mock_compose_env.assert_called_once_with( - SKALE_DIR_ENV_FILEPATH, - save=False, - node_type=NodeType.FAIR, - node_mode=NodeMode.ACTIVE, - skip_user_conf_validation=True, + node_type=NodeType.FAIR, node_mode=NodeMode.ACTIVE ) mock_cleanup_fair_op.assert_called_once_with( - node_mode=NodeMode.ACTIVE, env=mock_env, prune=False + node_mode=NodeMode.ACTIVE, compose_env=mock_env, prune=False ) @@ -213,13 +189,10 @@ def test_cleanup_calls_operations_in_correct_order( expected_calls = [ mock.call.compose_env( - mock.ANY, - save=False, - node_type=mock.ANY, + node_type=NodeType.FAIR, node_mode=NodeMode.ACTIVE, - skip_user_conf_validation=True, ), - mock.call.cleanup_fair_op(node_mode=NodeMode.ACTIVE, env=mock_env, prune=False), + mock.call.cleanup_fair_op(node_mode=NodeMode.ACTIVE, compose_env=mock_env, prune=False), ] manager.assert_has_calls(expected_calls, any_order=False) @@ -244,7 +217,7 @@ def test_cleanup_continues_after_fair_op_error( mock_compose_env.assert_called_once() mock_cleanup_fair_op.assert_called_once_with( - node_mode=NodeMode.ACTIVE, env=mock_env, prune=False + node_mode=NodeMode.ACTIVE, compose_env=mock_env, prune=False ) diff --git a/tests/fixtures/settings.py b/tests/fixtures/settings.py index 11e53ede..55b080b2 100644 --- a/tests/fixtures/settings.py +++ b/tests/fixtures/settings.py @@ -20,7 +20,7 @@ import pytest import tomli_w -from skale.core.settings import get_internal_settings +from skale_core.settings import get_internal_settings from node_cli.configs import INTERNAL_SETTINGS_PATH, NODE_SETTINGS_PATH From 19d277e5d5d0371543669d049b7e04195ebc8246 Mon 
Sep 17 00:00:00 2001 From: Dmytro Date: Mon, 16 Feb 2026 15:58:40 +0000 Subject: [PATCH 326/332] remove unused modules, fix test pipeline --- node_cli/configs/alias_address_validation.py | 118 ------------------- node_cli/utils/helper.py | 4 - scripts/run_tests.sh | 5 + tests/configs/configs_env_validate_test.py | 117 ------------------ tests/conftest.py | 17 ++- 5 files changed, 12 insertions(+), 249 deletions(-) delete mode 100644 node_cli/configs/alias_address_validation.py delete mode 100644 tests/configs/configs_env_validate_test.py diff --git a/node_cli/configs/alias_address_validation.py b/node_cli/configs/alias_address_validation.py deleted file mode 100644 index ffdbb2ee..00000000 --- a/node_cli/configs/alias_address_validation.py +++ /dev/null @@ -1,118 +0,0 @@ -# -*- coding: utf-8 -*- -# -# This file is part of node-cli -# -# Copyright (C) 2025-Present SKALE Labs -# -# This program is free software: you can redistribute it and/or modify -# it under the terms of the GNU Affero General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU Affero General Public License for more details. -# -# You should have received a copy of the GNU Affero General Public License -# along with this program. If not, see . 
- -from enum import Enum -from typing import Dict, Optional - -import requests - -from node_cli.utils.helper import error_exit, is_contract_address - - -METADATA_URL: str = ( - 'https://raw.githubusercontent.com/skalenetwork/skale-contracts/' - 'refs/heads/deployments/metadata.json' -) - - -class ContractType(Enum): - """Contract types supported by the system using skale-contracts library.""" - - IMA = 'mainnet-ima' - MANAGER = 'skale-manager' - - -def validate_alias_or_address( - alias_or_address: str, contract_type: ContractType, endpoint: str -) -> None: - if is_contract_address(alias_or_address): - validate_contract_address(alias_or_address, endpoint) - else: - validate_contract_alias(alias_or_address, contract_type, endpoint) - - -def validate_contract_address(contract_address: str, endpoint: str) -> None: - try: - response = requests.post( - endpoint, - json={ - 'jsonrpc': '2.0', - 'method': 'eth_getCode', - 'params': [contract_address, 'latest'], - 'id': 1, - }, - ) - if response.status_code != 200: - error_exit(f'Failed to verify contract at address {contract_address}') - result = response.json().get('result') - if not result or result in ['0x', '0x0']: - error_exit(f'No contract code found at address {contract_address}') - except requests.RequestException as e: - error_exit(f'Failed to validate contract address: {str(e)}') - - -def get_deployment_url(alias: str, contract_type: ContractType, network_path: str) -> str: - return ( - f'https://raw.githubusercontent.com/skalenetwork/skale-contracts/' - f'refs/heads/deployments/{network_path}/{contract_type.value}/{alias}.json' - ) - - -def validate_contract_alias(alias: str, contract_type: ContractType, endpoint: str) -> None: - try: - chain_id = get_chain_id(endpoint) - metadata = get_network_metadata() - networks = metadata.get('networks', []) - network_path: Optional[str] = None - for net in networks: - if net.get('chainId') == chain_id: - network_path = net.get('path') - break - if not network_path: - 
error_exit(f'Network with chain ID {chain_id} not found in metadata') - if not isinstance(network_path, str): - error_exit(f'Invalid network path type: {network_path}') - deployment_url = get_deployment_url(alias, contract_type, network_path) - if requests.get(deployment_url).status_code != 200: - error_exit(f"Contract alias '{alias}' not found for {contract_type.value}") - except requests.RequestException as e: - error_exit(f"Failed to validate contract alias '{alias}': {str(e)}") - - -def get_chain_id(endpoint: str) -> int: - try: - response = requests.post( - endpoint, - json={'jsonrpc': '2.0', 'method': 'eth_chainId', 'params': [], 'id': 1}, - ) - if response.status_code != 200: - error_exit('Failed to get chain ID from endpoint') - return int(response.json()['result'], 16) - except requests.RequestException as e: - error_exit(f'Failed to get chain ID: {str(e)}') - - -def get_network_metadata() -> Dict: - try: - response = requests.get(METADATA_URL) - if response.status_code != 200: - error_exit('Failed to fetch networks metadata') - return response.json() - except requests.RequestException as e: - error_exit(f'Failed to fetch networks metadata: {str(e)}') diff --git a/node_cli/utils/helper.py b/node_cli/utils/helper.py index d2c598fe..8a642aee 100644 --- a/node_cli/utils/helper.py +++ b/node_cli/utils/helper.py @@ -416,10 +416,6 @@ def get_ssh_port(ssh_service_name='ssh'): return DEFAULT_SSH_PORT -def is_contract_address(value: str) -> bool: - return bool(re.fullmatch(r'0x[a-fA-F0-9]{40}', value)) - - def is_btrfs_subvolume(path: str) -> bool: """Check if the given path is a Btrfs subvolume.""" try: diff --git a/scripts/run_tests.sh b/scripts/run_tests.sh index 23592396..1d3a3fe6 100755 --- a/scripts/run_tests.sh +++ b/scripts/run_tests.sh @@ -1,3 +1,8 @@ #!/usr/bin/env bash +DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )" +PROJECT_DIR=$(dirname $DIR) + +. 
"$DIR/export_env.sh" + py.test --cov=$PROJECT_DIR/ --ignore=tests/core/nftables_test.py --ignore=tests/core/migration_test.py tests/ $@ diff --git a/tests/configs/configs_env_validate_test.py b/tests/configs/configs_env_validate_test.py deleted file mode 100644 index 52622f73..00000000 --- a/tests/configs/configs_env_validate_test.py +++ /dev/null @@ -1,117 +0,0 @@ -from typing import Optional - -import pytest -import requests - -from node_cli.configs.alias_address_validation import ( - ContractType, - get_chain_id, - get_network_metadata, - validate_alias_or_address, - validate_contract_address, - validate_contract_alias, -) - -ENDPOINT = 'http://localhost:8545' - - -class FakeResponse: - def __init__(self, status_code: int, json_data: Optional[dict] = None): - self.status_code = status_code - self._json_data = json_data or {} - - def json(self): - return self._json_data - - -def test_get_chain_id_success(monkeypatch): - fake_response = FakeResponse(200, {'result': '0x1'}) - - def fake_post(url, json): - return fake_response - - monkeypatch.setattr(requests, 'post', fake_post) - assert get_chain_id(ENDPOINT) == 1 - - -def test_get_chain_id_failure(monkeypatch): - fake_response = FakeResponse(404) - - def fake_post(url, json): - return fake_response - - monkeypatch.setattr(requests, 'post', fake_post) - with pytest.raises(SystemExit): - get_chain_id(ENDPOINT) - - -@pytest.mark.parametrize( - 'metadata,status_code,should_raise', - [ - ({'networks': [{'chainId': 1, 'path': 'mainnet'}]}, 200, False), - (None, 404, True), - ], -) -def test_get_network_metadata(requests_mock, metadata, status_code, should_raise): - metadata_url = 'https://raw.githubusercontent.com/skalenetwork/skale-contracts/refs/heads/deployments/metadata.json' - requests_mock.get(metadata_url, json=metadata, status_code=status_code) - - if should_raise: - with pytest.raises(SystemExit): - get_network_metadata() - else: - assert get_network_metadata() == metadata - - -@pytest.mark.parametrize( - 
'code,should_raise', - [ - ('0x123', False), - ('0x', True), - ], -) -def test_validate_contract_address(requests_mock, code, should_raise): - requests_mock.post(ENDPOINT, json={'result': code}) - addr = '0x' + 'a' * 40 - if should_raise: - with pytest.raises(SystemExit): - validate_contract_address(addr, ENDPOINT) - else: - validate_contract_address(addr, ENDPOINT) - - -@pytest.mark.parametrize( - 'networks,should_raise', - [ - ([{'chainId': 1, 'path': 'mainnet'}], False), - ([], True), - ], -) -def test_validate_contract_alias(requests_mock, networks, should_raise): - requests_mock.post(ENDPOINT, json={'result': '0x1'}) - metadata_url = 'https://raw.githubusercontent.com/skalenetwork/skale-contracts/refs/heads/deployments/metadata.json' - requests_mock.get(metadata_url, json={'networks': networks}, status_code=200) - - if not should_raise: - alias_url = 'https://raw.githubusercontent.com/skalenetwork/skale-contracts/refs/heads/deployments/mainnet/skale-manager/test-alias.json' - requests_mock.get(alias_url, status_code=200) - validate_contract_alias('test-alias', ContractType.MANAGER, ENDPOINT) - else: - with pytest.raises(SystemExit): - validate_contract_alias('test-alias', ContractType.MANAGER, ENDPOINT) - - -def test_validate_env_alias_or_address_with_address(requests_mock): - addr = '0x' + 'b' * 40 - requests_mock.post(ENDPOINT, json={'result': '0x1'}) - validate_alias_or_address(addr, ContractType.IMA, ENDPOINT) - - -def test_validate_env_alias_or_address_with_alias(requests_mock): - requests_mock.post(ENDPOINT, json={'result': '0x1'}) - metadata_url = 'https://raw.githubusercontent.com/skalenetwork/skale-contracts/refs/heads/deployments/metadata.json' - metadata = {'networks': [{'chainId': 1, 'path': 'mainnet'}]} - requests_mock.get(metadata_url, json=metadata, status_code=200) - alias_url = 'https://raw.githubusercontent.com/skalenetwork/skale-contracts/refs/heads/deployments/mainnet/mainnet-ima/test-alias.json' - requests_mock.get(alias_url, 
status_code=200) - validate_alias_or_address('test-alias', ContractType.IMA, ENDPOINT) diff --git a/tests/conftest.py b/tests/conftest.py index e9d4b6cc..7dc5cbda 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -296,13 +296,9 @@ def tmp_passive_datadir(): def valid_env_params(): return { 'ENDPOINT': 'http://localhost:8545', - 'IMA_ENDPOINT': 'http://127.0.01', - 'DB_USER': 'user', - 'DB_PASSWORD': 'pass', - 'DB_PORT': '3307', 'NODE_VERSION': 'master', 'FILEBEAT_HOST': '127.0.0.1:3010', - 'SGX_SERVER_URL': 'http://127.0.0.1', + 'SGX_URL': 'http://127.0.0.1', 'BLOCK_DEVICE': '/dev/sss', 'DOCKER_LVMPY_VERSION': 'master', 'ENV_TYPE': 'devnet', @@ -310,6 +306,7 @@ def valid_env_params(): 'ENFORCE_BTRFS': 'False', 'MANAGER_CONTRACTS': 'test-manager', 'IMA_CONTRACTS': 'test-ima', + 'FAIR_CONTRACTS': 'test-fair', } @@ -367,7 +364,7 @@ def regular_user_conf(tmp_path): ENDPOINT=http://localhost:8545 NODE_VERSION='main' FILEBEAT_HOST=127.0.0.1:3010 - SGX_SERVER_URL=http://127.0.0.1 + SGX_URL=http://127.0.0.1 BLOCK_DEVICE=/dev/sss DOCKER_LVMPY_VERSION='master' ENV_TYPE='devnet' @@ -389,7 +386,7 @@ def fair_user_conf(tmp_path): ENDPOINT=http://localhost:8545 NODE_VERSION='main' FILEBEAT_HOST=127.0.0.1:3010 - SGX_SERVER_URL=http://127.0.0.1 + SGX_URL=http://127.0.0.1 BLOCK_DEVICE=/dev/sss ENV_TYPE='devnet' ENFORCE_BTRFS=False @@ -410,11 +407,10 @@ def fair_boot_user_conf(tmp_path): ENDPOINT=http://localhost:8545 NODE_VERSION='main' FILEBEAT_HOST=127.0.0.1:3010 - SGX_SERVER_URL=http://127.0.0.1 + SGX_URL=http://127.0.0.1 BLOCK_DEVICE=/dev/sss ENV_TYPE='devnet' - MANAGER_CONTRACTS='test-manager' - IMA_CONTRACTS='test-ima' + FAIR_CONTRACTS='test-fair' """ with open(test_env_path, 'w') as env_file: env_file.write(test_env) @@ -436,6 +432,7 @@ def passive_user_conf(tmp_path): SCHAIN_NAME='test-schain' ENFORCE_BTRFS=False MANAGER_CONTRACTS='test-manager' + IMA_CONTRACTS='test-ima' """ with open(test_env_path, 'w') as env_file: env_file.write(test_env) From 
24ffcb0863e8833ad20cbfe352a87e0d1494d412 Mon Sep 17 00:00:00 2001 From: Dmytro Date: Mon, 16 Feb 2026 16:57:50 +0000 Subject: [PATCH 327/332] fix passive node tests --- tests/.skale/node_data/settings/node.toml | 17 +++++++++++++ tests/cli/fair_passive_node_test.py | 27 +++++++++----------- tests/cli/passive_node_test.py | 5 +++- tests/conftest.py | 31 ++++++++++++++++++++--- 4 files changed, 60 insertions(+), 20 deletions(-) create mode 100644 tests/.skale/node_data/settings/node.toml diff --git a/tests/.skale/node_data/settings/node.toml b/tests/.skale/node_data/settings/node.toml new file mode 100644 index 00000000..cbc85451 --- /dev/null +++ b/tests/.skale/node_data/settings/node.toml @@ -0,0 +1,17 @@ +env_type = "devnet" +endpoint = "http://localhost:8545/" +bite = false +container_stop_timeout = 300 +max_skaled_restart_count = 5 +disable_colors = false +node_version = "main" +block_device = "/dev/sss" +filebeat_host = "127.0.0.1:3010" +container_configs_dir = "" +skip_docker_config = false +skip_docker_cleanup = false +monitoring_containers = false +manager_contracts = "test-manager" +ima_contracts = "test-ima" +schain_name = "test-schain" +enforce_btrfs = false diff --git a/tests/cli/fair_passive_node_test.py b/tests/cli/fair_passive_node_test.py index a8b7818a..00c61ca0 100644 --- a/tests/cli/fair_passive_node_test.py +++ b/tests/cli/fair_passive_node_test.py @@ -15,15 +15,12 @@ init_default_logger() -def test_init_fair_passive(mocked_g_config, tmp_path): - env_file = tmp_path / 'test-env' - env_file.write_text('') +def test_init_fair_passive(mocked_g_config, fair_passive_settings, fair_passive_user_conf): pathlib.Path(SKALE_DIR).mkdir(parents=True, exist_ok=True) with ( mock.patch('subprocess.run', new=subprocess_run_mock), mock.patch('node_cli.fair.common.init_fair_op', return_value=True), mock.patch('node_cli.fair.common.compose_node_env', return_value={}), - mock.patch('node_cli.fair.common.save_env_params'), 
mock.patch('node_cli.fair.passive.setup_fair_passive'), mock.patch('node_cli.fair.common.time.sleep'), mock.patch('node_cli.core.node.is_base_containers_alive', return_value=True), @@ -34,7 +31,7 @@ def test_init_fair_passive(mocked_g_config, tmp_path): result = run_command( init_passive_node, [ - env_file.as_posix(), + fair_passive_user_conf.as_posix(), '--id', '1', ], @@ -42,15 +39,14 @@ def test_init_fair_passive(mocked_g_config, tmp_path): assert result.exit_code == 0 -def test_init_fair_passive_snapshot_any(mocked_g_config, tmp_path): - env_file = tmp_path / 'test-env' - env_file.write_text('') +def test_init_fair_passive_snapshot_any( + mocked_g_config, fair_passive_settings, fair_passive_user_conf +): pathlib.Path(SKALE_DIR).mkdir(parents=True, exist_ok=True) with ( mock.patch('subprocess.run', new=subprocess_run_mock), mock.patch('node_cli.fair.common.init_fair_op', return_value=True), mock.patch('node_cli.fair.common.compose_node_env', return_value={}), - mock.patch('node_cli.fair.common.save_env_params'), mock.patch('node_cli.fair.passive.setup_fair_passive'), mock.patch('node_cli.fair.common.time.sleep'), mock.patch('node_cli.core.node.is_base_containers_alive', return_value=True), @@ -61,7 +57,7 @@ def test_init_fair_passive_snapshot_any(mocked_g_config, tmp_path): result = run_command( init_passive_node, [ - env_file.as_posix(), + fair_passive_user_conf.as_posix(), '--id', '2', '--snapshot', @@ -71,9 +67,9 @@ def test_init_fair_passive_snapshot_any(mocked_g_config, tmp_path): assert result.exit_code == 0 -def test_update_fair_passive(mocked_g_config, tmp_path, clean_node_options): - env_file = tmp_path / 'test-env' - env_file.write_text('') +def test_update_fair_passive( + mocked_g_config, fair_passive_settings, fair_passive_user_conf, clean_node_options +): pathlib.Path(NODE_DATA_PATH).mkdir(parents=True, exist_ok=True) with ( mock.patch('subprocess.run', new=subprocess_run_mock), @@ -84,7 +80,7 @@ def test_update_fair_passive(mocked_g_config, 
tmp_path, clean_node_options): mock.patch('node_cli.operations.base.configure_nftables'), mock.patch('node_cli.utils.decorators.is_node_inited', return_value=True), ): - result = run_command(update_node, [env_file.as_posix(), '--yes']) + result = run_command(update_node, [fair_passive_user_conf.as_posix(), '--yes']) assert result.exit_code == 0 @@ -107,4 +103,5 @@ def test_cleanup_node(mocked_g_config): result = run_command(cleanup_node, ['--yes']) assert result.exit_code == 0 cleanup_mock.assert_called_once_with( - node_mode=NodeMode.PASSIVE, prune=False, env={'SCHAIN_NAME': 'test'}) + node_mode=NodeMode.PASSIVE, compose_env={'SCHAIN_NAME': 'test'}, prune=False + ) diff --git a/tests/cli/passive_node_test.py b/tests/cli/passive_node_test.py index 504c5876..5bb15227 100644 --- a/tests/cli/passive_node_test.py +++ b/tests/cli/passive_node_test.py @@ -65,9 +65,12 @@ def test_init_passive_archive(mocked_g_config, clean_node_options, passive_user_ mock.patch('node_cli.operations.base.sync_skale_node'), mock.patch('node_cli.operations.base.configure_docker'), mock.patch('node_cli.operations.base.prepare_host'), + mock.patch('node_cli.operations.base.save_internal_settings'), + mock.patch('node_cli.operations.base.run_host_checks', return_value=[]), + mock.patch('node_cli.operations.base.set_passive_node_options'), mock.patch('node_cli.operations.base.ensure_filestorage_mapping'), - mock.patch('node_cli.operations.base.link_env_file'), mock.patch('node_cli.operations.base.generate_nginx_config'), + mock.patch('node_cli.operations.base.get_settings'), mock.patch('node_cli.operations.base.prepare_block_device'), mock.patch('node_cli.operations.base.CliMetaManager.update_meta'), mock.patch('node_cli.operations.base.update_resource_allocation'), diff --git a/tests/conftest.py b/tests/conftest.py index 7dc5cbda..dd65b2cb 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -49,6 +49,7 @@ from node_cli.utils.global_config import generate_g_config_file from 
node_cli.utils.node_type import NodeMode from tests.fixtures.settings import ( # noqa: F401 + _cleanup_settings, fair_active_settings, fair_passive_settings, skale_active_settings, @@ -300,12 +301,8 @@ def valid_env_params(): 'FILEBEAT_HOST': '127.0.0.1:3010', 'SGX_URL': 'http://127.0.0.1', 'BLOCK_DEVICE': '/dev/sss', - 'DOCKER_LVMPY_VERSION': 'master', 'ENV_TYPE': 'devnet', - 'SCHAIN_NAME': 'test', 'ENFORCE_BTRFS': 'False', - 'MANAGER_CONTRACTS': 'test-manager', - 'IMA_CONTRACTS': 'test-ima', 'FAIR_CONTRACTS': 'test-fair', } @@ -322,6 +319,7 @@ def valid_env_file(valid_env_params): finally: if file_name: os.unlink(file_name) + _cleanup_settings() @pytest.fixture @@ -376,6 +374,7 @@ def regular_user_conf(tmp_path): yield test_env_path finally: test_env_path.unlink() + _cleanup_settings() @pytest.fixture @@ -397,6 +396,7 @@ def fair_user_conf(tmp_path): yield test_env_path finally: test_env_path.unlink() + _cleanup_settings() @pytest.fixture @@ -417,6 +417,28 @@ def fair_boot_user_conf(tmp_path): yield test_env_path finally: test_env_path.unlink() + _cleanup_settings() + + +@pytest.fixture +def fair_passive_user_conf(tmp_path): + test_env_path = pathlib.Path(tmp_path / 'test-env') + try: + test_env = """ + ENDPOINT=http://localhost:8545 + NODE_VERSION='main' + FILEBEAT_HOST=127.0.0.1:3010 + BLOCK_DEVICE=/dev/sss + ENV_TYPE='devnet' + ENFORCE_BTRFS=False + FAIR_CONTRACTS='test-fair' + """ + with open(test_env_path, 'w') as env_file: + env_file.write(test_env) + yield test_env_path + finally: + test_env_path.unlink() + _cleanup_settings() @pytest.fixture @@ -439,6 +461,7 @@ def passive_user_conf(tmp_path): yield test_env_path finally: test_env_path.unlink() + _cleanup_settings() @pytest.fixture From 34a83f50366f1c8783574ea2d2e33958eb58cab4 Mon Sep 17 00:00:00 2001 From: Dmytro Date: Mon, 16 Feb 2026 17:25:58 +0000 Subject: [PATCH 328/332] fix passive node tests --- node_cli/core/node.py | 2 ++ node_cli/fair/common.py | 1 + tests/.skale/node_data/settings/node.toml | 
17 ----------------- tests/cli/passive_node_test.py | 1 - 4 files changed, 3 insertions(+), 18 deletions(-) delete mode 100644 tests/.skale/node_data/settings/node.toml diff --git a/node_cli/core/node.py b/node_cli/core/node.py index 0398010f..7c9e2409 100644 --- a/node_cli/core/node.py +++ b/node_cli/core/node.py @@ -247,6 +247,7 @@ def cleanup(node_mode: NodeMode, prune: bool = False) -> None: def compose_node_env(node_type: NodeType, node_mode: NodeMode) -> dict[str, str]: + st = get_settings() if node_mode == NodeMode.PASSIVE or node_type == NodeType.FAIR: mnt_dir = SCHAINS_MNT_DIR_SINGLE_CHAIN else: @@ -256,6 +257,7 @@ def compose_node_env(node_type: NodeType, node_mode: NodeMode) -> dict[str, str] 'SCHAINS_MNT_DIR': mnt_dir, 'FILESTORAGE_MAPPING': FILESTORAGE_MAPPING, 'SKALE_LIB_PATH': SKALE_STATE_DIR, + 'FILEBEAT_HOST': st.filebeat_host, } return {k: v for k, v in env.items() if v != ''} diff --git a/node_cli/fair/common.py b/node_cli/fair/common.py index 37fdba67..98f10a7f 100644 --- a/node_cli/fair/common.py +++ b/node_cli/fair/common.py @@ -76,6 +76,7 @@ def init( print('Fair node is initialized') +@check_inited @check_user def cleanup(node_mode: NodeMode, prune: bool = False) -> None: node_mode = upsert_node_mode(node_mode=node_mode) diff --git a/tests/.skale/node_data/settings/node.toml b/tests/.skale/node_data/settings/node.toml deleted file mode 100644 index cbc85451..00000000 --- a/tests/.skale/node_data/settings/node.toml +++ /dev/null @@ -1,17 +0,0 @@ -env_type = "devnet" -endpoint = "http://localhost:8545/" -bite = false -container_stop_timeout = 300 -max_skaled_restart_count = 5 -disable_colors = false -node_version = "main" -block_device = "/dev/sss" -filebeat_host = "127.0.0.1:3010" -container_configs_dir = "" -skip_docker_config = false -skip_docker_cleanup = false -monitoring_containers = false -manager_contracts = "test-manager" -ima_contracts = "test-ima" -schain_name = "test-schain" -enforce_btrfs = false diff --git 
a/tests/cli/passive_node_test.py b/tests/cli/passive_node_test.py index 5bb15227..e8419220 100644 --- a/tests/cli/passive_node_test.py +++ b/tests/cli/passive_node_test.py @@ -67,7 +67,6 @@ def test_init_passive_archive(mocked_g_config, clean_node_options, passive_user_ mock.patch('node_cli.operations.base.prepare_host'), mock.patch('node_cli.operations.base.save_internal_settings'), mock.patch('node_cli.operations.base.run_host_checks', return_value=[]), - mock.patch('node_cli.operations.base.set_passive_node_options'), mock.patch('node_cli.operations.base.ensure_filestorage_mapping'), mock.patch('node_cli.operations.base.generate_nginx_config'), mock.patch('node_cli.operations.base.get_settings'), From 7afb2318253bfb6b90ecbf6c13ef0ecd0c4f37ce Mon Sep 17 00:00:00 2001 From: Dmytro Date: Mon, 16 Feb 2026 18:18:24 +0000 Subject: [PATCH 329/332] update fixtures --- tests/conftest.py | 14 ++++++++++++++ 1 file changed, 14 insertions(+) diff --git a/tests/conftest.py b/tests/conftest.py index dd65b2cb..f42034bd 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -49,7 +49,16 @@ from node_cli.utils.global_config import generate_g_config_file from node_cli.utils.node_type import NodeMode from tests.fixtures.settings import ( # noqa: F401 + INTERNAL_FAIR_ACTIVE, + INTERNAL_FAIR_PASSIVE, + INTERNAL_SKALE_ACTIVE, + INTERNAL_SKALE_PASSIVE, + NODE_FAIR_ACTIVE, + NODE_FAIR_PASSIVE, + NODE_SKALE_ACTIVE, + NODE_SKALE_PASSIVE, _cleanup_settings, + _write_settings, fair_active_settings, fair_passive_settings, skale_active_settings, @@ -357,6 +366,7 @@ def set_env_var(name, value): @pytest.fixture def regular_user_conf(tmp_path): test_env_path = pathlib.Path(tmp_path / 'test-env') + _write_settings(INTERNAL_SKALE_ACTIVE, NODE_SKALE_ACTIVE) try: test_env = """ ENDPOINT=http://localhost:8545 @@ -380,6 +390,7 @@ def regular_user_conf(tmp_path): @pytest.fixture def fair_user_conf(tmp_path): test_env_path = pathlib.Path(tmp_path / 'test-env') + 
_write_settings(INTERNAL_FAIR_ACTIVE, NODE_FAIR_ACTIVE) try: test_env = """ ENDPOINT=http://localhost:8545 @@ -402,6 +413,7 @@ def fair_user_conf(tmp_path): @pytest.fixture def fair_boot_user_conf(tmp_path): test_env_path = pathlib.Path(tmp_path / 'test-env') + _write_settings(INTERNAL_FAIR_ACTIVE, NODE_FAIR_ACTIVE) try: test_env = """ ENDPOINT=http://localhost:8545 @@ -423,6 +435,7 @@ def fair_boot_user_conf(tmp_path): @pytest.fixture def fair_passive_user_conf(tmp_path): test_env_path = pathlib.Path(tmp_path / 'test-env') + _write_settings(INTERNAL_FAIR_PASSIVE, NODE_FAIR_PASSIVE) try: test_env = """ ENDPOINT=http://localhost:8545 @@ -444,6 +457,7 @@ def fair_passive_user_conf(tmp_path): @pytest.fixture def passive_user_conf(tmp_path): test_env_path = pathlib.Path(tmp_path / 'test-env') + _write_settings(INTERNAL_SKALE_PASSIVE, NODE_SKALE_PASSIVE) try: test_env = """ ENDPOINT=http://localhost:8545 From df5bdded17b2d896abeafbd28a5041b3b97aba0a Mon Sep 17 00:00:00 2001 From: Dmytro Date: Mon, 16 Feb 2026 19:18:13 +0000 Subject: [PATCH 330/332] fix fair node tests --- node_cli/fair/common.py | 1 - tests/cli/node_test.py | 4 ++-- tests/conftest.py | 22 ++++++++++++++++++++++ tests/fair/fair_node_test.py | 30 ++++++++++-------------------- 4 files changed, 34 insertions(+), 23 deletions(-) diff --git a/node_cli/fair/common.py b/node_cli/fair/common.py index 98f10a7f..37fdba67 100644 --- a/node_cli/fair/common.py +++ b/node_cli/fair/common.py @@ -76,7 +76,6 @@ def init( print('Fair node is initialized') -@check_inited @check_user def cleanup(node_mode: NodeMode, prune: bool = False) -> None: node_mode = upsert_node_mode(node_mode=node_mode) diff --git a/tests/cli/node_test.py b/tests/cli/node_test.py index 04317f98..a0c064f0 100644 --- a/tests/cli/node_test.py +++ b/tests/cli/node_test.py @@ -384,7 +384,7 @@ def test_maintenance_off(mocked_g_config): ) -def test_turn_off_maintenance_on(mocked_g_config, regular_user_conf, active_node_option): +def 
test_turn_off_maintenance_on(mocked_g_config, regular_user_conf, active_node_option, skale_active_settings): resp_mock = response_mock(requests.codes.ok, {'status': 'ok', 'payload': None}) with ( mock.patch('subprocess.run', new=subprocess_run_mock), @@ -415,7 +415,7 @@ def test_turn_off_maintenance_on(mocked_g_config, regular_user_conf, active_node assert result.exit_code == CLIExitCodes.UNSAFE_UPDATE -def test_turn_on_maintenance_off(mocked_g_config, regular_user_conf, active_node_option): +def test_turn_on_maintenance_off(mocked_g_config, regular_user_conf, active_node_option, skale_active_settings): resp_mock = response_mock(requests.codes.ok, {'status': 'ok', 'payload': None}) with ( mock.patch('subprocess.run', new=subprocess_run_mock), diff --git a/tests/conftest.py b/tests/conftest.py index f42034bd..3d6cc858 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -66,6 +66,28 @@ ) from tests.helper import TEST_META_V1, TEST_META_V2, TEST_META_V3, TEST_SCHAINS_MNT_DIR_SINGLE_CHAIN +TIMEOUT_PATCHES = [ + 'node_cli.configs.TM_INIT_TIMEOUT', + 'node_cli.configs.RESTORE_SLEEP_TIMEOUT', + 'node_cli.configs.INIT_TIMEOUT', + 'node_cli.core.node.TM_INIT_TIMEOUT', + 'node_cli.core.node.RESTORE_SLEEP_TIMEOUT', + 'node_cli.fair.common.TM_INIT_TIMEOUT', + 'node_cli.fair.common.INIT_TIMEOUT', + 'node_cli.fair.boot.TM_INIT_TIMEOUT', + 'node_cli.fair.active.RESTORE_SLEEP_TIMEOUT', +] + + +@pytest.fixture(autouse=True, scope='session') +def _fast_timeouts(): + patchers = [mock.patch(target, 1) for target in TIMEOUT_PATCHES] + for p in patchers: + p.start() + yield + for p in patchers: + p.stop() + @pytest.fixture() def tmp_dir_path(): diff --git a/tests/fair/fair_node_test.py b/tests/fair/fair_node_test.py index d1908ef6..28ade830 100644 --- a/tests/fair/fair_node_test.py +++ b/tests/fair/fair_node_test.py @@ -28,9 +28,7 @@ def test_restore_fair( restore(backup_path, valid_env_file) - mock_compose_env.assert_called_once_with( - node_type=NodeType.FAIR, 
node_mode=NodeMode.ACTIVE - ) + mock_compose_env.assert_called_once_with(node_type=NodeType.FAIR, node_mode=NodeMode.ACTIVE) mock_restore_op.assert_called_once_with( node_mode=NodeMode.ACTIVE, settings=mock.ANY, @@ -58,9 +56,7 @@ def test_init_fair_boot( init_boot(valid_env_file) - mock_compose_env.assert_called_once_with( - node_type=NodeType.FAIR, node_mode=NodeMode.ACTIVE - ) + mock_compose_env.assert_called_once_with(node_type=NodeType.FAIR, node_mode=NodeMode.ACTIVE) mock_init_op.assert_called_once_with( settings=mock.ANY, compose_env=mock_env, @@ -95,9 +91,7 @@ def test_update_fair_boot( update(valid_env_file, pull_config_for_schain) - mock_compose_env.assert_called_once_with( - node_type=NodeType.FAIR, node_mode=NodeMode.ACTIVE - ) + mock_compose_env.assert_called_once_with(node_type=NodeType.FAIR, node_mode=NodeMode.ACTIVE) mock_update_op.assert_called_once_with( settings=mock.ANY, compose_env=mock_env, @@ -127,9 +121,7 @@ def test_migrate_from_boot( migrate_from_boot(valid_env_file) - mock_compose_env.assert_called_once_with( - node_type=NodeType.FAIR, node_mode=NodeMode.ACTIVE - ) + mock_compose_env.assert_called_once_with(node_type=NodeType.FAIR, node_mode=NodeMode.ACTIVE) mock_migrate_op.assert_called_once_with( settings=mock.ANY, compose_env=mock_env, @@ -146,19 +138,17 @@ def test_cleanup_success( mock_compose_env, mock_cleanup_fair_op, mock_is_user_valid, - inited_node, resource_alloc, meta_file_v3, active_node_option, + inited_node, ): mock_env = {'ENV_TYPE': 'devnet'} mock_compose_env.return_value = mock_env cleanup(node_mode=NodeMode.ACTIVE) - mock_compose_env.assert_called_once_with( - node_type=NodeType.FAIR, node_mode=NodeMode.ACTIVE - ) + mock_compose_env.assert_called_once_with(node_type=NodeType.FAIR, node_mode=NodeMode.ACTIVE) mock_cleanup_fair_op.assert_called_once_with( node_mode=NodeMode.ACTIVE, compose_env=mock_env, prune=False ) @@ -171,10 +161,10 @@ def test_cleanup_calls_operations_in_correct_order( mock_compose_env, 
mock_cleanup_fair_op, mock_is_user_valid, - inited_node, resource_alloc, meta_file_v3, active_node_option, + inited_node, ): from node_cli.fair.common import cleanup @@ -204,10 +194,10 @@ def test_cleanup_continues_after_fair_op_error( mock_compose_env, mock_cleanup_fair_op, mock_is_user_valid, - inited_node, resource_alloc, meta_file_v3, active_node_option, + inited_node, ): mock_env = {'ENV_TYPE': 'devnet'} mock_compose_env.return_value = mock_env @@ -224,9 +214,9 @@ def test_cleanup_continues_after_fair_op_error( @mock.patch('node_cli.utils.decorators.is_user_valid', return_value=False) def test_cleanup_fails_when_user_invalid( mock_is_user_valid, - inited_node, resource_alloc, meta_file_v3, + inited_node, ): """Test that cleanup fails when user validation fails""" import pytest @@ -253,10 +243,10 @@ def test_cleanup_logs_success_message( mock_compose_env, mock_cleanup_fair_op, mock_is_user_valid, - inited_node, resource_alloc, meta_file_v3, active_node_option, + inited_node, ): mock_env = {'ENV_TYPE': 'devnet'} mock_compose_env.return_value = mock_env From 6447de3be388d859fd4b172bc1f72d5b0dcb2684 Mon Sep 17 00:00:00 2001 From: Dmytro Date: Mon, 16 Feb 2026 19:59:55 +0000 Subject: [PATCH 331/332] fix node tests --- node_cli/fair/common.py | 1 + tests/.skale/config/docker-compose-fair.yml | 5 +++++ tests/core/core_node_test.py | 6 +----- tests/fair/fair_node_test.py | 7 ++++--- 4 files changed, 11 insertions(+), 8 deletions(-) create mode 100644 tests/.skale/config/docker-compose-fair.yml diff --git a/node_cli/fair/common.py b/node_cli/fair/common.py index 37fdba67..98f10a7f 100644 --- a/node_cli/fair/common.py +++ b/node_cli/fair/common.py @@ -76,6 +76,7 @@ def init( print('Fair node is initialized') +@check_inited @check_user def cleanup(node_mode: NodeMode, prune: bool = False) -> None: node_mode = upsert_node_mode(node_mode=node_mode) diff --git a/tests/.skale/config/docker-compose-fair.yml b/tests/.skale/config/docker-compose-fair.yml new file mode 100644 
index 00000000..c09f2c08 --- /dev/null +++ b/tests/.skale/config/docker-compose-fair.yml @@ -0,0 +1,5 @@ +services: + test: + container_name: test + image: alpine:latest + network_mode: host diff --git a/tests/core/core_node_test.py b/tests/core/core_node_test.py index b9d66960..b28719d4 100644 --- a/tests/core/core_node_test.py +++ b/tests/core/core_node_test.py @@ -193,11 +193,7 @@ def test_is_base_containers_alive_empty(node_type, node_mode, is_boot): 'fair', ], ) -def test_compose_node_env( - node_type, - node_mode, - expected_mnt_dir, -): +def test_compose_node_env(node_type, node_mode, expected_mnt_dir, regular_user_conf): result_env = compose_node_env( node_type=node_type, node_mode=node_mode, diff --git a/tests/fair/fair_node_test.py b/tests/fair/fair_node_test.py index 28ade830..6554cd3f 100644 --- a/tests/fair/fair_node_test.py +++ b/tests/fair/fair_node_test.py @@ -227,11 +227,12 @@ def test_cleanup_fails_when_user_invalid( cleanup(node_mode=NodeMode.ACTIVE) -def test_cleanup_fails_when_not_inited(ensure_meta_removed, active_node_option): +def test_cleanup_fails_when_not_inited(ensure_meta_removed, active_node_option, fair_user_conf): import pytest - with pytest.raises(SystemExit): - cleanup(node_mode=NodeMode.ACTIVE) + with mock.patch('node_cli.operations.cleanup_fair_op', return_value=None): + with pytest.raises(SystemExit): + cleanup(node_mode=NodeMode.ACTIVE) @mock.patch('node_cli.utils.decorators.is_user_valid', return_value=True) From a31d653d99285702aff26643a999dd82c9dd7a5d Mon Sep 17 00:00:00 2001 From: Dmytro Date: Mon, 16 Feb 2026 20:30:13 +0000 Subject: [PATCH 332/332] fix nftables test --- tests/conftest.py | 6 ------ 1 file changed, 6 deletions(-) diff --git a/tests/conftest.py b/tests/conftest.py index 3d6cc858..8b610d96 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -70,12 +70,6 @@ 'node_cli.configs.TM_INIT_TIMEOUT', 'node_cli.configs.RESTORE_SLEEP_TIMEOUT', 'node_cli.configs.INIT_TIMEOUT', - 
'node_cli.core.node.TM_INIT_TIMEOUT', - 'node_cli.core.node.RESTORE_SLEEP_TIMEOUT', - 'node_cli.fair.common.TM_INIT_TIMEOUT', - 'node_cli.fair.common.INIT_TIMEOUT', - 'node_cli.fair.boot.TM_INIT_TIMEOUT', - 'node_cli.fair.active.RESTORE_SLEEP_TIMEOUT', ]