From dcd8a6e68cf0377032cb8af607a2a79dfdd78627 Mon Sep 17 00:00:00 2001 From: surengab Date: Mon, 8 Dec 2025 13:53:12 +0400 Subject: [PATCH 1/2] fix(websocket): prevent NULL pointer crash in error handling --- .../esp_websocket_client/esp_websocket_client.c | 12 ++++++++---- 1 file changed, 8 insertions(+), 4 deletions(-) diff --git a/components/esp_websocket_client/esp_websocket_client.c b/components/esp_websocket_client/esp_websocket_client.c index 21a172d91e..9342977b98 100644 --- a/components/esp_websocket_client/esp_websocket_client.c +++ b/components/esp_websocket_client/esp_websocket_client.c @@ -673,8 +673,9 @@ static int esp_websocket_client_send_with_exact_opcode(esp_websocket_client_hand esp_websocket_free_buf(client, true); esp_tls_error_handle_t error_handle = esp_transport_get_error_handle(client->transport); if (error_handle) { + const char *error_name = esp_err_to_name(error_handle->last_error); esp_websocket_client_error(client, "esp_transport_write() returned %d, transport_error=%s, tls_error_code=%i, tls_flags=%i, errno=%d", - ret, esp_err_to_name(error_handle->last_error), error_handle->esp_tls_error_code, + ret, error_name ? error_name : "UNKNOWN", error_handle->esp_tls_error_code, error_handle->esp_tls_flags, errno); } else { esp_websocket_client_error(client, "esp_transport_write() returned %d, errno=%d", ret, errno); @@ -1002,8 +1003,9 @@ static esp_err_t esp_websocket_client_recv(esp_websocket_client_handle_t client) esp_websocket_free_buf(client, false); esp_tls_error_handle_t error_handle = esp_transport_get_error_handle(client->transport); if (error_handle) { + const char *error_name = esp_err_to_name(error_handle->last_error); esp_websocket_client_error(client, "esp_transport_read() failed with %d, transport_error=%s, tls_error_code=%i, tls_flags=%i, errno=%d", - rlen, esp_err_to_name(error_handle->last_error), error_handle->esp_tls_error_code, + rlen, error_name ? 
error_name : "UNKNOWN", error_handle->esp_tls_error_code, error_handle->esp_tls_flags, errno); } else { esp_websocket_client_error(client, "esp_transport_read() failed with %d, errno=%d", rlen, errno); @@ -1097,9 +1099,10 @@ static void esp_websocket_client_task(void *pv) esp_tls_error_handle_t error_handle = esp_transport_get_error_handle(client->transport); client->error_handle.esp_ws_handshake_status_code = esp_transport_ws_get_upgrade_request_status(client->transport); if (error_handle) { + const char *error_name = esp_err_to_name(error_handle->last_error); esp_websocket_client_error(client, "esp_transport_connect() failed with %d, " "transport_error=%s, tls_error_code=%i, tls_flags=%i, esp_ws_handshake_status_code=%d, errno=%d", - result, esp_err_to_name(error_handle->last_error), error_handle->esp_tls_error_code, + result, error_name ? error_name : "UNKNOWN", error_handle->esp_tls_error_code, error_handle->esp_tls_flags, client->error_handle.esp_ws_handshake_status_code, errno); } else { esp_websocket_client_error(client, "esp_transport_connect() failed with %d, esp_ws_handshake_status_code=%d, errno=%d", @@ -1204,8 +1207,9 @@ static void esp_websocket_client_task(void *pv) if (read_select < 0) { esp_tls_error_handle_t error_handle = esp_transport_get_error_handle(client->transport); if (error_handle) { + const char *error_name = esp_err_to_name(error_handle->last_error); esp_websocket_client_error(client, "esp_transport_poll_read() returned %d, transport_error=%s, tls_error_code=%i, tls_flags=%i, errno=%d", - read_select, esp_err_to_name(error_handle->last_error), error_handle->esp_tls_error_code, + read_select, error_name ? 
error_name : "UNKNOWN", error_handle->esp_tls_error_code, error_handle->esp_tls_flags, errno); } else { esp_websocket_client_error(client, "esp_transport_poll_read() returned %d, errno=%d", read_select, errno); From 048b0c553ee128388f65b5301e043e08e906b113 Mon Sep 17 00:00:00 2001 From: surengab Date: Mon, 3 Nov 2025 12:13:43 +0400 Subject: [PATCH 2/2] feat(examples): websocket autobahn test suit integration --- .github/workflows/autobahn__linux-test.yml | 133 +++ .github/workflows/autobahn__target-test.yml | 227 ++++++ .../linux_compat/esp_timer/CMakeLists.txt | 3 +- .../esp_timer/include/esp_timer.h | 1 + .../examples/autobahn-testsuite/README.md | 727 +++++++++++++++++ .../config/fuzzingserver-quick.json | 12 + .../config/fuzzingserver.json | 15 + .../autobahn-testsuite/docker-compose.yml | 12 + .../examples/autobahn-testsuite/run_tests.sh | 262 ++++++ .../scripts/analyze_results.py | 162 ++++ .../scripts/generate_summary.py | 756 ++++++++++++++++++ .../scripts/monitor_serial.py | 213 +++++ .../autobahn-testsuite/scripts/quick_test.sh | 43 + .../scripts/restore_full_tests.sh | 43 + .../autobahn-testsuite/testee/CMakeLists.txt | 20 + .../testee/main/CMakeLists.txt | 10 + .../testee/main/Kconfig.projbuild | 44 + .../testee/main/autobahn_testee.c | 683 ++++++++++++++++ .../testee/main/idf_component.yml | 12 + .../testee/sdkconfig.ci.linux | 26 + .../testee/sdkconfig.ci.target.plain_tcp | 15 + 21 files changed, 3418 insertions(+), 1 deletion(-) create mode 100644 .github/workflows/autobahn__linux-test.yml create mode 100644 .github/workflows/autobahn__target-test.yml create mode 100644 components/esp_websocket_client/examples/autobahn-testsuite/README.md create mode 100644 components/esp_websocket_client/examples/autobahn-testsuite/config/fuzzingserver-quick.json create mode 100644 components/esp_websocket_client/examples/autobahn-testsuite/config/fuzzingserver.json create mode 100644 components/esp_websocket_client/examples/autobahn-testsuite/docker-compose.yml 
create mode 100755 components/esp_websocket_client/examples/autobahn-testsuite/run_tests.sh create mode 100755 components/esp_websocket_client/examples/autobahn-testsuite/scripts/analyze_results.py create mode 100755 components/esp_websocket_client/examples/autobahn-testsuite/scripts/generate_summary.py create mode 100755 components/esp_websocket_client/examples/autobahn-testsuite/scripts/monitor_serial.py create mode 100755 components/esp_websocket_client/examples/autobahn-testsuite/scripts/quick_test.sh create mode 100755 components/esp_websocket_client/examples/autobahn-testsuite/scripts/restore_full_tests.sh create mode 100644 components/esp_websocket_client/examples/autobahn-testsuite/testee/CMakeLists.txt create mode 100644 components/esp_websocket_client/examples/autobahn-testsuite/testee/main/CMakeLists.txt create mode 100644 components/esp_websocket_client/examples/autobahn-testsuite/testee/main/Kconfig.projbuild create mode 100644 components/esp_websocket_client/examples/autobahn-testsuite/testee/main/autobahn_testee.c create mode 100644 components/esp_websocket_client/examples/autobahn-testsuite/testee/main/idf_component.yml create mode 100644 components/esp_websocket_client/examples/autobahn-testsuite/testee/sdkconfig.ci.linux create mode 100644 components/esp_websocket_client/examples/autobahn-testsuite/testee/sdkconfig.ci.target.plain_tcp diff --git a/.github/workflows/autobahn__linux-test.yml b/.github/workflows/autobahn__linux-test.yml new file mode 100644 index 0000000000..175392bfb2 --- /dev/null +++ b/.github/workflows/autobahn__linux-test.yml @@ -0,0 +1,133 @@ +name: "autobahn: build/linux-tests" + +on: + push: + branches: + - master + pull_request: + types: [opened, synchronize, reopened, labeled] + +jobs: + linux_autobahn: + runs-on: ubuntu-22.04 + if: github.event_name == 'push' || (github.event_name == 'pull_request' && contains(github.event.pull_request.labels.*.name, 'autobahn')) + + env: + TEST_DIR: 
components/esp_websocket_client/examples/autobahn-testsuite + TESTEE_DIR: components/esp_websocket_client/examples/autobahn-testsuite/testee + + steps: + - name: Checkout repository + uses: actions/checkout@v4 + with: + submodules: recursive + + - name: Start Autobahn Fuzzing Server + run: | + mkdir -p ${{ env.TEST_DIR }}/reports/clients + + HOST_IP=$(ip route get 8.8.8.8 | grep -oP 'src \K\S+' || hostname -I | awk '{print $1}' || echo "172.17.0.1") + echo "Host IP address: $HOST_IP" + echo "HOST_IP=$HOST_IP" >> $GITHUB_ENV + + docker run -d \ + --name fuzzing-server \ + --network host \ + -v ${{ github.workspace }}/${{ env.TEST_DIR }}/config:/config:ro \ + -v ${{ github.workspace }}/${{ env.TEST_DIR }}/reports:/reports \ + crossbario/autobahn-testsuite:latest \ + wstest -m fuzzingserver -s /config/fuzzingserver.json + + echo "Waiting for fuzzing server..." + for i in {1..5}; do + if curl -f http://localhost:9001/info >/dev/null 2>&1; then + echo "Fuzzing server ready!" + exit 0 + fi + echo "Attempt $i/5 – waiting 2s..." + sleep 2 + done + + echo "Server start failed. Container logs:" + docker logs fuzzing-server + exit 1 + + - name: Build testee (Linux target) + working-directory: ${{ env.TESTEE_DIR }} + run: | + docker run --rm \ + -v ${{ github.workspace }}:/work \ + -w /work/${{ env.TESTEE_DIR }} \ + espressif/idf:latest \ + bash -c " + . \$IDF_PATH/export.sh + cp sdkconfig.ci.linux sdkconfig.defaults + echo 'Building...' 
+ idf.py build + " + + - name: Verify fuzzing server connectivity + run: | + HOST_IP=${HOST_IP:-$(ip route get 8.8.8.8 | grep -oP 'src \K\S+' || hostname -I | awk '{print $1}' || echo "172.17.0.1")} + echo "Testing connectivity to $HOST_IP:9001" + + docker run --rm \ + --network host \ + espressif/idf:latest \ + bash -c " + curl -f http://$HOST_IP:9001/info || { + echo 'ERROR: Cannot connect to fuzzing server at $HOST_IP:9001' + exit 1 + } + echo 'Fuzzing server is accessible' + " + + - name: Run Autobahn tests + run: | + docker run --rm \ + --network host \ + -v ${{ github.workspace }}:/work \ + -w /work/${{ env.TESTEE_DIR }}/build \ + espressif/idf:latest \ + bash -c " + apt-get update && apt-get install -y file curl net-tools || true + + HOST_IP=\$(ip route get 8.8.8.8 2>/dev/null | grep -oP 'src \\K\\S+' || hostname -I | awk '{print \$1}' || echo '172.17.0.1') + curl -f http://\${HOST_IP}:9001/info || { + echo \"ERROR: Server not reachable at \${HOST_IP}:9001\" + exit 1 + } + + echo 'Running autobahn_testee.elf' + WS_URI=\"ws://\${HOST_IP}:9001\" + echo \"WebSocket URI: \${WS_URI}\" + (sleep 0.5; printf \"\${WS_URI}\\n\") | timeout 30m ./autobahn_testee.elf || { + EXIT_CODE=\$? + echo 'Test failed' + exit \$EXIT_CODE + } + echo 'All Autobahn tests passed!' 
+ " + + - name: Show reports + if: always() + working-directory: ${{ env.TEST_DIR }} + run: | + if [ -d reports/clients ]; then + ls -la reports/clients/ + else + echo "No reports" + fi + + - name: Generate summary + if: always() + working-directory: ${{ env.TEST_DIR }} + run: python3 scripts/generate_summary.py || true + + - name: Upload reports + if: always() + uses: actions/upload-artifact@v4 + with: + name: autobahn-reports-linux-${{ github.run_id }} + path: ${{ env.TEST_DIR }}/reports/** + if-no-files-found: warn diff --git a/.github/workflows/autobahn__target-test.yml b/.github/workflows/autobahn__target-test.yml new file mode 100644 index 0000000000..9b2561b3eb --- /dev/null +++ b/.github/workflows/autobahn__target-test.yml @@ -0,0 +1,227 @@ +name: "autobahn: build/target-tests" + +on: + push: + branches: + - master + pull_request: + types: [opened, synchronize, reopened, labeled] + +jobs: + build_autobahn: + # Run on push to master or if PR has 'websocket' label + if: contains(github.event.pull_request.labels.*.name, 'autobahn') || github.event_name == 'push' + name: Build + strategy: + matrix: + #idf_ver: ["release-v5.0", "release-v5.1", "release-v5.2", "release-v5.3", "latest"] + idf_ver: [ "latest"] + idf_target: ["esp32"] + runs-on: ubuntu-22.04 + container: espressif/idf:${{ matrix.idf_ver }} + env: + TEST_DIR: components/esp_websocket_client/examples/autobahn-testsuite/testee + steps: + - name: Checkout esp-protocols + uses: actions/checkout@v4 + with: + submodules: recursive + - name: Build autobahn testee with IDF-${{ matrix.idf_ver }} for ${{ matrix.idf_target }} + working-directory: ${{ env.TEST_DIR }} + env: + IDF_TARGET: ${{ matrix.idf_target }} + shell: bash + run: | + . 
${IDF_PATH}/export.sh + test -f sdkconfig.ci.target.plain_tcp && cat sdkconfig.ci.target.plain_tcp >> sdkconfig.defaults || echo "No sdkconfig.ci.plain_tcp" + idf.py set-target ${{ matrix.idf_target }} + idf.py build + - name: Merge binaries with IDF-${{ matrix.idf_ver }} for ${{ matrix.idf_target }} + working-directory: ${{ env.TEST_DIR }}/build + env: + IDF_TARGET: ${{ matrix.idf_target }} + shell: bash + run: | + . ${IDF_PATH}/export.sh + esptool.py --chip ${{ matrix.idf_target }} merge_bin --fill-flash-size 4MB -o flash_image.bin @flash_args + - uses: actions/upload-artifact@v4 + with: + name: autobahn_testee_bin_${{ matrix.idf_target }}_${{ matrix.idf_ver }} + path: | + ${{ env.TEST_DIR }}/build/bootloader/bootloader.bin + ${{ env.TEST_DIR }}/build/partition_table/partition-table.bin + ${{ env.TEST_DIR }}/build/*.bin + ${{ env.TEST_DIR }}/build/*.elf + ${{ env.TEST_DIR }}/build/flasher_args.json + ${{ env.TEST_DIR }}/build/config/sdkconfig.h + ${{ env.TEST_DIR }}/build/config/sdkconfig.json + if-no-files-found: error + +# run-target-autobahn: +# # Skip running on forks since it won't have access to secrets +# if: | +# github.repository == 'espressif/esp-protocols' && +# ( contains(github.event.pull_request.labels.*.name, 'autobahn') || github.event_name == 'push' ) +# name: Target test +# needs: build_autobahn +# strategy: +# fail-fast: false +# matrix: +# idf_ver: ["latest"] +# idf_target: ["esp32"] +# runs-on: +# - self-hosted +# - ESP32-ETHERNET-KIT +# env: +# TEST_DIR: components/esp_websocket_client/examples/autobahn-testsuite +# TESTEE_DIR: components/esp_websocket_client/examples/autobahn-testsuite/testee +# steps: +# - uses: actions/checkout@v4 +# with: +# submodules: recursive +# - uses: actions/download-artifact@v4 +# with: +# name: autobahn_testee_bin_${{ matrix.idf_target }}_${{ matrix.idf_ver }} +# path: ${{ env.TESTEE_DIR }}/build +# - name: Install Docker Compose +# run: | +# sudo apt-get update +# sudo apt-get install -y docker-compose-plugin 
|| sudo apt-get install -y docker-compose +# # Ensure user has permission to use Docker (if not already in docker group) +# sudo usermod -aG docker $USER || true +# # Start Docker service if not running +# sudo systemctl start docker || true +# - name: Start Autobahn Fuzzing Server +# working-directory: ${{ env.TEST_DIR }} +# run: | +# # Get host IP address for ESP32 to connect to +# HOST_IP=$(hostname -I | awk '{print $1}') +# echo "HOST_IP=$HOST_IP" >> $GITHUB_ENV +# echo "Autobahn server will be accessible at ws://$HOST_IP:9001" +# +# # Start the fuzzing server using pre-built image +# # For CI, we may need to specify platform if architecture differs +# echo "Starting Autobahn fuzzing server..." +# # Set platform for CI if needed (uncomment if you get exec format error) +# # export DOCKER_DEFAULT_PLATFORM=linux/amd64 +# docker compose up -d || docker-compose up -d +# +# # Wait for server to be ready +# echo "Waiting for fuzzing server to start..." +# sleep 10 +# +# # Check if container is running and healthy +# if ! docker ps | grep -q ws-fuzzing-server; then +# echo "Error: Fuzzing server failed to start" +# echo "Container logs:" +# docker compose logs || docker-compose logs +# echo "Checking available Python executables in container:" +# docker compose run --rm fuzzing-server which python python3 || true +# exit 1 +# fi +# +# # Verify the server is actually responding +# echo "Checking if server is responding..." +# sleep 5 +# if ! 
curl -s http://localhost:8080 > /dev/null 2>&1; then +# echo "Warning: Server may not be fully ready, but container is running" +# docker compose logs --tail=20 || docker-compose logs --tail=20 +# fi +# +# echo "✓ Fuzzing server started successfully" +# - name: Flash ESP32 Testee +# working-directory: ${{ env.TESTEE_DIR }}/build +# env: +# IDF_TARGET: ${{ matrix.idf_target }} +# run: | +# python -m esptool --chip ${{ matrix.idf_target }} write_flash 0x0 flash_image.bin +# - name: Run Autobahn Tests +# working-directory: ${{ env.TESTEE_DIR }} +# env: +# PIP_EXTRA_INDEX_URL: "https://www.piwheels.org/simple" +# run: | +# # Detect ESP32 port if not set in environment +# if [ -z "${ESP_PORT:-}" ]; then +# for port in /dev/ttyUSB* /dev/ttyACM*; do +# if [ -e "$port" ]; then +# export ESP_PORT="$port" +# echo "Detected ESP32 port: $ESP_PORT" +# break +# fi +# done +# fi +# +# # Default to /dev/ttyUSB0 if still not found +# export ESP_PORT="${ESP_PORT:-/dev/ttyUSB0}" +# +# if [ ! -e "$ESP_PORT" ]; then +# echo "Error: ESP32 port not found. Please set ESP_PORT environment variable." +# echo "Available ports:" +# ls -la /dev/tty* || true +# exit 1 +# fi +# +# echo "Using ESP32 port: $ESP_PORT" +# export PYENV_ROOT="$HOME/.pyenv" +# export PATH="$PYENV_ROOT/bin:$PATH" +# eval "$(pyenv init --path)" +# eval "$(pyenv init -)" +# if ! pyenv versions --bare | grep -q '^3\.12\.6$'; then +# echo "Installing Python 3.12.6..." +# pyenv install -s 3.12.6 +# fi +# if ! pyenv virtualenvs --bare | grep -q '^myenv$'; then +# echo "Creating pyenv virtualenv 'myenv'..." +# pyenv virtualenv 3.12.6 myenv +# fi +# pyenv activate myenv +# python --version +# pip install --prefer-binary pytest-embedded pytest-embedded-serial-esp pytest-embedded-idf pytest-custom_exit_code esptool pyserial +# pip install --extra-index-url https://dl.espressif.com/pypi/ -r $GITHUB_WORKSPACE/ci/requirements.txt +# +# echo "Starting Autobahn test suite on ESP32..." 
+# echo "Tests may take 15-30 minutes to complete..." +# +# # Send server URI via serial (stdin) and monitor for completion +# # Script is in the parent directory (TEST_DIR) from TESTEE_DIR +# SERVER_URI="ws://$HOST_IP:9001" +# echo "Sending server URI to ESP32: $SERVER_URI" +# python3 ../scripts/monitor_serial.py --port "$ESP_PORT" --uri "$SERVER_URI" --timeout 2400 +# - name: Collect Test Reports +# working-directory: ${{ env.TEST_DIR }} +# if: always() +# run: | +# # Stop the fuzzing server +# docker compose down || docker-compose down +# +# # Check if reports were generated +# if [ -d "reports/clients" ]; then +# echo "✓ Test reports found" +# ls -la reports/clients/ +# else +# echo "⚠ No test reports found in reports/clients/" +# fi +# - name: Generate Test Summary +# working-directory: ${{ env.TEST_DIR }} +# if: always() +# run: | +# # Generate summary from test results +# # Check for JSON files in both reports/ and reports/clients/ +# if [ -d "reports" ] && ( [ -n "$(ls -A reports/*.json 2>/dev/null)" ] || [ -n "$(ls -A reports/clients/*.json 2>/dev/null)" ] ); then +# echo "Generating test summary..." +# python3 scripts/generate_summary.py +# echo "" +# echo "Summary generated successfully!" 
+# if [ -f "reports/summary.html" ]; then +# echo "HTML summary available at: reports/summary.html" +# fi +# else +# echo "⚠ No JSON test results found, skipping summary generation" +# fi +# - uses: actions/upload-artifact@v4 +# if: always() +# with: +# name: autobahn_reports_${{ matrix.idf_target }}_${{ matrix.idf_ver }} +# path: | +# ${{ env.TEST_DIR }}/reports/** +# if-no-files-found: warn diff --git a/common_components/linux_compat/esp_timer/CMakeLists.txt b/common_components/linux_compat/esp_timer/CMakeLists.txt index 0e47f3df91..1daea6fabf 100644 --- a/common_components/linux_compat/esp_timer/CMakeLists.txt +++ b/common_components/linux_compat/esp_timer/CMakeLists.txt @@ -1,5 +1,6 @@ idf_component_register(SRCS esp_timer_linux.c timer_task.cpp - INCLUDE_DIRS include) + INCLUDE_DIRS include + REQUIRES esp_common) set_target_properties(${COMPONENT_LIB} PROPERTIES CXX_STANDARD 17 diff --git a/common_components/linux_compat/esp_timer/include/esp_timer.h b/common_components/linux_compat/esp_timer/include/esp_timer.h index 8267e403d2..b6389dccfc 100644 --- a/common_components/linux_compat/esp_timer/include/esp_timer.h +++ b/common_components/linux_compat/esp_timer/include/esp_timer.h @@ -8,6 +8,7 @@ #include #include #include "bsd/string.h" +#include "esp_err.h" #ifdef __cplusplus extern "C" { diff --git a/components/esp_websocket_client/examples/autobahn-testsuite/README.md b/components/esp_websocket_client/examples/autobahn-testsuite/README.md new file mode 100644 index 0000000000..fccc21decd --- /dev/null +++ b/components/esp_websocket_client/examples/autobahn-testsuite/README.md @@ -0,0 +1,727 @@ +# Autobahn WebSocket Testsuite for esp_websocket_client + +This directory contains the setup for testing `esp_websocket_client` against the industry-standard [Autobahn WebSocket Testsuite](https://github.com/crossbario/autobahn-testsuite). + +The Autobahn Testsuite is the de facto standard for testing WebSocket protocol compliance. 
It runs over 500 test cases covering: +- Frame parsing and generation +- Text and binary messages +- Fragmentation +- Control frames (PING, PONG, CLOSE) +- UTF-8 validation +- Protocol violations +- Edge cases and error handling + +## 📋 Prerequisites + +1. **Docker** - For running the Autobahn testsuite server + - **Apple Silicon Macs (M1/M2/M3)**: The image runs via Rosetta 2 emulation (already configured) + - **Intel Macs / Linux**: Native support +2. **ESP32 device** with WiFi capability +3. **ESP-IDF** development environment +4. **Network** - ESP32 and Docker host on the same network + +## 🚀 Quick Start + +### Step 1: Start the Autobahn Fuzzing Server + +```bash +cd autobahn-testsuite +docker-compose up +``` + +This will: +- Start the Autobahn fuzzing server on port 9001 +- Start a web server on port 8080 for viewing reports +- Mount the `reports/` directory for test results + +You should see output like: +``` +Autobahn WebSockets 0.7.4/0.10.9 Fuzzing Server (Port 9001) +Ok, will run 521 test cases for any clients connecting +``` + +### Step 2: Configure the ESP32 Testee Client + +1. Find your Docker host IP address: + ```bash + # On Linux/Mac + ifconfig + # or + ip addr show + + # Look for your local network IP (e.g., 192.168.1.100) + ``` + +2. Update the configuration: + ```bash + cd testee + idf.py menuconfig + ``` + +3. 
Configure: + - **Example Connection Configuration** → Set your WiFi SSID and password + - **Autobahn Testsuite Configuration** → Set the server URI (e.g., `ws://192.168.1.100:9001`) + + Or edit `sdkconfig.defaults` directly: + ``` + CONFIG_EXAMPLE_WIFI_SSID="YourWiFiSSID" + CONFIG_EXAMPLE_WIFI_PASSWORD="YourWiFiPassword" + CONFIG_AUTOBAHN_SERVER_URI="ws://192.168.1.100:9001" + ``` + +### Step 3: Build and Flash the Testee + +**Option A: Automated (Recommended)** + +Use the `run_tests.sh` script for full automation: + +```bash +# From autobahn-testsuite directory +./run_tests.sh /dev/ttyUSB0 +``` + +This will: +- Build the testee +- Flash to ESP32 +- Monitor serial output +- Detect test completion automatically +- Open results in browser + +**Option B: Manual** + +```bash +cd testee + +# Build +idf.py build + +# Flash and monitor +idf.py -p /dev/ttyUSB0 flash monitor +``` + +Replace `/dev/ttyUSB0` with your ESP32's serial port. + +**Using Pre-configured CI Builds** + +For CI/CD or optimized builds, use the pre-configured sdkconfig files: + +```bash +cd testee + +# For Linux target +idf.py set-target linux +cp sdkconfig.ci.linux sdkconfig.defaults +idf.py build + +# For ESP32 hardware target +idf.py set-target esp32 +cp sdkconfig.ci.target.plain_tcp sdkconfig.defaults +# Configure WiFi/Ethernet, then: +idf.py build +``` + +### Step 4: Watch the Tests Run + +The ESP32 will: +1. Connect to WiFi +2. Query the fuzzing server for the number of test cases +3. Run each test case sequentially +4. Echo back all received messages (as required by the testsuite) +5. Generate a final report + +You'll see output like: +``` +I (12345) autobahn_testee: ========== Test Case 1/300 ========== +I (12346) autobahn_testee: Running test case 1: ws://192.168.1.100:9001/runCase?case=1&agent=esp_websocket_client +I (12450) autobahn_testee: WEBSOCKET_EVENT_CONNECTED +... 
+I (12550) autobahn_testee: Test case 1 completed +``` + +### Step 5: View the Results + +Once all tests complete, view the HTML report: + +**Option 1: Web Interface** +1. Open your web browser to: `http://localhost:8080` (not port 9001 - that's the WebSocket server) +2. Click on the **"Client Reports"** link on the page +3. You'll see a comprehensive breakdown of all test results: + +**Option 2: Generate Summary Report (Recommended)** +```bash +cd autobahn-testsuite +python3 scripts/generate_summary.py +``` +This generates a detailed console summary and an HTML summary at `reports/summary.html`. + +**Option 3: Direct File Access** +The report is saved in `autobahn-testsuite/reports/clients/` directory. Open it directly: + +```bash +# macOS +open reports/clients/index.html + +# Linux +xdg-open reports/clients/index.html + +# Or manually navigate to the file in your file browser +# Path: autobahn-testsuite/reports/clients/index.html +``` + +## 📂 Directory Structure + +``` +autobahn-testsuite/ +├── docker-compose.yml # Docker configuration +├── run_tests.sh # Automated test runner script +├── config/ +│ └── fuzzingserver.json # Testsuite server configuration +├── scripts/ # Automation scripts +│ ├── monitor_serial.py # Serial monitor with completion detection +│ ├── analyze_results.py # Result analysis tools +│ └── generate_summary.py # Generate test summary reports +├── reports/ # Generated test reports (created automatically) +│ └── clients/ +│ └── index.html # Main report page +├── testee/ # ESP32 testee client project +│ ├── CMakeLists.txt +│ ├── sdkconfig.defaults # Default configuration +│ ├── sdkconfig.ci.linux # CI config for Linux target builds +│ ├── sdkconfig.ci.target.plain_tcp # CI config for ESP32 hardware (Ethernet) +│ └── main/ +│ ├── autobahn_testee.c # Main testee implementation +│ ├── CMakeLists.txt +│ └── Kconfig.projbuild +└── README.md # This file +``` + +## ⚙️ Configuration + +### Testee Configuration + +The testee project includes several 
pre-configured sdkconfig files: + +#### 1. `sdkconfig.defaults` - Default Configuration +Base configuration for manual testing. Edit this file to set: +- WiFi credentials (`CONFIG_EXAMPLE_WIFI_SSID`, `CONFIG_EXAMPLE_WIFI_PASSWORD`) +- Server URI (`CONFIG_AUTOBAHN_SERVER_URI`) +- Other test-specific settings + +#### 2. `sdkconfig.ci.linux` - Linux Target Configuration +Pre-configured for Linux host builds (used in CI/CD): + +```bash +cd testee +idf.py set-target linux +cp sdkconfig.ci.linux sdkconfig.defaults +idf.py build +``` + +**Features:** +- Linux target (`CONFIG_IDF_TARGET_LINUX=y`) +- URI from stdin (`CONFIG_WEBSOCKET_URI_FROM_STDIN=y`) +- Optimized buffer sizes for testing +- Separate TX lock enabled + +#### 3. `sdkconfig.ci.target.plain_tcp` - ESP32 Hardware Configuration +Pre-configured for ESP32 hardware with Ethernet (used in CI/CD): + +```bash +cd testee +idf.py set-target esp32 +cp sdkconfig.ci.target.plain_tcp sdkconfig.defaults +# Add your WiFi/Ethernet config +idf.py build +``` + +**Features:** +- ESP32 target +- URI from stdin (`CONFIG_WEBSOCKET_URI_FROM_STDIN=y`) +- Ethernet support (can be configured for WiFi) +- Plain TCP (no TLS) + +**Note**: These CI configs are optimized for automated testing. For manual testing, use `sdkconfig.defaults` and configure via `idf.py menuconfig`. + +### Fuzzing Server Configuration + +Edit `config/fuzzingserver.json` to customize test behavior: + +```json +{ + "url": "ws://0.0.0.0:9001", + "outdir": "/reports", + "cases": ["*"], + "exclude-cases": [ + "9.*", // Excludes performance/mass tests + "12.*", // Excludes compression tests (permessage-deflate) + "13.*" // Excludes compression tests + ] +} +``` + +**Note**: Test cases 12.* and 13.* are excluded by default as they test WebSocket compression (RFC7692), which is typically not implemented in embedded clients. 
+ +### Test Case Categories + +The Autobahn Testsuite is organized into test categories, each focusing on a specific aspect of the WebSocket protocol (RFC 6455). Here's a detailed breakdown: + +#### **1.* - Framing** (~64 tests, ~3 minutes) +**Critical for**: Core protocol compliance + +Tests the fundamental WebSocket frame structure and parsing: +- Valid frame structures (FIN, RSV bits, opcode, mask, payload length) +- Frame masking requirements (clients must mask, servers must not) +- Payload length encoding (7-bit, 16-bit, 64-bit formats) +- Reserved bits validation (RSV1, RSV2, RSV3 must be 0 unless extension negotiated) +- Frame boundary detection + +**Common issues**: Incorrect masking behavior, wrong payload length calculation, not validating reserved bits + +--- + +#### **2.* - Pings/Pongs** (~11 tests, ~1 minute) +**Critical for**: Connection keepalive, detecting stale connections + +Tests control frame handling for connection management: +- PING frames must be automatically replied with PONG +- PONG frames can be unsolicited (sent proactively) +- PING payload must be echoed exactly in PONG response +- PING/PONG handling during message fragmentation +- Large PING payloads (up to 125 bytes) + +**Common issues**: Not responding to PING automatically, modifying PING payload in PONG, not handling PING during fragmented messages + +--- + +#### **3.* - Reserved Bits** (~7 tests, ~1 minute) +**Critical for**: Future protocol extensions (e.g., compression) + +Tests validation of reserved frame header bits: +- RSV1, RSV2, RSV3 bits must be 0 unless extension negotiated +- Frames with reserved bits set should be rejected +- Connection must close with proper error code (1002 - protocol error) + +**Common issues**: Not validating reserved bits, accepting frames with RSV bits set when no extension negotiated + +--- + +#### **4.* - Opcodes** (~10 tests, ~1 minute) +**Critical for**: Protocol compliance, future extensions + +Tests valid and invalid opcode handling: +- 
**Valid opcodes**: 0x0 (continuation), 0x1 (text), 0x2 (binary), 0x8 (close), 0x9 (ping), 0xA (pong) +- **Invalid opcodes**: 0x3-0x7 (reserved data), 0xB-0xF (reserved control) +- Connection must close on invalid opcode + +**Common issues**: Accepting reserved opcodes, not closing connection on invalid opcode + +--- + +#### **5.* - Fragmentation** (~20 tests, ~2 minutes) +**Critical for**: Handling large messages, streaming data + +Tests message fragmentation and continuation frames: +- Single fragment messages (FIN=1) +- Multiple fragment messages (FIN=0, then FIN=1) +- Continuation frames (opcode 0x0) +- Interleaving control frames (PING/PONG/CLOSE) during fragmentation +- Fragmentation of both text and binary messages + +**Common issues**: Not properly tracking fragmentation state, rejecting control frames during fragmentation (they're allowed!), starting new message before previous completed + +--- + +#### **6.* - UTF-8 Handling** (~150+ tests, ~10 minutes) +**Important for**: Security, preventing injection attacks +**Often excluded**: Many embedded implementations skip strict UTF-8 validation + +Tests strict UTF-8 validation for TEXT frames: +- Valid UTF-8 sequences +- Invalid UTF-8: wrong start bytes, invalid continuations +- Overlong encodings (security issue - can bypass validation) +- UTF-8 boundaries in fragmented messages +- Invalid code points (surrogates, beyond U+10FFFF) + +**Common issues**: Not validating UTF-8 at all, accepting invalid UTF-8 sequences, not handling UTF-8 boundaries in fragments correctly + +**Note**: Many embedded implementations skip strict UTF-8 validation due to memory/CPU constraints. This is acceptable for trusted environments but risky for public-facing services. 
+ +--- + +#### **7.* - Close Handshake** (~35 tests, ~3 minutes) +**Critical for**: Clean disconnection, resource cleanup + +Tests proper connection closing: +- Close frame structure (2-byte status code + optional UTF-8 reason) +- Valid and invalid close codes (1000-1015, plus some reserved ranges) +- Close frame must be echoed back +- No data frames allowed after close frame +- Close during message fragmentation +- Close frame payload validation (must be valid UTF-8 if present) + +**Common issues**: Not echoing close frame, accepting invalid close codes, not validating close payload + +--- + +#### **9.* - Limits/Performance** (~30 tests, 60+ minutes) +**Usually excluded**: Time-consuming and memory-intensive +**Why excluded**: Not practical for embedded devices + +Tests performance under stress: +- Large message sizes (64KB, 256KB, 1MB, 16MB) +- Many small messages (1000+ messages in rapid succession) +- Rapid connection/disconnection cycles +- Binary message performance at scale + +**Why section 9 is excluded**: +1. **Time consuming**: These tests can take hours to complete, making them impractical for regular testing +2. **Memory constraints**: Embedded devices (like ESP32) have limited RAM. Tests with 16MB messages will fail or cause crashes +3. **Not always relevant**: Resource-constrained devices are not expected to handle multi-megabyte messages or thousands of rapid connections +4. **Development workflow**: Excluding these allows faster iteration during development while still testing core protocol compliance + +The `quick_test.sh` script and default `fuzzingserver.json` both exclude section 9.* for these reasons. If you need to test performance limits, you can manually include them, but be prepared for very long test runs. 
+ +--- + +#### **10.* - Miscellaneous** (~10 tests, ~2 minutes) +**Critical for**: Robustness in real-world conditions + +Tests various edge cases: +- Delayed frame sending +- Slow message delivery +- Network behavior simulation +- Timeout handling + +--- + +#### **12.* & 13.* - WebSocket Compression** (~200+ tests, ~30 minutes total) +**Usually excluded**: Optional extension, not commonly implemented +**Why excluded**: RFC 7692 (permessage-deflate) is optional + +Tests WebSocket compression extension (RFC 7692): +- Deflate compression parameters negotiation +- Context takeover (reusing compression context) +- Compression with message fragmentation +- Invalid compression data handling +- Compression performance + +**Why excluded**: +1. **Optional extension**: RFC 7692 is not required for WebSocket compliance +2. **Complexity**: Adds significant implementation complexity and memory overhead +3. **Rarely implemented**: Most embedded WebSocket clients don't implement compression +4. **Limited benefit**: Compression is rarely needed on local networks where embedded devices typically operate + +--- + +### Test Configuration Summary + +| Category | Tests | Time | Critical? 
| Default Exclusion | +|----------|-------|------|-----------|-------------------| +| 1.* | ~64 | 3m | ✅ Yes | No | +| 2.* | ~11 | 1m | ✅ Yes | No | +| 3.* | ~7 | 1m | ✅ Yes | No | +| 4.* | ~10 | 1m | ✅ Yes | No | +| 5.* | ~20 | 2m | ✅ Yes | No | +| 6.* | ~150 | 10m | ⚠️ Optional | Sometimes (UTF-8) | +| 7.* | ~35 | 3m | ✅ Yes | No | +| 9.* | ~30 | 60m+ | ⚠️ Optional | **Yes (Performance)** | +| 10.* | ~10 | 2m | ✅ Yes | No | +| 12.* | ~100 | 15m | ❌ No | Yes (Compression) | +| 13.* | ~100 | 15m | ❌ No | Yes (Compression) | + +**Legend**: +- ✅ **Critical**: Essential for protocol compliance +- ⚠️ **Optional**: Nice to have but often excluded for practical reasons +- ❌ **Not applicable**: Optional extension, rarely implemented + +### Recommended Test Configurations + +**Quick Test** (used by `quick_test.sh`): +- Includes: 1.*, 2.*, 3.*, 4.*, 5.*, 7.*, 10.* +- Excludes: 6.* (UTF-8), 9.* (Performance), 12.*, 13.* (Compression) +- **~150 tests, 5-10 minutes** + +**Standard Test** (default `fuzzingserver.json`): +- Includes: All categories +- Excludes: 9.* (Performance), 12.*, 13.* (Compression) +- **~300 tests, 15-30 minutes** + +**Full Compliance Test**: +- Includes: All categories +- Excludes: None +- **~500+ tests, 1-2 hours** (includes performance and compression) + +## 🔧 Troubleshooting + +### Apple Silicon Mac (M1/M2/M3) Platform Warning + +If you see a warning like: +``` +The requested image's platform (linux/amd64) does not match the detected host platform (linux/arm64/v8) +``` + +**This is normal and expected!** The Autobahn testsuite image only supports amd64, but Docker Desktop for Mac will automatically run it through Rosetta 2 emulation. The `platform: linux/amd64` line in `docker-compose.yml` handles this. + +**Note**: There may be a slight performance overhead due to emulation, but it's typically not noticeable for this use case. + +### ESP32 Can't Connect to Server + +1. 
Verify Docker host IP is correct: + ```bash + docker inspect ws-fuzzing-server | grep IPAddress + ``` + +2. Check firewall rules allow connections on port 9001 + +3. Ensure ESP32 and Docker host are on same network + +4. Test connectivity: + ```bash + # From another machine on same network + curl -i -N -H "Connection: Upgrade" -H "Upgrade: websocket" \ + -H "Sec-WebSocket-Key: SGVsbG8sIHdvcmxkIQ==" \ + -H "Sec-WebSocket-Version: 13" \ + http://<DOCKER_HOST_IP>:9001/getCaseCount + ``` + +### Tests Timeout or Hang + +1. Increase network timeout in `autobahn_testee.c`: + ```c + websocket_cfg.network_timeout_ms = 30000; // 30 seconds + ``` + +2. Check WiFi signal strength + +3. Monitor memory usage - insufficient heap can cause issues + +4. Check if ESP32 is still responding: + ```bash + # Try to see serial output + idf.py -p /dev/ttyUSB0 monitor + ``` + +### No Serial Output After Flash + +1. **Check serial port**: + ```bash + # macOS + ls /dev/cu.* + + # Linux + ls /dev/ttyUSB* /dev/ttyACM* + ``` + +2. **Press RESET button** on ESP32 - it may need a reset after flashing + +3. **Check baud rate** - should be 115200 + +4. **Verify flash succeeded**: + ```bash + idf.py -p /dev/ttyUSB0 flash + # Should see "Hash of data verified" message + ``` + +### Finding Your Serial Port + +**macOS:** +```bash +ls /dev/cu.* | grep -i usb +# Common: /dev/cu.usbserial-10, /dev/cu.SLAB_USBtoUART +``` + +**Linux:** +```bash +ls /dev/ttyUSB* /dev/ttyACM* +# Common: /dev/ttyUSB0, /dev/ttyACM0 +``` + +**Windows:** +- Check Device Manager → Ports (COM & LPT) +- Use COM port number (e.g., `COM3`) + +### Some Tests Fail + +This is normal! The testsuite is very strict and tests edge cases. 
Common issues: + +- **UTF-8 validation** - May fail if not strictly validating text frames +- **Close frame handling** - May fail if close reason not properly echoed +- **Reserved bits** - May fail if not properly validating reserved bits + +Review the HTML report to understand specific failures and determine if they're critical. + +## 🛠️ Automation Scripts + +The `scripts/` directory contains helpful automation tools: + +- **`monitor_serial.py`** - Monitors ESP32 serial output and detects test completion automatically + - Useful for CI/CD pipelines + - Supports timeout and completion pattern detection + - Can send server URI via serial for `CONFIG_WEBSOCKET_URI_FROM_STDIN` builds + +- **`generate_summary.py`** - Generates detailed summary reports from test results + - Analyzes HTML/JSON reports + - Provides pass/fail statistics + - Useful for tracking compliance over time + +- **`analyze_results.py`** - Analyzes test results and identifies patterns + - Helps identify common failure modes + - Useful for debugging protocol issues + +- **`run_tests.sh`** - Main automation script (in root directory) + - Handles full test workflow: build, flash, monitor, results + +## ⏱️ Test Duration + +Typical test execution times: +- **Full test suite (300+ cases)**: 20-40 minutes +- **Quick test (cases 1-16)**: 2-5 minutes +- **Individual test case**: 1-10 seconds + +Factors affecting duration: +- Network latency +- WiFi signal strength +- ESP32 processing speed +- Test case complexity + +## 🔄 Common Workflows + +### Quick Test Run +Test a subset of cases quickly: +```bash +# Edit config/fuzzingserver.json to include only specific cases +# Then run normally +./run_tests.sh /dev/ttyUSB0 +``` + +### Re-run After Code Changes +```bash +# Just rebuild and flash (no need to restart server) +cd testee +idf.py build +idf.py -p /dev/ttyUSB0 flash +# Monitor manually or use monitor_serial.py +``` + +### View Results Without Re-running + +**Option 1: Web Interface (if server is running)** 
+```bash +# Open the Autobahn web interface +open http://localhost:8080 + +# Click on "Client Reports" link to view test results +``` + +**Option 2: Generate Summary Report (Recommended)** +```bash +# From the autobahn-testsuite directory +cd autobahn-testsuite +python3 scripts/generate_summary.py + +# The script automatically: +# - Finds reports in the reports/ directory +# - Generates a console summary with statistics +# - Creates an HTML summary at reports/summary.html +# - Opens the summary in your browser + +# Output includes: +# - Overall pass/fail statistics +# - Category breakdown (Framing, Ping/Pong, etc.) +# - List of failed tests with reasons +# - Recommendations for improvement +``` + +**Option 3: View Reports Directly** +```bash +# Open the JSON report directly +open reports/clients/index.html + +# Or view the generated summary +open reports/summary.html +``` + +### Stop and Restart Tests +If you need to interrupt tests: +1. Press `Ctrl+C` in the monitor +2. The server will keep running +3. Simply re-flash and restart - the server will continue from where it left off + +## 📊 Understanding Results + +### Result Categories + +- **Passed**: Implementation is compliant +- **Non-Strict**: Minor deviation, usually acceptable +- **Failed**: Protocol violation detected +- **Informational**: Observation about behavior + +### What to Focus On + +**Critical (should pass):** +- Framing tests (1.*) +- Basic opcodes (4.*) +- Close handling (7.*) + +**Important (should mostly pass):** +- Pings/Pongs (2.*) +- Fragmentation (5.*) +- UTF-8 handling (6.*) + +**Acceptable to have issues:** +- Reserved bits (3.*) - if not validating +- Performance tests (9.*) - often excluded +- Compression (12.*, 13.*) - typically not implemented + +### Common Issues + +1. **Text frame UTF-8 validation** + - Testsuite sends invalid UTF-8 in text frames + - Client should reject these + +2. 
**Close frame payload** + - Close frames can have a status code + reason + - Client should echo back close frames properly + +3. **Fragmentation** + - Tests various fragmentation scenarios + - Client must properly handle continuation frames + +## 💡 Tips & Best Practices + +1. **Start with a subset** - Test cases 1-16 first to verify basic functionality +2. **Monitor heap** - Watch for memory leaks during long test runs +3. **Save reports** - Keep HTML reports for comparison after code changes +4. **Use automated script** - `run_tests.sh` handles most edge cases automatically +5. **Check logs** - Serial output shows which test cases are running/failing + +## 🎯 Goal + +A high-quality WebSocket implementation should: +- Pass all core protocol tests (1.* through 11.*) +- Have minimal "Non-Strict" results +- Have no "Failed" results in critical areas + +**Typical good results:** +- ✅ 95%+ Pass rate on core tests (1.*, 2.*, 4.*, 5.*, 7.*) +- ⚠️ Some Non-Strict results are acceptable +- ❌ Zero Failed results in critical areas (framing, opcodes, close) + +## 📚 References + +- [Autobahn Testsuite Documentation](https://crossbar.io/autobahn/) +- [RFC 6455 - The WebSocket Protocol](https://tools.ietf.org/html/rfc6455) +- [Autobahn Testsuite GitHub](https://github.com/crossbario/autobahn-testsuite) + +## 🐛 Reporting Issues + +If you find protocol compliance issues with `esp_websocket_client`, please report them with: +1. The specific test case number that failed +2. The test report HTML output +3. ESP32 logs during the test +4. Your configuration (sdkconfig) + +## 📝 License + +This testsuite setup is provided under the same license as esp_websocket_client. +The Autobahn Testsuite itself is licensed under Apache 2.0. 
diff --git a/components/esp_websocket_client/examples/autobahn-testsuite/config/fuzzingserver-quick.json b/components/esp_websocket_client/examples/autobahn-testsuite/config/fuzzingserver-quick.json new file mode 100644 index 0000000000..c562647512 --- /dev/null +++ b/components/esp_websocket_client/examples/autobahn-testsuite/config/fuzzingserver-quick.json @@ -0,0 +1,12 @@ +{ + "url": "ws://0.0.0.0:9001", + "options": { + "failByDrop": false + }, + "outdir": "/reports", + "webport": 8080, + "cases": ["1.*", "2.*", "3.*", "4.*", "5.*", "7.*", "10.*"], + "exclude-cases": [], + "exclude-agent-cases": {}, + "_comment": "Quick test config - runs ~150 core tests, excludes UTF-8 (6.*), performance (9.*), and compression (12.*, 13.*)" +} diff --git a/components/esp_websocket_client/examples/autobahn-testsuite/config/fuzzingserver.json b/components/esp_websocket_client/examples/autobahn-testsuite/config/fuzzingserver.json new file mode 100644 index 0000000000..ea615097b7 --- /dev/null +++ b/components/esp_websocket_client/examples/autobahn-testsuite/config/fuzzingserver.json @@ -0,0 +1,15 @@ +{ + "url": "ws://0.0.0.0:9001", + "options": { + "failByDrop": false + }, + "outdir": "/reports", + "webport": 8080, + "cases": ["*"], + "exclude-cases": [ + "9.*", + "12.*", + "13.*" + ], + "exclude-agent-cases": {} +} diff --git a/components/esp_websocket_client/examples/autobahn-testsuite/docker-compose.yml b/components/esp_websocket_client/examples/autobahn-testsuite/docker-compose.yml new file mode 100644 index 0000000000..d3df30a6c2 --- /dev/null +++ b/components/esp_websocket_client/examples/autobahn-testsuite/docker-compose.yml @@ -0,0 +1,12 @@ +services: + fuzzing-server: + image: crossbario/autobahn-testsuite:latest + container_name: ws-fuzzing-server + platform: linux/amd64 # <— enforce amd64, use QEMU on Apple Silicon + ports: + - "9001:9001" + - "8080:8080" + volumes: + - ./config:/config + - ./reports:/reports + command: wstest -m fuzzingserver -s 
/config/fuzzingserver.json diff --git a/components/esp_websocket_client/examples/autobahn-testsuite/run_tests.sh b/components/esp_websocket_client/examples/autobahn-testsuite/run_tests.sh new file mode 100755 index 0000000000..d34058e460 --- /dev/null +++ b/components/esp_websocket_client/examples/autobahn-testsuite/run_tests.sh @@ -0,0 +1,262 @@ +#!/bin/bash + +# Autobahn Testsuite Runner Script +# This script automates the process of running WebSocket protocol compliance tests + +set -e + +SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" +cd "$SCRIPT_DIR" + +# Colors for output +RED='\033[0;31m' +GREEN='\033[0;32m' +YELLOW='\033[1;33m' +BLUE='\033[0;34m' +CYAN='\033[0;36m' +NC='\033[0m' # No Color + +echo -e "${BLUE}======================================${NC}" +echo -e "${BLUE}Autobahn WebSocket Testsuite Runner${NC}" +echo -e "${BLUE}======================================${NC}" +echo + +# Check if Docker is running +if ! docker info > /dev/null 2>&1; then + echo -e "${RED}Error: Docker is not running${NC}" + echo "Please start Docker and try again" + exit 1 +fi + +# Function to get host IP +get_host_ip() { + if [[ "$OSTYPE" == "darwin"* ]]; then + # macOS + ifconfig | grep "inet " | grep -v 127.0.0.1 | awk '{print $2}' | head -1 + else + # Linux + hostname -I | awk '{print $1}' + fi +} + +HOST_IP=$(get_host_ip) + +echo -e "${GREEN}Step 1: Starting Autobahn Fuzzing Server${NC}" +echo "Host IP detected: $HOST_IP" +echo + +# Start the fuzzing server +docker-compose up -d + +# Wait for server to be ready +echo "Waiting for fuzzing server to start..." +sleep 5 + +# Check if container is running +if ! 
docker ps | grep -q ws-fuzzing-server; then + echo -e "${RED}Error: Fuzzing server failed to start${NC}" + docker-compose logs + exit 1 +fi + +echo -e "${GREEN}✓ Fuzzing server started successfully${NC}" +echo " WebSocket endpoint: ws://$HOST_IP:9001" +echo " Web interface: http://$HOST_IP:8080" +echo + +echo -e "${YELLOW}Step 2: Running Tests on ESP32${NC}" +echo + +# Check for ESP32 port argument +ESP_PORT="${1:-}" +BUILD_ONLY="${2:-}" + +if [ -z "$ESP_PORT" ]; then + echo -e "${YELLOW}No serial port specified. Manual mode:${NC}" + echo + echo "To run tests automatically, provide the ESP32 serial port:" + echo " ./run_tests.sh /dev/ttyUSB0" + echo + echo "Or build only:" + echo " ./run_tests.sh /dev/ttyUSB0 build" + echo + echo "Manual steps:" + echo " 1. Update WiFi credentials in testee/sdkconfig.defaults" + echo " 2. Update Autobahn server URI to: ws://$HOST_IP:9001" + echo " 3. Build and flash:" + echo " cd testee" + echo " idf.py build" + echo " idf.py -p PORT flash monitor" + echo + echo -e "${YELLOW}Press Enter when ESP32 testing is complete...${NC}" + read +else + echo "ESP32 serial port: $ESP_PORT" + echo + + # Check if ESP-IDF is available + if ! command -v idf.py > /dev/null 2>&1; then + echo -e "${RED}Error: ESP-IDF not found in PATH${NC}" + echo "Please source ESP-IDF environment: . \$HOME/esp/esp-idf/export.sh" + exit 1 + fi + + # Check if port exists + if [ ! -e "$ESP_PORT" ]; then + echo -e "${RED}Error: Serial port $ESP_PORT does not exist${NC}" + exit 1 + fi + + cd testee + + # Verify we're in the right directory + if [ ! -f "sdkconfig.defaults" ] && [ ! -f "sdkconfig" ]; then + echo -e "${RED}Error: Not in testee directory or sdkconfig files not found${NC}" + echo " Current directory: $(pwd)" + exit 1 + fi + + # Update server URI in sdkconfig.defaults if needed + if [ -f "sdkconfig.defaults" ]; then + if ! 
grep -q "CONFIG_AUTOBAHN_SERVER_URI.*$HOST_IP" sdkconfig.defaults 2>/dev/null; then + echo -e "${YELLOW}Updating server URI in sdkconfig.defaults...${NC}" + # Backup original + cp sdkconfig.defaults sdkconfig.defaults.bak 2>/dev/null || true + # Update URI (simple approach - user should verify) + if [[ "$OSTYPE" == "darwin"* ]]; then + # macOS sed requires different syntax + sed -i '' "s|CONFIG_AUTOBAHN_SERVER_URI=.*|CONFIG_AUTOBAHN_SERVER_URI=\"ws://$HOST_IP:9001\"|" sdkconfig.defaults 2>/dev/null || true + else + sed -i.bak "s|CONFIG_AUTOBAHN_SERVER_URI=.*|CONFIG_AUTOBAHN_SERVER_URI=\"ws://$HOST_IP:9001\"|" sdkconfig.defaults 2>/dev/null || true + fi + fi + fi + + echo -e "${GREEN}Building testee...${NC}" + idf.py build + + if [ "$BUILD_ONLY" = "build" ]; then + echo -e "${GREEN}Build complete. Flash manually with:${NC}" + echo " idf.py -p $ESP_PORT flash monitor" + cd .. + exit 0 + fi + + echo -e "${GREEN}Flashing ESP32...${NC}" + idf.py -p "$ESP_PORT" flash + + echo + echo -e "${YELLOW}Waiting for ESP32 to boot (5 seconds)...${NC}" + sleep 5 + + # Check if CONFIG_WEBSOCKET_URI_FROM_STDIN is enabled + # We're in testee directory, so check files here + URI_FROM_STDIN="" + if [ -f "sdkconfig" ] && grep -q "CONFIG_WEBSOCKET_URI_FROM_STDIN=y" sdkconfig 2>/dev/null; then + URI_FROM_STDIN="ws://$HOST_IP:9001" + echo -e "${GREEN}✓ CONFIG_WEBSOCKET_URI_FROM_STDIN is enabled (from sdkconfig)${NC}" + echo " URI will be sent via serial" + elif [ -f "sdkconfig.defaults" ] && grep -q "CONFIG_WEBSOCKET_URI_FROM_STDIN=y" sdkconfig.defaults 2>/dev/null; then + URI_FROM_STDIN="ws://$HOST_IP:9001" + echo -e "${GREEN}✓ CONFIG_WEBSOCKET_URI_FROM_STDIN is enabled (from sdkconfig.defaults)${NC}" + echo " URI will be sent via serial" + else + echo -e "${YELLOW}⚠ CONFIG_WEBSOCKET_URI_FROM_STDIN is not enabled${NC}" + echo " ESP32 will use URI from sdkconfig.defaults" + echo " Make sure CONFIG_AUTOBAHN_SERVER_URI is set correctly" + fi + + echo + echo -e "${GREEN}Starting test 
execution...${NC}" + echo + echo -e "${YELLOW}━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━${NC}" + echo -e "${BLUE}Serial output (tests in progress):${NC}" + echo -e "${YELLOW}━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━${NC}" + echo + echo -e "${YELLOW}Note: Connection errors are expected for some test cases${NC}" + echo " (e.g., error handling tests). The testee will continue automatically." + echo + echo -e "${GREEN}Look for test case progress messages like:${NC}" + echo " 'Starting test case X...'" + echo " 'Test case X completed'" + echo + echo -e "${YELLOW}If no output appears, try:${NC}" + echo " - Press RESET button on ESP32" + echo " - Check WiFi credentials in sdkconfig.defaults" + echo " - Verify serial port: $ESP_PORT" + echo + echo -e "${YELLOW}━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━${NC}" + echo + + cd .. + + # Use monitor_serial.py to monitor and detect completion + # Only send URI if CONFIG_WEBSOCKET_URI_FROM_STDIN is enabled + if python3 scripts/monitor_serial.py --port "$ESP_PORT" ${URI_FROM_STDIN:+--uri "$URI_FROM_STDIN"} --timeout 2400; then + echo + echo -e "${GREEN}✓ Tests completed successfully!${NC}" + echo " Check the web interface for detailed results" + else + echo + echo -e "${YELLOW}⚠ Test execution ended (timeout or error)${NC}" + echo " This may be normal if:" + echo " - Tests are still running (check serial output)" + echo " - Connection errors occurred (some are expected)" + echo " - Timeout was reached" + echo + echo " Check serial output above and web interface for details" + fi +fi + +echo +echo -e "${GREEN}Step 3: Viewing Results${NC}" +echo + +# Define report paths +REPORT_FILE="$SCRIPT_DIR/reports/clients/index.html" +WEB_INTERFACE="http://localhost:8080" + +# Check if reports directory exists and has content +if [ -d "$SCRIPT_DIR/reports/clients" ] && [ "$(ls -A $SCRIPT_DIR/reports/clients/*.json 2>/dev/null)" ]; then + 
echo -e "${GREEN}✓ Test reports found${NC}" + echo + + # Generate and display summary + echo -e "${CYAN}━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━${NC}" + echo -e "${BLUE}Generating Test Summary...${NC}" + echo -e "${CYAN}━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━${NC}" + echo + + if python3 "$SCRIPT_DIR/scripts/generate_summary.py" 2>/dev/null; then + echo + echo -e "${GREEN}✓ Summary generated successfully${NC}" + else + echo -e "${YELLOW}⚠ Could not generate summary (reports may still be processing)${NC}" + echo " Run manually: cd $SCRIPT_DIR && python3 scripts/generate_summary.py" + fi +else + echo -e "${YELLOW}⚠ No test reports found yet${NC}" + echo " Reports may still be generating, or tests may not have completed" + echo " Check the web interface or wait a few moments and run:" + echo " cd $SCRIPT_DIR && python3 scripts/generate_summary.py" +fi + +echo +echo -e "${CYAN}━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━${NC}" +echo -e "${BLUE}Test Reports Available:${NC}" +echo -e "${CYAN}━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━${NC}" +if [[ -f "$REPORT_FILE" ]]; then + echo " 📄 Direct file: file://$REPORT_FILE" +fi +echo " 🌐 Web interface: $WEB_INTERFACE (click 'Client Reports' link)" +echo " 📁 Directory: $SCRIPT_DIR/reports/clients/" +if [[ -f "$SCRIPT_DIR/reports/summary.html" ]]; then + echo " 📊 Summary: file://$SCRIPT_DIR/reports/summary.html" +fi +echo + +echo -e "${YELLOW}To stop the fuzzing server:${NC}" +echo " docker-compose down" +echo + +echo -e "${GREEN}Testing complete!${NC}" diff --git a/components/esp_websocket_client/examples/autobahn-testsuite/scripts/analyze_results.py b/components/esp_websocket_client/examples/autobahn-testsuite/scripts/analyze_results.py new file mode 100755 index 0000000000..f8eebaf5f0 --- /dev/null +++ 
b/components/esp_websocket_client/examples/autobahn-testsuite/scripts/analyze_results.py @@ -0,0 +1,162 @@ +#!/usr/bin/env python3 +# SPDX-FileCopyrightText: 2025 Espressif Systems (Shanghai) CO LTD +# SPDX-License-Identifier: Unlicense OR CC0-1.0 +""" +Autobahn Test Results Analyzer + +Parses the JSON report from Autobahn testsuite and provides a summary. +""" + +import json +import sys +from collections import defaultdict +from pathlib import Path + + +def analyze_results(report_path: Path) -> int: + """Analyze the Autobahn test results JSON file.""" + + with open(report_path, 'r') as f: + data = json.load(f) + + # Find the agent name (should be esp_websocket_client) + agents = list(data.keys()) + if not agents: + print('❌ No test results found in report') + return 1 + + agent = agents[0] + results = data[agent] + + print(f"\n{'='*60}") + print('Autobahn WebSocket Testsuite Results') + print(f'Agent: {agent}') + print(f"{'='*60}\n") + + # Count results by behavior + counts = defaultdict(int) + by_category = defaultdict(lambda: defaultdict(int)) + failures = [] + + for case_id, result in results.items(): + behavior = result.get('behavior', 'UNKNOWN') + counts[behavior] += 1 + + # Extract category (e.g., "1" from "1.2.3") + category = case_id.split('.')[0] + by_category[category][behavior] += 1 + + if behavior in ['FAILED', 'UNIMPLEMENTED']: + failures.append({ + 'case': case_id, + 'behavior': behavior, + 'description': result.get('description', 'N/A'), + 'result': result.get('behaviorClose', 'N/A') + }) + + # Print overall summary + total = sum(counts.values()) + print(f'Overall Results ({total} tests):') + print( + f" ✅ PASSED: {counts['OK']:3d} " + f"({counts['OK']/total*100:5.1f}%)", + ) + print( + f" ⚠️ NON-STRICT: {counts['NON-STRICT']:3d} " + f"({counts['NON-STRICT']/total*100:5.1f}%)", + ) + print( + f" ℹ️ INFORMATIONAL: {counts['INFORMATIONAL']:3d} " + f"({counts['INFORMATIONAL']/total*100:5.1f}%)", + ) + print( + f" ❌ FAILED: {counts['FAILED']:3d} " + 
f"({counts['FAILED']/total*100:5.1f}%)", + ) + print( + f" ⭕ UNIMPLEMENTED: {counts['UNIMPLEMENTED']:3d} " + f"({counts['UNIMPLEMENTED']/total*100:5.1f}%)", + ) + print() + + # Print by category + print('Results by Category:') + print( + f" {'Cat':<4} {'Pass':>6} {'N-S':>6} {'Info':>6} " + f"{'Fail':>6} {'N/I':>6} {'Total':>6}", + ) + print( + f" {'-'*4} {'-'*6} {'-'*6} {'-'*6} " + f"{'-'*6} {'-'*6} {'-'*6}", + ) + + for category in sorted( + by_category.keys(), + key=lambda x: int(x) if x.isdigit() else 999, + ): + cat_results = by_category[category] + cat_total = sum(cat_results.values()) + print( + f" {category:>3}. " + f"{cat_results['OK']:>6} " + f"{cat_results['NON-STRICT']:>6} " + f"{cat_results['INFORMATIONAL']:>6} " + f"{cat_results['FAILED']:>6} " + f"{cat_results['UNIMPLEMENTED']:>6} " + f'{cat_total:>6}', + ) + + print() + + # Print failures details + if failures: + print(f'\n❌ Failed/Unimplemented Tests ({len(failures)}):') + print(f" {'-'*60}") + for fail in failures: + print(f" Case {fail['case']}: {fail['behavior']}") + print(f" Description: {fail['description'][:70]}") + print(f" Result: {fail['result']}") + print() + else: + print('\n🎉 All tests passed or acceptable! 
No failures.') + + # Quality assessment + print(f"\n{'='*60}") + print('Quality Assessment:') + + pass_rate = (counts['OK'] + counts['NON-STRICT']) / total * 100 + fail_rate = counts['FAILED'] / total * 100 + + if fail_rate == 0 and counts['OK'] > total * 0.8: + print(' 🌟 EXCELLENT - Production ready!') + elif fail_rate < 5 and pass_rate > 85: + print(' ✅ GOOD - Minor issues to address') + elif fail_rate < 10 and pass_rate > 70: + print(' ⚠️ FAIR - Several issues need fixing') + else: + print(' ❌ POOR - Significant protocol compliance issues') + + print(f"{'='*60}\n") + + return 0 if fail_rate < 5 else 1 + + +def main() -> int: + if len(sys.argv) > 1: + report_path = Path(sys.argv[1]) + else: + # Default location + script_dir = Path(__file__).parent.parent + report_path = script_dir / 'reports' / 'clients' / 'index.json' + + if not report_path.exists(): + print(f'❌ Error: Report file not found: {report_path}') + print('\nUsage: python analyze_results.py [path/to/index.json]') + print(f'Default: {report_path}') + return 1 + + return analyze_results(report_path) + + +if __name__ == '__main__': + sys.exit(main()) diff --git a/components/esp_websocket_client/examples/autobahn-testsuite/scripts/generate_summary.py b/components/esp_websocket_client/examples/autobahn-testsuite/scripts/generate_summary.py new file mode 100755 index 0000000000..b8d9d6ef42 --- /dev/null +++ b/components/esp_websocket_client/examples/autobahn-testsuite/scripts/generate_summary.py @@ -0,0 +1,756 @@ +#!/usr/bin/env python3 +# SPDX-FileCopyrightText: 2025 Espressif Systems (Shanghai) CO LTD +# SPDX-License-Identifier: Unlicense OR CC0-1.0 +""" +Autobahn Test Results Summary Generator + +This script parses all JSON test results and generates a comprehensive summary. 
+""" + +import glob +import json +import os +from collections import defaultdict +from datetime import datetime +from pathlib import Path + + +# ANSI color codes for terminal output +class Colors: + GREEN = '\033[92m' + RED = '\033[91m' + YELLOW = '\033[93m' + BLUE = '\033[94m' + CYAN = '\033[96m' + MAGENTA = '\033[95m' + BOLD = '\033[1m' + END = '\033[0m' + + +# Test category descriptions +CATEGORIES = { + '1': { + 'name': 'Framing', + 'critical': True, + 'description': 'Basic frame structure', + }, + '2': { + 'name': 'Ping/Pong', + 'critical': True, + 'description': 'Control frames', + }, + '3': { + 'name': 'Reserved Bits', + 'critical': True, + 'description': 'RSV validation', + }, + '4': { + 'name': 'Opcodes', + 'critical': True, + 'description': 'Valid/invalid opcodes', + }, + '5': { + 'name': 'Fragmentation', + 'critical': True, + 'description': 'Message fragments', + }, + '6': { + 'name': 'UTF-8', + 'critical': False, + 'description': 'Text validation', + }, + '7': { + 'name': 'Close Handshake', + 'critical': True, + 'description': 'Connection closing', + }, + '9': { + 'name': 'Performance', + 'critical': False, + 'description': 'Large messages', + }, + '10': { + 'name': 'Miscellaneous', + 'critical': True, + 'description': 'Edge cases', + }, + '12': { + 'name': 'Compression', + 'critical': False, + 'description': 'RFC 7692', + }, + '13': { + 'name': 'Compression', + 'critical': False, + 'description': 'RFC 7692', + }, +} + + +def get_category_from_test_id(test_id): + """Extract category number from test ID (e.g., '1.1.1' -> '1')""" + return test_id.split('.')[0] + + +def parse_test_results(reports_dir): + """Parse all JSON test results""" + results = [] + # Look for JSON files in reports directory and reports/clients subdirectory + json_files = glob.glob(os.path.join(reports_dir, "*.json")) + json_files.extend(glob.glob(os.path.join(reports_dir, "clients", "*.json"))) + + for json_file in json_files: + try: + with open(json_file, 'r') as f: + data = 
json.load(f) + results.append({ + 'id': data.get('id', 'unknown'), + 'behavior': data.get('behavior', 'UNKNOWN'), + 'description': data.get('description', ''), + 'duration': data.get('duration', 0), + 'result': data.get('result', ''), + }) + except Exception as e: + print(f"Error parsing {json_file}: {e}") + + # Sort by test ID with proper numeric sorting + def sort_key(result): + parts = result['id'].split('.') + # Convert to sortable format: pad numbers, keep strings + sortable = [] + for p in parts: + if p.isdigit(): + sortable.append((0, int(p))) # numbers sort before strings + else: + sortable.append((1, p)) # strings sort after numbers + return tuple(sortable) + + return sorted(results, key=sort_key) + + +def generate_summary(results): + """Generate comprehensive summary statistics""" + + # Overall stats + total = len(results) + by_behavior = defaultdict(int) + by_category = defaultdict(lambda: defaultdict(int)) + + for result in results: + behavior = result['behavior'] + by_behavior[behavior] += 1 + + category = get_category_from_test_id(result['id']) + by_category[category][behavior] += 1 + + return { + 'total': total, + 'by_behavior': dict(by_behavior), + 'by_category': dict(by_category), + } + + +def calculate_grade(pass_rate): + """Calculate letter grade based on pass rate""" + if pass_rate >= 90: + return 'A', Colors.GREEN + elif pass_rate >= 80: + return 'B', Colors.GREEN + elif pass_rate >= 70: + return 'C', Colors.YELLOW + elif pass_rate >= 60: + return 'D', Colors.YELLOW + else: + return 'F', Colors.RED + + +def print_banner(text): + """Print a banner""" + print() + print(f"{Colors.BLUE}{'=' * 80}{Colors.END}") + print(f"{Colors.BLUE}{Colors.BOLD}{text:^80}{Colors.END}") + print(f"{Colors.BLUE}{'=' * 80}{Colors.END}") + print() + + +def print_summary_table(summary): + """Print overall summary table""" + print_banner("OVERALL TEST RESULTS") + + total = summary['total'] + by_behavior = summary['by_behavior'] + + # Calculate percentages + passed = 
by_behavior.get('OK', 0) + failed = by_behavior.get('FAILED', 0) + non_strict = by_behavior.get('NON-STRICT', 0) + informational = by_behavior.get('INFORMATIONAL', 0) + unimplemented = by_behavior.get('UNIMPLEMENTED', 0) + + pass_rate = (passed / total * 100) if total > 0 else 0 + fail_rate = (failed / total * 100) if total > 0 else 0 + + grade, grade_color = calculate_grade(pass_rate) + + print(f'Total Tests: {Colors.BOLD}{total}{Colors.END}') + print() + print( + f'{Colors.GREEN}✅ PASSED: {passed:3d} ' + f'({pass_rate:5.1f}%){Colors.END}', + ) + print( + f'{Colors.RED}❌ FAILED: {failed:3d} ' + f'({fail_rate:5.1f}%){Colors.END}', + ) + if non_strict > 0: + print( + f'{Colors.YELLOW}⚠️ NON-STRICT: {non_strict:3d} ' + f'({non_strict/total*100:5.1f}%){Colors.END}', + ) + if informational > 0: + print( + f'{Colors.CYAN}ℹ️ INFORMATIONAL: {informational:3d} ' + f'({informational/total*100:5.1f}%){Colors.END}', + ) + if unimplemented > 0: + print( + f'{Colors.MAGENTA}🔧 UNIMPLEMENTED: {unimplemented:3d} ' + f'({unimplemented/total*100:5.1f}%){Colors.END}', + ) + + print() + print( + f'Overall Grade: {grade_color}{Colors.BOLD}{grade}{Colors.END}', + ) + print() + + # Embedded client rating + if pass_rate >= 70: + rating = "Excellent" + rating_color = Colors.GREEN + elif pass_rate >= 55: + rating = "Good" + rating_color = Colors.GREEN + elif pass_rate >= 40: + rating = "Acceptable" + rating_color = Colors.YELLOW + else: + rating = "Needs Improvement" + rating_color = Colors.RED + + print( + 'Embedded Client Rating: ' + f'{rating_color}{Colors.BOLD}{rating}{Colors.END}', + ) + print() + + +def print_category_breakdown(summary): + """Print detailed category breakdown""" + print_banner("RESULTS BY TEST CATEGORY") + + by_category = summary['by_category'] + + # Header + print(f"{'Category':<25} {'Total':>7} {'Pass':>7} {'Fail':>7} {'Rate':>8} {'Critical':>10} {'Grade':>7}") + print(f"{'-'*25} {'-'*7} {'-'*7} {'-'*7} {'-'*8} {'-'*10} {'-'*7}") + + # Sort categories numerically + 
sorted_categories = sorted(by_category.keys(), key=lambda x: int(x) if x.isdigit() else 999) + + for cat_num in sorted_categories: + cat_stats = by_category[cat_num] + cat_info = CATEGORIES.get(cat_num, {'name': f'Category {cat_num}', 'critical': True}) + + total = sum(cat_stats.values()) + passed = cat_stats.get('OK', 0) + failed = cat_stats.get('FAILED', 0) + pass_rate = (passed / total * 100) if total > 0 else 0 + + grade, grade_color = calculate_grade(pass_rate) + + # Format category name + cat_name = f"{cat_num}.* {cat_info['name']}" + critical = "Yes" if cat_info['critical'] else "No" + + # Color code the pass rate + if pass_rate >= 70: + rate_color = Colors.GREEN + elif pass_rate >= 50: + rate_color = Colors.YELLOW + else: + rate_color = Colors.RED + + print(f"{cat_name:<25} {total:>7} {passed:>7} {failed:>7} " + f"{rate_color}{pass_rate:>7.1f}%{Colors.END} {critical:>10} " + f"{grade_color}{grade:>7}{Colors.END}") + + print() + + +def print_failed_tests(results, limit=20): + """Print list of failed tests""" + print_banner("FAILED TESTS (First 20)") + + failed = [r for r in results if r['behavior'] == 'FAILED'] + + if not failed: + print(f'{Colors.GREEN}🎉 No failed tests!{Colors.END}\n') + return + + print( + 'Total Failed: ' + f'{Colors.RED}{Colors.BOLD}{len(failed)}{Colors.END}\n', + ) + + for i, test in enumerate(failed[:limit], 1): + print(f"{i:2d}. {Colors.RED}Test {test['id']:<12}{Colors.END} - {test['description'][:60]}") + if test['result']: + print(f" Reason: {test['result'][:100]}") + + if len(failed) > limit: + print(f"\n... 
and {len(failed) - limit} more failed tests") + + print() + + +def print_recommendations(summary): + """Print recommendations based on results""" + print_banner("RECOMMENDATIONS") + + by_category = summary['by_category'] + + # Check critical categories + critical_issues = [] + + for cat_num, cat_info in CATEGORIES.items(): + if not cat_info['critical']: + continue + + if cat_num not in by_category: + continue + + cat_stats = by_category[cat_num] + total = sum(cat_stats.values()) + passed = cat_stats.get('OK', 0) + pass_rate = (passed / total * 100) if total > 0 else 0 + + if pass_rate < 70: + critical_issues.append( + { + 'category': f"{cat_num}.* {cat_info['name']}", + 'pass_rate': pass_rate, + 'description': cat_info['description'], + }, + ) + + if critical_issues: + print(f"{Colors.RED}⚠️ Critical Issues Found:{Colors.END}\n") + for issue in critical_issues: + print(f" • {issue['category']}: {issue['pass_rate']:.1f}% pass rate") + print(f" {issue['description']} - This requires attention\n") + else: + print(f"{Colors.GREEN}✅ All critical test categories are performing well!{Colors.END}\n") + + # UTF-8 check + if '6' in by_category: + cat_stats = by_category['6'] + total = sum(cat_stats.values()) + passed = cat_stats.get('OK', 0) + pass_rate = (passed / total * 100) if total > 0 else 0 + + if pass_rate < 50: + print( + f'{Colors.YELLOW}ℹ️ UTF-8 Validation (6.*): ' + f'{pass_rate:.1f}% pass rate{Colors.END}', + ) + print( + ' This is acceptable for embedded clients in ' + 'trusted environments.', + ) + print( + ' Consider adding UTF-8 validation if operating in ' + 'untrusted networks.\n', + ) + + print(f'{Colors.CYAN}💡 Next Steps:{Colors.END}\n') + print(' 1. Review failed tests in detail (see HTML report)') + print(' 2. Focus on critical categories (1-5, 7, 10)') + print(' 3. UTF-8 failures (category 6) are acceptable for embedded clients') + print(' 4. 
Compare your results with reference implementations\n') + + +def generate_markdown_summary(summary, results): + """Generate markdown summary for GitHub Actions Job Summary""" + total = summary['total'] + by_behavior = summary['by_behavior'] + by_category = summary['by_category'] + + passed = by_behavior.get('OK', 0) + failed = by_behavior.get('FAILED', 0) + non_strict = by_behavior.get('NON-STRICT', 0) + informational = by_behavior.get('INFORMATIONAL', 0) + unimplemented = by_behavior.get('UNIMPLEMENTED', 0) + + pass_rate = (passed / total * 100) if total > 0 else 0 + grade, _ = calculate_grade(pass_rate) + + # Determine status emoji and color + if pass_rate >= 90: + status_emoji = "🟢" + elif pass_rate >= 70: + status_emoji = "🟡" + else: + status_emoji = "🔴" + + md = f"""# {status_emoji} Autobahn Test Suite Results + +## Overall Results + +| Metric | Value | +|--------|-------| +| **Total Tests** | {total} | +| **✅ Passed** | {passed} ({pass_rate:.1f}%) | +| **❌ Failed** | {failed} ({(failed/total*100) if total > 0 else 0:.1f}%) | +""" + + if non_strict > 0: + md += f"| **⚠️ Non-Strict** | {non_strict} ({non_strict/total*100:.1f}%) |\n" + if informational > 0: + md += f"| **ℹ️ Informational** | {informational} ({informational/total*100:.1f}%) |\n" + if unimplemented > 0: + md += f"| **🔧 Unimplemented** | {unimplemented} ({unimplemented/total*100:.1f}%) |\n" + + md += f""" +| **Overall Grade** | **{grade}** | +| **Pass Rate** | {pass_rate:.1f}% | + +### Progress Bar + + {pass_rate:.1f}% + +--- + +## Results by Category + +| Category | Name | Total | Passed | Failed | Pass Rate | Critical | +|----------|------|-------|--------|--------|-----------|----------| +""" + + sorted_categories = sorted(by_category.keys(), key=lambda x: int(x) if x.isdigit() else 999) + for cat_num in sorted_categories: + cat_stats = by_category[cat_num] + cat_info = CATEGORIES.get(cat_num, {'name': f'Category {cat_num}', 'critical': True}) + + total_cat = sum(cat_stats.values()) + passed_cat 
= cat_stats.get('OK', 0) + failed_cat = cat_stats.get('FAILED', 0) + pass_rate_cat = (passed_cat / total_cat * 100) if total_cat > 0 else 0 + + critical_mark = "✅ Yes" if cat_info['critical'] else "⚪ No" + pass_mark = "✅" if pass_rate_cat >= 70 else "⚠️" if pass_rate_cat >= 50 else "❌" + + # NOTE: bold marker must be closed ("**{cat_num}**.*"), otherwise every table row
+ # contains an unterminated "**" and GitHub renders the whole column garbled.
+ md += f"| **{cat_num}**.* | {cat_info['name']} | {total_cat} | {passed_cat} | {failed_cat} | {pass_mark} {pass_rate_cat:.1f}% | {critical_mark} |\n" + + # Failed tests section + failed_tests = [r for r in results if r['behavior'] == 'FAILED'] + if failed_tests: + md += f""" +--- +
## ❌ Failed Tests ({len(failed_tests)} total) + +""" + for i, test in enumerate(failed_tests[:20], 1): + md += f"{i}. **Test {test['id']}**: {test['description'][:80]}\n" + if test['result']: + md += f" - Reason: {test['result'][:100]}\n" + + if len(failed_tests) > 20: + md += f"\n*... and {len(failed_tests) - 20} more failed tests*\n" + else: + md += "\n---\n\n## 🎉 No Failed Tests!\n\n" + + # Recommendations + md += "\n---\n\n## 💡 Recommendations\n\n" + + critical_issues = [] + for cat_num, cat_info in CATEGORIES.items(): + if not cat_info['critical']: + continue + if cat_num not in by_category: + continue + + cat_stats = by_category[cat_num] + total_cat = sum(cat_stats.values()) + passed_cat = cat_stats.get('OK', 0) + pass_rate_cat = (passed_cat / total_cat * 100) if total_cat > 0 else 0 + + if pass_rate_cat < 70: + critical_issues.append({ + 'category': f"{cat_num}.* {cat_info['name']}", + 'pass_rate': pass_rate_cat, + }) + + if critical_issues: + md += "### ⚠️ Critical Issues Found:\n\n" + for issue in critical_issues: + md += f"- **{issue['category']}**: {issue['pass_rate']:.1f}% pass rate - Requires attention\n" + else: + md += "### ✅ All critical test categories are performing well!\n\n" + + md += """ +### Next Steps: +1. Review failed tests in detail (see uploaded HTML report) +2. Focus on critical categories (1-5, 7, 10) +3. UTF-8 failures (category 6) are acceptable for embedded clients +4. 
Compare results with reference implementations + +--- +*Generated by Autobahn Test Suite Summary Generator* +""" + + return md + + +def generate_html_summary(summary, results, output_file): + """Generate an HTML summary file""" + html = f""" + + + ESP WebSocket Client - Autobahn Test Summary + + + +
+

🔬 ESP WebSocket Client - Autobahn Test Suite Results

+

Generated: {datetime.now().strftime('%Y-%m-%d %H:%M:%S')}

+ +

📊 Overall Results

+
+
+

{summary['total']}

+

Total Tests

+
+
+

{summary['by_behavior'].get('OK', 0)}

+

Passed ({summary['by_behavior'].get('OK', 0)/summary['total']*100:.1f}%)

+
+
+

{summary['by_behavior'].get('FAILED', 0)}

+

Failed ({summary['by_behavior'].get('FAILED', 0)/summary['total']*100:.1f}%)

+
+
+ +
+
+ {summary['by_behavior'].get('OK', 0)/summary['total']*100:.1f}% Pass Rate +
+
+ +
+ + Grade: {calculate_grade(summary['by_behavior'].get('OK', 0)/summary['total']*100)[0]} + +
+ +

📋 Results by Category

+ + + + + + + + + + + + + +""" + + # Add category rows + sorted_categories = sorted(summary['by_category'].keys(), key=lambda x: int(x) if x.isdigit() else 999) + for cat_num in sorted_categories: + cat_stats = summary['by_category'][cat_num] + cat_info = CATEGORIES.get(cat_num, {'name': f'Category {cat_num}', 'critical': True, 'description': 'Unknown'}) + + total = sum(cat_stats.values()) + passed = cat_stats.get('OK', 0) + failed = cat_stats.get('FAILED', 0) + pass_rate = (passed / total * 100) if total > 0 else 0 + + html += f""" + + + + + + + + + +""" + + html += """ + +
CategoryDescriptionTotalPassedFailedPass RateCritical
{cat_num}.*{cat_info['name']} - {cat_info['description']}{total}{passed}{failed} + {pass_rate:.1f}% + {'✅ Yes' if cat_info['critical'] else '⚪ No'}
+ +

❌ Failed Tests

+ + + + + + + + + +""" + + failed_tests = [r for r in results if r['behavior'] == 'FAILED'] + for test in failed_tests[:50]: # Limit to 50 in HTML + html += f""" + + + + + +""" + + if len(failed_tests) > 50: + html += f""" + + + +""" + + html += """ + +
Test IDDescriptionResult
{test['id']}{test['description']}{test['result'][:100]}
+ ... and {len(failed_tests) - 50} more failed tests (see individual reports) +
+ +

💡 Recommendations

+
+

For Embedded WebSocket Clients:

+
    +
  • ✅ Focus on passing critical categories: 1.* (Framing), 2.* (Ping/Pong), 5.* (Fragmentation), 7.* (Close)
  • +
  • ⚠️ UTF-8 validation failures (6.*) are acceptable in trusted environments
  • +
  • 🎯 Target >70% overall pass rate for production use
  • +
  • 🔍 Review individual test reports for specific implementation issues
  • +
+
+ +
+

+ View Detailed Reports: + Open Full Autobahn Report +

+
+
+ + +""" + + with open(output_file, 'w') as f: + f.write(html) + + print( + f'{Colors.GREEN}✅ HTML summary generated: {output_file}{Colors.END}\n', + ) + + +def main(): + """Main entry point""" + script_dir = Path(__file__).parent + reports_dir = script_dir.parent / 'reports' + + # Check if GitHub Actions summary file path is provided + github_summary_file = os.environ.get('GITHUB_STEP_SUMMARY') + + if not reports_dir.exists(): + print( + f'{Colors.RED}Error: Reports directory not found: ' + f'{reports_dir}{Colors.END}', + ) + return 1 + + print( + f'{Colors.CYAN}📖 Parsing test results from: ' + f'{reports_dir}{Colors.END}', + ) + results = parse_test_results(str(reports_dir)) + + if not results: + print(f'{Colors.RED}Error: No test results found{Colors.END}') + return 1 + + print( + f'{Colors.GREEN}✅ Parsed {len(results)} ' + f'test results{Colors.END}', + ) + + summary = generate_summary(results) + + # Print console summary + print_summary_table(summary) + print_category_breakdown(summary) + print_failed_tests(results) + print_recommendations(summary) + + # Generate HTML summary + html_output = reports_dir / 'summary.html' + generate_html_summary(summary, results, str(html_output)) + + print(f'{Colors.CYAN}🌐 Open the summary in your browser:{Colors.END}') + print(f' file://{html_output.absolute()}\n') + + # Generate markdown summary for GitHub Actions + if github_summary_file: + md_summary = generate_markdown_summary(summary, results) + with open(github_summary_file, 'w') as f: + f.write(md_summary) + print(f'{Colors.GREEN}✅ GitHub Actions summary written to: {github_summary_file}{Colors.END}') + + return 0 + + +if __name__ == '__main__': + exit(main()) diff --git a/components/esp_websocket_client/examples/autobahn-testsuite/scripts/monitor_serial.py b/components/esp_websocket_client/examples/autobahn-testsuite/scripts/monitor_serial.py new file mode 100755 index 0000000000..f1a05da3ec --- /dev/null +++ 
b/components/esp_websocket_client/examples/autobahn-testsuite/scripts/monitor_serial.py @@ -0,0 +1,213 @@ +#!/usr/bin/env python3 +# SPDX-FileCopyrightText: 2025 Espressif Systems (Shanghai) CO LTD +# SPDX-License-Identifier: Unlicense OR CC0-1.0 +""" +Serial monitor script for Autobahn test suite. +Monitors ESP32 serial output and detects test completion. +""" + +import argparse +import re +import sys +import time + +import serial + + +def main() -> None: + parser = argparse.ArgumentParser( + description=( + 'Monitor ESP32 serial output for Autobahn test completion' + ), + ) + + parser.add_argument( + '--port', + '-p', + default='/dev/ttyUSB0', + help='Serial port (default: /dev/ttyUSB0)', + ) + parser.add_argument( + '--baud', + '-b', + type=int, + default=115200, + help='Baud rate (default: 115200)', + ) + parser.add_argument( + '--timeout', + '-t', + type=int, + default=2400, + help='Timeout in seconds (default: 2400 = 40 minutes)', + ) + parser.add_argument( + '--completion-pattern', + '-c', + default=r'All tests completed\.', + help=( + 'Regex pattern to detect completion ' + '(default: "All tests completed.")' + ), + ) + parser.add_argument( + '--uri', + '-u', + default=None, + help=( + 'Server URI to send via serial (stdin). If provided, will send this ' + 'URI after opening port.' 
+ ), + ) + + args = parser.parse_args() + + port = args.port + timeout_seconds = args.timeout + completion_pattern = re.compile(args.completion_pattern) + + print(f'Opening serial port: {port} at {args.baud} baud') + try: + ser = serial.Serial(port, args.baud, timeout=1) + print('Serial port opened successfully') + + try: + # If URI is provided, send it via serial (stdin) + if args.uri: + print(f'Sending server URI: {args.uri}') + print('Waiting for ESP32 to be ready...') + # Wait longer for ESP32 to boot and initialize + time.sleep(5) + # Send URI followed by newline + ser.write(f'{args.uri}\n'.encode('utf-8')) + ser.flush() + print('URI sent successfully') + print('Note: If ESP32 is not configured with CONFIG_WEBSOCKET_URI_FROM_STDIN,') + print(' it will use the URI from sdkconfig instead.') + + buffer = '' + start_time = time.time() + last_activity_time = start_time + last_status_time = start_time + test_case_pattern = re.compile(r'Case (\d+)/(\d+)') + last_test_case = None + initial_wait_time = 10 # Wait up to 10 seconds for initial output + exit_code = 0 + + print('\n--- Waiting for ESP32 output (tests should start shortly) ---\n') + print('If no output appears, the ESP32 may be:') + print(' - Still booting (wait a few more seconds)') + print(' - Connecting to WiFi') + print(' - Not configured to read URI from stdin') + print() + + while True: + elapsed = time.time() - start_time + if elapsed > timeout_seconds: + print( + '\n⚠ Timeout after ' + f'{timeout_seconds}s - tests may still be running', + ) + exit_code = 1 + break + + if ser.in_waiting: + data = ser.read(ser.in_waiting).decode( + 'utf-8', + errors='ignore', + ) + buffer += data + sys.stdout.write(data) + sys.stdout.flush() + last_activity_time = time.time() + + # Track test case progress + test_match = test_case_pattern.search(data) + if test_match: + current_case = int(test_match.group(1)) + total_cases = int(test_match.group(2)) + if current_case != last_test_case: + last_test_case = current_case + 
print( + f'\n[Progress: Test case {current_case}/{total_cases} ' + f'({100*current_case//total_cases}%)]', + file=sys.stderr + ) + + # Check for completion message + if completion_pattern.search(buffer): + print('\n✓ Test suite completed successfully!') + time.sleep(5) # Wait a bit more for any final output + exit_code = 0 + break + else: + # Show periodic status if no activity for a while + time_since_activity = time.time() - last_activity_time + time_since_status = time.time() - last_status_time + + # More frequent updates during initial wait + status_interval = 10 if elapsed < initial_wait_time else 30 + + if time_since_activity > status_interval and time_since_status > status_interval: + elapsed_min = int(elapsed // 60) + elapsed_sec = int(elapsed % 60) + if elapsed < initial_wait_time: + print( + f'\n[Status: Waiting for initial output... ' + f'({elapsed_sec}s elapsed)]', + file=sys.stderr + ) + else: + print( + f'\n[Status: Waiting for output... ' + f'({elapsed_min}m {elapsed_sec}s elapsed)]', + file=sys.stderr + ) + last_status_time = time.time() + + # After initial wait, suggest checking configuration + if elapsed >= initial_wait_time and last_activity_time == start_time: + print( + '\n[Warning: No output received yet. 
Possible issues:]', + file=sys.stderr + ) + print( + ' - ESP32 not configured with CONFIG_WEBSOCKET_URI_FROM_STDIN', + file=sys.stderr + ) + print( + ' - WiFi not connected (check credentials)', + file=sys.stderr + ) + print( + ' - Wrong serial port or baud rate', + file=sys.stderr + ) + print( + ' - ESP32 firmware not flashed correctly', + file=sys.stderr + ) + print( + '\n Try: Press RESET button on ESP32 or check serial connection', + file=sys.stderr + ) + + time.sleep(0.1) + + finally: + # Ensure serial port is closed even if an exception occurs + if ser.is_open: + ser.close() + + # Exit with appropriate code after cleanup + sys.exit(exit_code) + except serial.SerialException as e: + print(f'Error opening serial port: {e}', file=sys.stderr) + sys.exit(1) + except KeyboardInterrupt: + print('\nInterrupted by user') + sys.exit(1) + + +if __name__ == '__main__': + main() diff --git a/components/esp_websocket_client/examples/autobahn-testsuite/scripts/quick_test.sh b/components/esp_websocket_client/examples/autobahn-testsuite/scripts/quick_test.sh new file mode 100755 index 0000000000..b5ff117903 --- /dev/null +++ b/components/esp_websocket_client/examples/autobahn-testsuite/scripts/quick_test.sh @@ -0,0 +1,43 @@ +#!/bin/bash + +# Quick Test Script - Runs a minimal set of tests for rapid validation +# This is useful for development/debugging without waiting for all 300+ tests + +set -e + +SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )/.." 
&& pwd )" +cd "$SCRIPT_DIR" + +echo "==========================================" +echo "Quick Test Mode - Core Tests Only" +echo "==========================================" +echo + +# Stop any existing server +docker-compose down 2>/dev/null || true + +# Backup original config (if it exists) before replacing with quick config +if [ -f config/fuzzingserver.json ]; then + cp config/fuzzingserver.json config/fuzzingserver.json.backup +fi + +# Copy quick config +cp config/fuzzingserver-quick.json config/fuzzingserver.json + +echo "Starting fuzzing server with quick test config..." +echo " Tests: Core protocol tests (~150 cases)" +echo " Excludes: UTF-8 validation, performance, compression" +echo + +docker-compose up -d + +echo +echo "✓ Quick test server started" +echo +echo "Run your ESP32 testee client now." +echo "Estimated time: 5-10 minutes" +echo +echo "View results at: http://localhost:8080" +echo +echo "To restore full test config:" +echo " ./scripts/restore_full_tests.sh" diff --git a/components/esp_websocket_client/examples/autobahn-testsuite/scripts/restore_full_tests.sh b/components/esp_websocket_client/examples/autobahn-testsuite/scripts/restore_full_tests.sh new file mode 100755 index 0000000000..595ee18cbd --- /dev/null +++ b/components/esp_websocket_client/examples/autobahn-testsuite/scripts/restore_full_tests.sh @@ -0,0 +1,43 @@ +#!/bin/bash + +# Restore Full Test Configuration + +set -e + +SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )/.." && pwd )" +cd "$SCRIPT_DIR" + +echo "Restoring full test configuration..." 
+ +# Stop server +docker-compose down + +# Restore original config +if [ -f config/fuzzingserver.json.backup ]; then + mv config/fuzzingserver.json.backup config/fuzzingserver.json + echo "✓ Original config restored" +else + # Create default full config + cat > config/fuzzingserver.json << 'EOF' +{ + "url": "ws://0.0.0.0:9001", + "options": { + "failByDrop": false + }, + "outdir": "/reports", + "webport": 8080, + "cases": ["*"], + "exclude-cases": [ + "9.*", + "12.*", + "13.*" + ], + "exclude-agent-cases": {} +} +EOF + echo "✓ Default full config created" +fi + +echo +echo "Full test configuration restored (~300 tests)" +echo "Start server with: docker-compose up -d" diff --git a/components/esp_websocket_client/examples/autobahn-testsuite/testee/CMakeLists.txt b/components/esp_websocket_client/examples/autobahn-testsuite/testee/CMakeLists.txt new file mode 100644 index 0000000000..5daf45edd3 --- /dev/null +++ b/components/esp_websocket_client/examples/autobahn-testsuite/testee/CMakeLists.txt @@ -0,0 +1,20 @@ +cmake_minimum_required(VERSION 3.5) +include($ENV{IDF_PATH}/tools/cmake/project.cmake) + +# Add Linux compatibility components if building for Linux target +# COMPONENTS must be set before include() to limit component scanning +if(${IDF_TARGET} STREQUAL "linux") + set(common_component_dir ../../../../../common_components) + set(EXTRA_COMPONENT_DIRS + ../../.. + "${common_component_dir}/linux_compat/esp_timer" + "${common_component_dir}/linux_compat/freertos" + $ENV{IDF_PATH}/examples/protocols/linux_stubs/esp_stubs + $ENV{IDF_PATH}/examples/common_components/protocol_examples_common) + set(COMPONENTS main) +endif() + +# This override applies to this example only and does not touch ESP-IDF itself. 
+add_compile_options(-Wno-error=implicit-function-declaration) + +project(autobahn_testee) diff --git a/components/esp_websocket_client/examples/autobahn-testsuite/testee/main/CMakeLists.txt b/components/esp_websocket_client/examples/autobahn-testsuite/testee/main/CMakeLists.txt new file mode 100644 index 0000000000..241b074948 --- /dev/null +++ b/components/esp_websocket_client/examples/autobahn-testsuite/testee/main/CMakeLists.txt @@ -0,0 +1,10 @@ +# Conditionally require components based on target +if(${IDF_TARGET} STREQUAL "linux") + idf_component_register(SRCS "autobahn_testee.c" + INCLUDE_DIRS "." + REQUIRES esp_websocket_client protocol_examples_common esp_event esp_netif) +else() + idf_component_register(SRCS "autobahn_testee.c" + INCLUDE_DIRS "." + REQUIRES esp_websocket_client protocol_examples_common nvs_flash esp_wifi esp_event) +endif() diff --git a/components/esp_websocket_client/examples/autobahn-testsuite/testee/main/Kconfig.projbuild b/components/esp_websocket_client/examples/autobahn-testsuite/testee/main/Kconfig.projbuild new file mode 100644 index 0000000000..b1a29c8741 --- /dev/null +++ b/components/esp_websocket_client/examples/autobahn-testsuite/testee/main/Kconfig.projbuild @@ -0,0 +1,44 @@ +menu "Autobahn Testsuite Configuration" + + choice WEBSOCKET_URI_SOURCE + prompt "Autobahn Server URI Source" + default WEBSOCKET_URI_FROM_STRING + help + Choose how the Autobahn server URI is provided: + - From string: Use CONFIG_AUTOBAHN_SERVER_URI (compile-time) + - From stdin: Read URI from stdin (serial console at runtime) + + config WEBSOCKET_URI_FROM_STRING + bool "From string (CONFIG_AUTOBAHN_SERVER_URI)" + help + Use the URI defined in CONFIG_AUTOBAHN_SERVER_URI. + This is set at compile time. + + config WEBSOCKET_URI_FROM_STDIN + bool "From stdin (serial console)" + help + Read the URI from stdin (serial console) at runtime. + Useful for CI/CD where the server IP is only known at runtime. 
+ The application will wait for a line containing the URI. + + endchoice + + config AUTOBAHN_SERVER_URI + string "Autobahn Fuzzing Server URI" + default "ws://192.168.1.100:9001" + depends on WEBSOCKET_URI_FROM_STRING + help + URI of the Autobahn fuzzing server. + Replace with your Docker host IP address. + Example: ws://192.168.1.100:9001 + Only used when WEBSOCKET_URI_FROM_STRING is selected. + + # Inside Kconfig files symbols are referenced WITHOUT the CONFIG_ prefix;
+ # "CONFIG_IDF_TARGET" is an undefined symbol, so the option below was never
+ # visible. Use IDF_TARGET, matching the CMake ${IDF_TARGET} checks.
+ if IDF_TARGET = "linux" + config GCOV_ENABLED + bool "Coverage analyzer" + default n + help + Enables coverage analyzing for host tests. + endif + +endmenu diff --git a/components/esp_websocket_client/examples/autobahn-testsuite/testee/main/autobahn_testee.c b/components/esp_websocket_client/examples/autobahn-testsuite/testee/main/autobahn_testee.c new file mode 100644 index 0000000000..a70a911d50 --- /dev/null +++ b/components/esp_websocket_client/examples/autobahn-testsuite/testee/main/autobahn_testee.c @@ -0,0 +1,683 @@ +/* + * SPDX-FileCopyrightText: 2025 Espressif Systems (Shanghai) CO LTD + * + * SPDX-License-Identifier: Unlicense OR CC0-1.0 + */ +#include +#include +#include +#include "esp_log.h" +#include "esp_timer.h" +#include "esp_system.h" +#include "esp_event.h" +#include "esp_netif.h" +#include "protocol_examples_common.h" +#include "esp_websocket_client.h" +#include "esp_transport_ws.h" + +#if CONFIG_IDF_TARGET_LINUX +#include +#include +#include +#include +#else +#include "freertos/FreeRTOS.h" +#include "freertos/task.h" +#include "freertos/semphr.h" +#include "esp_heap_caps.h" +#include "nvs_flash.h" +#include "esp_wifi.h" +#endif + +#define TAG "autobahn" +#if CONFIG_WEBSOCKET_URI_FROM_STDIN +// URI will be read from stdin at runtime +static char g_autobahn_server_uri[256] = {0}; +#define AUTOBAHN_SERVER_URI g_autobahn_server_uri +#else +#define AUTOBAHN_SERVER_URI CONFIG_AUTOBAHN_SERVER_URI +#endif +#define BUFFER_SIZE 16384 // Reduced from 32768 to free memory for accumulator buffer +#define START_CASE 1 +#define END_CASE 300 +// Configure test 
range here: +// Category 1 (Framing): Tests 1-16 +// Category 2 (Ping/Pong): Tests 17-27 +// Category 3 (Reserved Bits): Tests 28-34 +// Category 4 (Opcodes): Tests 35-44 +// Category 5 (Fragmentation): Tests 45-64 +// Category 6 (UTF-8): Tests 65-209 +// Category 7 (Close Handshake): Tests 210-246 +// All tests: Tests 1-300 +#if CONFIG_IDF_TARGET_LINUX +static sem_t test_done_sem_storage; +static sem_t *test_done_sem = NULL; +#else +static SemaphoreHandle_t test_done_sem = NULL; +#endif +static bool test_running = false; + +#define MAX_FRAGMENTED_PAYLOAD 65537 // Maximum payload size for fragmented frames (case 1.1.6=65535, 1.1.7=65536) + +typedef struct { + uint8_t *buffer; + size_t capacity; + size_t expected_len; + size_t received; + uint8_t opcode; + bool active; +} ws_accumulator_t; + +static ws_accumulator_t s_accumulator = {0}; +static uint8_t *s_accum_buffer = NULL; // Pre-allocated buffer for fragmented frames + +static void ws_accumulator_reset(void) +{ + // Reset state but keep buffer allocated for reuse + s_accumulator.expected_len = 0; + s_accumulator.received = 0; + s_accumulator.opcode = 0; + s_accumulator.active = false; +} + +static void ws_accumulator_cleanup(void) +{ + ws_accumulator_reset(); + if (s_accum_buffer) { + free(s_accum_buffer); + s_accum_buffer = NULL; + ESP_LOGD(TAG, "Freed accumulator buffer"); + } +} + +static esp_err_t ws_accumulator_prepare(size_t total_len, uint8_t opcode) +{ + if (total_len == 0) { + return ESP_OK; + } + + if (total_len > MAX_FRAGMENTED_PAYLOAD) { + ESP_LOGE(TAG, "Payload too large (%zu > %d)", total_len, MAX_FRAGMENTED_PAYLOAD); + return ESP_ERR_INVALID_SIZE; + } + + // Allocate buffer on-demand when first fragmented frame is detected + // This avoids allocating 64KB upfront which can cause memory exhaustion + if (!s_accum_buffer) { + size_t free_heap = esp_get_free_heap_size(); +#if CONFIG_IDF_TARGET_LINUX + size_t largest_free = free_heap; // Linux has plenty of memory +#else + size_t largest_free = 
heap_caps_get_largest_free_block(MALLOC_CAP_DEFAULT); +#endif + ESP_LOGD(TAG, "Attempting accumulator alloc: need=%zu, free=%zu, largest_block=%zu", + MAX_FRAGMENTED_PAYLOAD, free_heap, largest_free); + + s_accum_buffer = (uint8_t *)malloc(MAX_FRAGMENTED_PAYLOAD); + if (!s_accum_buffer) { + ESP_LOGE(TAG, "Accumulator alloc failed (%zu bytes) - Free heap: %zu, largest block: %zu", + total_len, free_heap, largest_free); +#if !CONFIG_IDF_TARGET_LINUX + ESP_LOGE(TAG, "ESP32-S2 may not have enough RAM. Consider reducing BUFFER_SIZE or using SPIRAM"); +#endif + return ESP_ERR_NO_MEM; + } + ESP_LOGD(TAG, "Allocated accumulator buffer: %d bytes (Free heap: %lu)", + MAX_FRAGMENTED_PAYLOAD, esp_get_free_heap_size()); + } + + s_accumulator.buffer = s_accum_buffer; + s_accumulator.capacity = MAX_FRAGMENTED_PAYLOAD; + s_accumulator.expected_len = total_len; + s_accumulator.received = 0; + s_accumulator.opcode = opcode; + s_accumulator.active = true; + return ESP_OK; +} + +/* ------------------------------------------------------------ + * Low‑latency echo handler + * ------------------------------------------------------------ */ +static void websocket_event_handler(void *handler_args, + esp_event_base_t base, + int32_t event_id, + void *event_data) +{ + esp_websocket_event_data_t *data = (esp_websocket_event_data_t *)event_data; + esp_websocket_client_handle_t client = (esp_websocket_client_handle_t)handler_args; + + switch (event_id) { + case WEBSOCKET_EVENT_CONNECTED: + ESP_LOGI(TAG, "Connected"); + test_running = true; + break; + + case WEBSOCKET_EVENT_DISCONNECTED: + ESP_LOGI(TAG, "Disconnected"); + test_running = false; + ws_accumulator_reset(); // Reset state but keep buffer for next test +#if CONFIG_IDF_TARGET_LINUX + if (test_done_sem) { + sem_post(test_done_sem); + } +#else + if (test_done_sem) { + xSemaphoreGive(test_done_sem); + } +#endif + break; + + case WEBSOCKET_EVENT_DATA: { + ESP_LOGI(TAG, "WEBSOCKET_EVENT_DATA: opcode=0x%02X len=%d fin=%d payload_len=%d 
offset=%d", + data->op_code, data->data_len, data->fin, data->payload_len, data->payload_offset); + + // Safety check: if not connected, don't process data + if (!test_running || !esp_websocket_client_is_connected(client)) { + ESP_LOGW(TAG, "Received data but not connected, ignoring"); + ws_accumulator_reset(); + break; + } + + /* ---- skip control frames ---- */ + if (data->op_code >= 0x08) { + if (data->op_code == 0x09) { + ESP_LOGD(TAG, "PING -> PONG auto-sent"); + } + break; + } + + /* ---- Determine opcode to echo ---- */ + uint8_t send_opcode = 0; + if (data->op_code == 0x1) { + send_opcode = WS_TRANSPORT_OPCODES_TEXT; + } else if (data->op_code == 0x2) { + send_opcode = WS_TRANSPORT_OPCODES_BINARY; + } else if (data->op_code == 0x0) { + send_opcode = WS_TRANSPORT_OPCODES_CONT; + } else { + ESP_LOGW(TAG, "Unsupported opcode 0x%02X - skip", data->op_code); + break; + } + + /* Note: send_with_opcode always sets FIN bit, which is correct for these + * simple test cases (all have FIN=1). For fragmented messages, we'd need + * send_with_exact_opcode, but it's not public. */ + + // Safety check: validate data pointer before processing + if (!data->data_ptr && data->data_len > 0) { + ESP_LOGE(TAG, "NULL data pointer with non-zero length: %d", data->data_len); + break; + } + + const uint8_t *payload = (const uint8_t *)data->data_ptr; + size_t len = data->data_len; + + // Check if this is a fragmented message (either WebSocket fragmentation or TCP-level fragmentation) + // The WebSocket layer reads large frames in chunks and dispatches multiple events: + // - payload_len = total frame size (set on all chunks) + // - payload_offset = current offset (0, buffer_size, 2*buffer_size, ...) + // - data_len = current chunk size + // - fin = 1 only on the last chunk + // So fragmentation is detected if: payload_len > data_len OR payload_offset > 0 + size_t total_len = data->payload_len ? 
data->payload_len : data->data_len; + bool fragmented = (data->payload_len > 0 && data->payload_len > data->data_len) || + (data->payload_offset > 0); + + ESP_LOGD(TAG, "Fragmentation check: offset=%d payload_len=%d data_len=%d total_len=%zu fragmented=%d", + data->payload_offset, data->payload_len, data->data_len, total_len, fragmented); + + if (fragmented && total_len > 0) { + // Additional safety: check if buffer pointer is valid + if (!s_accum_buffer) { + ESP_LOGE(TAG, "Accumulator buffer not allocated, attempting allocation..."); + s_accum_buffer = (uint8_t *)malloc(MAX_FRAGMENTED_PAYLOAD); + if (!s_accum_buffer) { + ESP_LOGE(TAG, "Failed to allocate accumulator buffer, skipping fragmented message"); + break; + } + } + + if (data->payload_offset == 0 || !s_accumulator.active) { + if (ws_accumulator_prepare(total_len, send_opcode) != ESP_OK) { + ESP_LOGE(TAG, "Cannot allocate buffer for fragmented frame len=%zu", total_len); + break; + } + } else if (total_len != s_accumulator.expected_len) { + ESP_LOGW(TAG, "Payload len changed mid-message (%zu -> %zu) - reset accumulator", + s_accumulator.expected_len, total_len); + ws_accumulator_reset(); + if (ws_accumulator_prepare(total_len, send_opcode) != ESP_OK) { + break; + } + } + + if (!s_accumulator.active || !s_accumulator.buffer) { + ESP_LOGE(TAG, "Accumulator inactive or buffer NULL while processing fragments"); + ws_accumulator_reset(); + break; + } + + size_t offset = data->payload_offset; + // Safety checks before memcpy + if (!data->data_ptr && data->data_len > 0) { + ESP_LOGE(TAG, "NULL data pointer with non-zero length: %d", data->data_len); + ws_accumulator_reset(); + break; + } + if (offset + data->data_len > s_accumulator.capacity) { + ESP_LOGE(TAG, "Accumulator overflow: off=%zu chunk=%d cap=%zu", + offset, data->data_len, s_accumulator.capacity); + ws_accumulator_reset(); + break; + } + if (offset + data->data_len > s_accumulator.expected_len) { + ESP_LOGE(TAG, "Data exceeds expected length: off=%zu 
chunk=%d expected=%zu", + offset, data->data_len, s_accumulator.expected_len); + ws_accumulator_reset(); + break; + } + + // Final safety check before memcpy + if (!s_accumulator.buffer) { + ESP_LOGE(TAG, "Buffer became NULL before memcpy!"); + ws_accumulator_reset(); + break; + } + + memcpy(s_accumulator.buffer + offset, data->data_ptr, data->data_len); + s_accumulator.received = offset + data->data_len; + + if (s_accumulator.received < s_accumulator.expected_len) { + // wait for more fragments + ESP_LOGD(TAG, "Waiting for more fragments: received=%zu expected=%zu", + s_accumulator.received, s_accumulator.expected_len); + break; + } + + // Completed full message + payload = s_accumulator.buffer; + len = s_accumulator.expected_len; + send_opcode = s_accumulator.opcode; + s_accumulator.active = false; + } + + // Check connection before attempting to send + if (!esp_websocket_client_is_connected(client)) { + ESP_LOGW(TAG, "Connection lost before echo, skipping"); + ws_accumulator_reset(); + break; + } + + int sent = -1; + int attempt = 0; + const TickType_t backoff[] = {1, 1, 1, 2, 4, 8}; // Shorter backoff for faster retry + int64_t start = esp_timer_get_time(); + + /* Send echo immediately - use timeout scaled by frame size for large frames */ + /* For large messages (>16KB), the send function fragments into multiple chunks */ + /* Each chunk needs sufficient timeout, so scale timeout per chunk, not total message */ + while (sent < 0 && esp_websocket_client_is_connected(client)) { + /* For zero-length payload, pass NULL pointer (API handles this correctly) */ + /* Calculate timeout per chunk: large messages are fragmented, each chunk needs time */ +#if CONFIG_IDF_TARGET_LINUX + int send_timeout_ms = 5; // Default 5ms for small frames + if (len > 1024) { + send_timeout_ms = 500; // 500ms per chunk for large messages + } else { + send_timeout_ms = (len / 256) + 10; + if (send_timeout_ms > 100) { + send_timeout_ms = 100; + } + } + TickType_t send_timeout = 
pdMS_TO_TICKS(send_timeout_ms); + ESP_LOGD(TAG, "Sending echo: opcode=0x%02X len=%zu timeout=%dms", + send_opcode, len, send_timeout_ms); +#else + TickType_t send_timeout = pdMS_TO_TICKS(5); // Default 5ms for small frames + if (len > 1024) { + // For large messages, use a per-chunk timeout that accounts for network delays + // Since messages are fragmented into ~16KB chunks, each chunk needs sufficient time + // Use a fixed generous timeout per chunk for large messages (500ms per chunk) + // For 65535 bytes = 4 chunks, total time could be up to 2 seconds + send_timeout = pdMS_TO_TICKS(500); // 500ms per chunk for large messages + } else { + // Small messages: scale timeout based on size + send_timeout = pdMS_TO_TICKS((len / 256) + 10); + if (send_timeout > pdMS_TO_TICKS(100)) { + send_timeout = pdMS_TO_TICKS(100); + } + } + + ESP_LOGD(TAG, "Sending echo: opcode=0x%02X len=%zu timeout=%lums", + send_opcode, len, (unsigned long)(send_timeout * portTICK_PERIOD_MS)); +#endif + + sent = esp_websocket_client_send_with_opcode( + client, send_opcode, + (len > 0) ? 
payload : NULL, len, + send_timeout); + + if (sent >= 0) { + ESP_LOGD(TAG, "Echo sent successfully: %d bytes", sent); + break; + } + ESP_LOGW(TAG, + "echo send retry: opcode=0x%02X len=%zu fin=%d attempt=%d sent=%d", + send_opcode, len, data->fin, attempt + 1, sent); + if (attempt < (int)(sizeof(backoff) / sizeof(backoff[0]))) { +#if CONFIG_IDF_TARGET_LINUX + usleep(backoff[attempt++] * 1000); // Convert ticks to microseconds +#else + vTaskDelay(backoff[attempt++]); +#endif + } else { +#if CONFIG_IDF_TARGET_LINUX + usleep(32 * 1000); +#else + vTaskDelay(32); +#endif + } + } + + int64_t dt = esp_timer_get_time() - start; + if (sent >= 0) { + ESP_LOGI(TAG, "Echo success: opcode=0x%02X len=%d fin=%d in %lldus", + data->op_code, sent, data->fin, (long long)dt); + } else { + ESP_LOGE(TAG, "Echo failed: opcode=0x%02X len=%d fin=%d", + data->op_code, (int)len, data->fin); + } + break; + } + + case WEBSOCKET_EVENT_ERROR: + ESP_LOGW(TAG, "WebSocket error event"); + test_running = false; + ws_accumulator_reset(); // Reset accumulator on error +#if CONFIG_IDF_TARGET_LINUX + if (test_done_sem) { + sem_post(test_done_sem); + } +#else + if (test_done_sem) { + xSemaphoreGive(test_done_sem); + } +#endif + break; + + case WEBSOCKET_EVENT_FINISH: + ESP_LOGD(TAG, "WebSocket finish event"); + test_running = false; + ws_accumulator_reset(); // Reset accumulator on finish +#if CONFIG_IDF_TARGET_LINUX + if (test_done_sem) { + sem_post(test_done_sem); + } +#else + if (test_done_sem) { + xSemaphoreGive(test_done_sem); + } +#endif + break; + + default: + break; + } +} + +/* ------------------------------------------------------------ */ +static esp_err_t run_test_case(int case_num) +{ + char uri[512]; // Increased to accommodate full URI + path + int ret = snprintf(uri, sizeof(uri), + "%s/runCase?case=%d&agent=esp_websocket_client", + AUTOBAHN_SERVER_URI, case_num); + if (ret < 0 || ret >= (int)sizeof(uri)) { + ESP_LOGE(TAG, "URI too long: %s/runCase?case=%d&agent=esp_websocket_client", 
AUTOBAHN_SERVER_URI, case_num); + return ESP_ERR_INVALID_ARG; + } + ESP_LOGI(TAG, "Running case %d: %s", case_num, uri); + + esp_websocket_client_config_t cfg = { + .uri = uri, + .buffer_size = BUFFER_SIZE, + .network_timeout_ms = 10000, // 10s for connection (default), 200ms was too short + .reconnect_timeout_ms = 500, + .task_prio = 10, // High prio → low latency + .task_stack = 8144, + }; + + // If accumulator buffer is not allocated yet, try to allocate it now + // (before client init to avoid fragmentation) + if (!s_accum_buffer) { + ESP_LOGD(TAG, "Attempting to allocate accumulator buffer before client init (Free heap: %lu)", + esp_get_free_heap_size()); + s_accum_buffer = (uint8_t *)malloc(MAX_FRAGMENTED_PAYLOAD); + if (s_accum_buffer) { + ESP_LOGD(TAG, "Successfully allocated accumulator buffer: %d bytes", MAX_FRAGMENTED_PAYLOAD); + } + } + + esp_websocket_client_handle_t client = esp_websocket_client_init(&cfg); + if (!client) { + return ESP_FAIL; + } + + esp_websocket_register_events(client, WEBSOCKET_EVENT_ANY, + websocket_event_handler, (void*)client); + +#if CONFIG_IDF_TARGET_LINUX + sem_init(&test_done_sem_storage, 0, 0); + test_done_sem = &test_done_sem_storage; +#else + test_done_sem = xSemaphoreCreateBinary(); +#endif + + esp_err_t start_ret = esp_websocket_client_start(client); + if (start_ret != ESP_OK) { + ESP_LOGE(TAG, "esp_websocket_client_start() failed: err=0x%x", start_ret); +#if CONFIG_IDF_TARGET_LINUX + if (test_done_sem) { + sem_destroy(test_done_sem); + } +#else + if (test_done_sem) { + vSemaphoreDelete(test_done_sem); + } +#endif + test_done_sem = NULL; + esp_websocket_client_destroy(client); + return start_ret; + } + + /* Wait up to 60 s so server can close properly */ +#if CONFIG_IDF_TARGET_LINUX + { + struct timespec ts; + clock_gettime(CLOCK_REALTIME, &ts); + ts.tv_sec += 60; // absolute timeout (now + 60s) + (void)sem_timedwait(test_done_sem, &ts); + } +#else + xSemaphoreTake(test_done_sem, pdMS_TO_TICKS(60000)); +#endif + + if 
(esp_websocket_client_is_connected(client)) { + esp_websocket_client_stop(client); + } + + esp_websocket_client_destroy(client); +#if CONFIG_IDF_TARGET_LINUX + if (test_done_sem) { + sem_destroy(test_done_sem); + } +#else + if (test_done_sem) { + vSemaphoreDelete(test_done_sem); + } +#endif + test_done_sem = NULL; + ESP_LOGI(TAG, "Free heap: %lu", esp_get_free_heap_size()); + return ESP_OK; +} + +/* ------------------------------------------------------------ */ +static void update_reports(void) +{ + char uri[512]; // Increased to accommodate full URI + path + int ret = snprintf(uri, sizeof(uri), + "%s/updateReports?agent=esp_websocket_client", + AUTOBAHN_SERVER_URI); + if (ret < 0 || ret >= (int)sizeof(uri)) { + ESP_LOGE(TAG, "URI too long: %s/updateReports?agent=esp_websocket_client", AUTOBAHN_SERVER_URI); + return; + } + esp_websocket_client_config_t cfg = { .uri = uri }; + esp_websocket_client_handle_t client = esp_websocket_client_init(&cfg); + if (!client) { + ESP_LOGE(TAG, "Failed to initialize WebSocket client for update_reports"); + return; + } + esp_err_t start_ret = esp_websocket_client_start(client); + if (start_ret != ESP_OK) { + ESP_LOGE(TAG, "esp_websocket_client_start() failed for update_reports: err=0x%x", start_ret); + esp_websocket_client_destroy(client); + return; + } +#if CONFIG_IDF_TARGET_LINUX + usleep(3000 * 1000); // 3 seconds +#else + vTaskDelay(pdMS_TO_TICKS(3000)); +#endif + esp_websocket_client_stop(client); + esp_websocket_client_destroy(client); + ESP_LOGI(TAG, "Reports updated"); +} + +/* ------------------------------------------------------------ */ +static void websocket_app_start(void) +{ + ESP_LOGI(TAG, "===================================="); + ESP_LOGI(TAG, " Autobahn WebSocket Testsuite Client"); + ESP_LOGI(TAG, "===================================="); + + ESP_LOGI(TAG, "Server: %s", AUTOBAHN_SERVER_URI); + + // Accumulator buffer should already be allocated in app_main() before any clients + // If not, it will be allocated 
on-demand when first fragmented frame is detected + if (s_accum_buffer) { + ESP_LOGI(TAG, "Accumulator buffer ready: %d bytes", MAX_FRAGMENTED_PAYLOAD); + } else { + ESP_LOGW(TAG, "Accumulator buffer not pre-allocated, will allocate on-demand (max %d bytes)", MAX_FRAGMENTED_PAYLOAD); + } + for (int i = START_CASE; i <= END_CASE; i++) { + ESP_LOGI(TAG, "========== Case %d/%d ==========", i, END_CASE); + ESP_LOGI(TAG, "Starting test case %d...", i); + esp_err_t ret = run_test_case(i); + if (ret != ESP_OK) { + ESP_LOGW(TAG, "Test case %d failed with error: 0x%x", i, ret); + } else { + ESP_LOGI(TAG, "Test case %d completed", i); + } +#if CONFIG_IDF_TARGET_LINUX + usleep(500 * 1000); // 500ms +#else + vTaskDelay(pdMS_TO_TICKS(500)); +#endif + } + update_reports(); + + // Free accumulator buffer after all tests + ws_accumulator_cleanup(); + ESP_LOGI(TAG, "All tests completed."); +} + +#if CONFIG_WEBSOCKET_URI_FROM_STDIN +/* ------------------------------------------------------------ + * Read URI from stdin (similar to websocket_example.c) + * ------------------------------------------------------------ */ +static void get_string(char *line, size_t size) +{ + int count = 0; + while (count < size - 1) { + int c = fgetc(stdin); + if (c == '\n' || c == '\r') { + line[count] = '\0'; + break; + } else if (c > 0 && c < 127) { + line[count] = c; + ++count; + } else if (c == EOF) { +#if CONFIG_IDF_TARGET_LINUX + usleep(10 * 1000); // 10ms +#else + vTaskDelay(10 / portTICK_PERIOD_MS); +#endif + } + } + line[count] = '\0'; +} +#endif /* CONFIG_WEBSOCKET_URI_FROM_STDIN */ + +/* ------------------------------------------------------------ */ +#if CONFIG_IDF_TARGET_LINUX +int main(void) +#else +void app_main(void) +#endif +{ + // Disable stdout buffering for immediate output + setvbuf(stdout, NULL, _IONBF, 0); + setvbuf(stderr, NULL, _IONBF, 0); + + ESP_LOGI(TAG, "Startup, IDF %s", esp_get_idf_version()); +#if !CONFIG_IDF_TARGET_LINUX + ESP_ERROR_CHECK(nvs_flash_init()); +#endif + 
ESP_ERROR_CHECK(esp_netif_init()); + ESP_ERROR_CHECK(esp_event_loop_create_default()); + + // Allocate accumulator buffer early, before any WebSocket clients are created + // This ensures we have enough contiguous memory before heap gets fragmented + // ESP32-S2 has limited RAM (~320KB total), so we need to allocate this early + ESP_LOGI(TAG, "Allocating accumulator buffer early (Free heap: %lu)", esp_get_free_heap_size()); + s_accum_buffer = (uint8_t *)malloc(MAX_FRAGMENTED_PAYLOAD); + if (!s_accum_buffer) { + ESP_LOGE(TAG, "Failed to allocate accumulator buffer (%d bytes) - Free heap: %lu", + MAX_FRAGMENTED_PAYLOAD, esp_get_free_heap_size()); + ESP_LOGE(TAG, "ESP32-S2 may not have enough RAM for 64KB buffer. Consider:"); + ESP_LOGE(TAG, " 1. Reducing BUFFER_SIZE further (currently %d)", BUFFER_SIZE); + ESP_LOGE(TAG, " 2. Using SPIRAM if available"); + ESP_LOGE(TAG, " 3. Skipping large payload tests (case 1.1.6)"); + // Continue anyway - will try on-demand allocation later + } else { + ESP_LOGI(TAG, "Successfully allocated accumulator buffer: %d bytes (Free heap: %lu)", + MAX_FRAGMENTED_PAYLOAD, esp_get_free_heap_size()); + } + + ESP_ERROR_CHECK(example_connect()); + +#if !CONFIG_IDF_TARGET_LINUX + /* disable power‑save for low latency */ + esp_wifi_set_ps(WIFI_PS_NONE); +#endif + +#if CONFIG_WEBSOCKET_URI_FROM_STDIN + // Read server URI from stdin + ESP_LOGI(TAG, "Waiting for Autobahn server URI from stdin..."); + ESP_LOGI(TAG, "Please send URI in format: ws://:9001"); + // Loop until we get a valid URI + do { + get_string(g_autobahn_server_uri, sizeof(g_autobahn_server_uri)); + // Ensure null termination + g_autobahn_server_uri[sizeof(g_autobahn_server_uri) - 1] = '\0'; + } while (strlen(g_autobahn_server_uri) == 0); + + ESP_LOGI(TAG, "Received server URI: %s", g_autobahn_server_uri); +#endif + + websocket_app_start(); +#if CONFIG_IDF_TARGET_LINUX + return 0; +#endif +} diff --git 
# cJSON library for JSON handling
b/components/esp_websocket_client/examples/autobahn-testsuite/testee/sdkconfig.ci.target.plain_tcp new file mode 100644 index 0000000000..6efc580133 --- /dev/null +++ b/components/esp_websocket_client/examples/autobahn-testsuite/testee/sdkconfig.ci.target.plain_tcp @@ -0,0 +1,15 @@ +CONFIG_IDF_TARGET="esp32" +CONFIG_IDF_TARGET_LINUX=n +CONFIG_WEBSOCKET_URI_FROM_STDIN=y +CONFIG_WEBSOCKET_URI_FROM_STRING=n +CONFIG_EXAMPLE_CONNECT_ETHERNET=y +CONFIG_EXAMPLE_CONNECT_WIFI=n +CONFIG_EXAMPLE_USE_INTERNAL_ETHERNET=y +CONFIG_EXAMPLE_ETH_PHY_IP101=y +CONFIG_EXAMPLE_ETH_MDC_GPIO=23 +CONFIG_EXAMPLE_ETH_MDIO_GPIO=18 +CONFIG_EXAMPLE_ETH_PHY_RST_GPIO=5 +CONFIG_EXAMPLE_ETH_PHY_ADDR=1 +CONFIG_EXAMPLE_CONNECT_IPV6=y +CONFIG_WS_OVER_TLS_MUTUAL_AUTH=n +CONFIG_WS_OVER_TLS_SERVER_AUTH=n