Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
7 changes: 7 additions & 0 deletions .github/workflows/L2-tests.yml
Original file line number Diff line number Diff line change
Expand Up @@ -220,6 +220,11 @@ jobs:
&&
sudo cmake --install build/rdkservices

- name: Regenerate bundles for cgroupv2 compatibility
working-directory: Dobby/tests/L2_testing/test_runner/bundle/
run: |
python3 regenerate_bundles.py

- name: Run the l2 test
working-directory: Dobby/tests/L2_testing/test_runner/
run: |
Expand All @@ -234,6 +239,7 @@ jobs:
-d $GITHUB_WORKSPACE
&&
lcov
--ignore-errors unused
-r coverage.info
'/usr/include/*'
'*/tests/L1_testing/*'
Expand All @@ -254,3 +260,4 @@ jobs:
DobbyL2TestResults.json
l2coverage
if-no-files-found: warn

5 changes: 3 additions & 2 deletions bundle/lib/source/templates/OciConfigJson1.0.2-dobby.template
Original file line number Diff line number Diff line change
Expand Up @@ -328,8 +328,8 @@ static const char* ociJsonTemplate = R"JSON(
],
"memory": {
"limit": {{MEM_LIMIT}},
"swap": {{MEM_LIMIT}},
"swappiness": 60
"swappiness": 60,
"swap": {{MEM_LIMIT}}
},
"cpu": {
{{#CPU_SHARES_ENABLED}}
Expand Down Expand Up @@ -401,3 +401,4 @@ static const char* ociJsonTemplate = R"JSON(
{{/ENABLE_RDK_PLUGINS}}
}
)JSON";

Original file line number Diff line number Diff line change
Expand Up @@ -339,8 +339,8 @@ static const char* ociJsonTemplate = R"JSON(
],
"memory": {
"limit": {{MEM_LIMIT}},
"swap": {{MEM_LIMIT}},
"swappiness": 60
"swappiness": 60,
"swap": {{MEM_LIMIT}}
},
"cpu": {
{{#CPU_SHARES_ENABLED}}
Expand Down Expand Up @@ -412,3 +412,4 @@ static const char* ociJsonTemplate = R"JSON(
{{/ENABLE_RDK_PLUGINS}}
}
)JSON";

1 change: 0 additions & 1 deletion client/tool/source/Main.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -100,7 +100,6 @@ void containerStopCallback(int32_t cd, const std::string &containerId,
if (state == IDobbyProxyEvents::ContainerState::Stopped && containerId == *id)
{
AI_LOG_INFO("Container %s has stopped", containerId.c_str());
std::lock_guard<std::mutex> locker(gLock);
promise.set_value();
}
Comment on lines 100 to 104
Copy link

Copilot AI Apr 16, 2026

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Removing the mutex lock in the callback introduces a data race on the global promise: stopCommand reassigns promise under gLock, while this callback can call promise.set_value() concurrently from the proxy event thread. That’s undefined behavior and can crash intermittently. Restore synchronization (e.g., keep std::lock_guard<std::mutex> locker(gLock); here, or switch to a thread-safe pattern such as storing the promise in a shared_ptr captured by the callback).

Copilot uses AI. Check for mistakes.
}
Expand Down
25 changes: 16 additions & 9 deletions tests/L2_testing/test_runner/annotation_tests.py
Original file line number Diff line number Diff line change
def test_container(container_id, expected_output):
    """Start a container from its spec, validate its annotations, then stop it.

    Parameters:
        container_id (string): name of the container to launch
        expected_output (string): annotation output expected from the container

    Returns:
        (bool, string) on launch failure, otherwise the result of
        validate_annotation (which reports pass/fail for the annotation check).
    """
    test_utils.print_log("Running %s container test" % container_id, test_utils.Severity.debug)

    spec_path = test_utils.get_container_spec_path(container_id)

    command = ["DobbyTool",
               "start",
               container_id,
               spec_path]

    status = test_utils.run_command_line(command)
    if "started '" + container_id + "' container" not in status.stdout:
        return False, "Container did not launch successfully"

    # try/finally guarantees the container is stopped even if validation
    # raises, so a failed test cannot leak a running container into the
    # next test case.
    try:
        result = validate_annotation(container_id, expected_output)
    finally:
        # Stop the container after the test
        test_utils.dobby_tool_command("stop", container_id)

    return result


def validate_annotation(container_id, expected_output):
Expand Down Expand Up @@ -126,3 +132,4 @@ def validate_annotation(container_id, expected_output):
if __name__ == "__main__":
test_utils.parse_arguments(__file__, True)
execute_test()

97 changes: 54 additions & 43 deletions tests/L2_testing/test_runner/basic_sanity_tests.py
Original file line number Diff line number Diff line change
Expand Up @@ -18,8 +18,9 @@
import test_utils
from subprocess import check_output
import subprocess
from time import sleep
import multiprocessing
from time import sleep, monotonic
import select
import os
from os.path import basename

tests = (
Expand Down Expand Up @@ -69,7 +70,9 @@ def execute_test():
# Test 2
test = tests[2]
stop_dobby_daemon()
result = read_asynchronous(subproc, test.expected_output, 5)
# Some platforms do not emit a deterministic "stopped" log line.
# Verify stop by process absence instead.
result = not check_if_process_present(tests[3].expected_output)
output = test_utils.create_simple_test_output(test, result)
output_table.append(output)
test_utils.print_single_result(output)
Expand All @@ -85,53 +88,59 @@ def execute_test():
return test_utils.count_print_results(output_table)


# Uses select() for a true timeout instead of threads — no lingering readers.
# Reads raw bytes via os.read() to avoid Python TextIOWrapper buffering that
# can desynchronise from select()'s kernel-level readiness checks.
def read_asynchronous(proc, string_to_find, timeout):
    """Reads from process stderr with a real timeout using select().

    Unlike a threaded approach, this cannot leak a blocked reader: select()
    returns when data is available *or* when the timeout expires, so the
    caller always regains control promptly.

    Parameters:
        proc (process): process whose stderr we read
        string_to_find (string): what we want to find in process output
        timeout (float): how long we should wait if string not found (seconds)

    Returns:
        found (bool): True if string_to_find was found in proc stderr.

    """
    test_utils.print_log("Starting select-based read", test_utils.Severity.debug)
    deadline = monotonic() + timeout
    fd = proc.stderr.fileno()
    accumulated = ""

    while True:
        remaining = deadline - monotonic()
        if remaining <= 0:
            test_utils.print_log("Not found string \"%s\" (timeout). Accumulated output: %s"
                                 % (string_to_find, repr(accumulated)), test_utils.Severity.error)
            return False

        # Wait until stderr has data or timeout expires
        ready, _, _ = select.select([fd], [], [], remaining)
        if not ready:
            # Timeout with no data
            test_utils.print_log("Not found string \"%s\" (select timeout). Accumulated output: %s"
                                 % (string_to_find, repr(accumulated)), test_utils.Severity.error)
            return False

        # Read raw bytes to avoid TextIOWrapper buffering mismatch with select()
        chunk = os.read(fd, 4096)
        if not chunk:
            # EOF — process exited / pipe closed
            test_utils.print_log("EOF on process stderr, stopping reader. Accumulated output: %s"
                                 % repr(accumulated), test_utils.Severity.debug)
            return False

        # errors="replace" keeps the search alive even if a multi-byte
        # UTF-8 sequence is split across read() chunks.
        accumulated += chunk.decode("utf-8", errors="replace")

        if string_to_find in accumulated:
            test_utils.print_log("Found string \"%s\"" % string_to_find, test_utils.Severity.debug)
            return True


def check_if_process_present(string_to_find):
Expand Down Expand Up @@ -195,11 +204,13 @@ def stop_dobby_daemon():
"""

test_utils.print_log("Stopping Dobby Daemon", test_utils.Severity.debug)
subproc = test_utils.run_command_line(["sudo", "pkill", "DobbyDaemon"])
sleep(0.2)
subproc = test_utils.run_command_line(["sudo", "pkill", "-9", "DobbyDaemon"])
sleep(1) # Give process time to fully terminate and be reaped
return subproc


if __name__ == "__main__":
test_utils.parse_arguments(__file__, True)
execute_test()


Loading
Loading