Skip to content

Commit 1398fa5

Browse files
authored
[PyTorch Debug] Skip log test on device if it does not support fp8. (NVIDIA#2109)
Fix test on old device.

Signed-off-by: Pawel Gadzinski <pgadzinski@nvidia.com>
1 parent d370608 commit 1398fa5

File tree

1 file changed

+6
-0
lines changed

1 file changed

+6
-0
lines changed

tests/pytorch/debug/test_log.py

Lines changed: 6 additions & 0 deletions
```diff
@@ -119,6 +119,9 @@ def read_log(log_dir: str) -> str:


 def test_sanity(feature_dirs):
+    if not fp8_available:
+        pytest.skip(reason_for_no_fp8)
+
     log_all_stats_config = LOG_QUANTIZED_CONFIG_BASE.format(stats=", ".join(all_stats))
     with debug_session(log_all_stats_config, feature_dirs) as log_dir:
         model = te.Linear(128, 128, params_dtype=torch.bfloat16)
@@ -207,6 +210,9 @@ def test_numerics(fp8_recipe, feature_dirs):

 @pytest.mark.parametrize("layer", ["linear", "transformer"])
 def test_log_every_3_or_5_layers(layer, configs_dir, feature_dirs):
+    if not fp8_available:
+        pytest.skip(reason_for_no_fp8)
+
     # If layer does not invoke any feature in current iteration,
     # then it changed into non-debug mode.
     # This test checks whether this works correctly -
```

0 commit comments

Comments (0)