From 8a42f763266a593e963c823e3da022da4d3227e5 Mon Sep 17 00:00:00 2001
From: cehongwang
Date: Fri, 22 Aug 2025 20:54:31 +0000
Subject: [PATCH 1/3] Added the dynamic check in the validator

---
 .../dynamo/conversion/aten_ops_converters.py | 8 ++++++++
 1 file changed, 8 insertions(+)

diff --git a/py/torch_tensorrt/dynamo/conversion/aten_ops_converters.py b/py/torch_tensorrt/dynamo/conversion/aten_ops_converters.py
index 478479173b..438ec00fbd 100644
--- a/py/torch_tensorrt/dynamo/conversion/aten_ops_converters.py
+++ b/py/torch_tensorrt/dynamo/conversion/aten_ops_converters.py
@@ -25,6 +25,7 @@
     get_positive_dim,
     is_only_operator_on_placeholder,
 )
+from torch_tensorrt.dynamo.utils import DYNAMIC_DIM
 
 _LOGGER: logging.Logger = logging.getLogger(__name__)
 
@@ -2758,6 +2759,13 @@ def sort_validator(node: Node, settings: Optional[CompilationSettings] = None) -
 
 
 def topk_sort_validator(k: int) -> bool:
+
+    # topk layer supports dynamic k value but we cannot determine supported dynamic topk value at
+    # compile time.
+    if k == DYNAMIC_DIM:
+        _LOGGER.debug("k value cannot be dynamic!")
+        return False
+
     if k > 3840:
         _LOGGER.debug(
             f"Currently only topk values up to 3840 are supported, got k={k}."

From e6753d157e6610de80446ed9275f7668b0e72a7f Mon Sep 17 00:00:00 2001
From: cehongwang
Date: Mon, 25 Aug 2025 18:19:28 +0000
Subject: [PATCH 2/3] rewrote the comment

---
 py/torch_tensorrt/dynamo/conversion/aten_ops_converters.py | 6 ++++--
 py/torch_tensorrt/dynamo/conversion/impl/topk.py           | 4 ----
 2 files changed, 4 insertions(+), 6 deletions(-)

diff --git a/py/torch_tensorrt/dynamo/conversion/aten_ops_converters.py b/py/torch_tensorrt/dynamo/conversion/aten_ops_converters.py
index 438ec00fbd..f73f2ee7ab 100644
--- a/py/torch_tensorrt/dynamo/conversion/aten_ops_converters.py
+++ b/py/torch_tensorrt/dynamo/conversion/aten_ops_converters.py
@@ -2763,12 +2763,14 @@ def topk_sort_validator(k: int) -> bool:
     # topk layer supports dynamic k value but we cannot determine supported dynamic topk value at
     # compile time.
     if k == DYNAMIC_DIM:
-        _LOGGER.debug("k value cannot be dynamic!")
+        _LOGGER.debug(
+            "[top_k validator] Converter does not support k being a dynamic value. Therefore, aten::topk will run in PyTorch"
+        )
         return False
 
     if k > 3840:
         _LOGGER.debug(
-            f"Currently only topk values up to 3840 are supported, got k={k}."
+            f"[top_k validator] Currently only topk values up to 3840 are supported, got k={k}. Therefore, aten::topk will run in PyTorch"
         )
         return False
     return True
diff --git a/py/torch_tensorrt/dynamo/conversion/impl/topk.py b/py/torch_tensorrt/dynamo/conversion/impl/topk.py
index 053a46ce2b..638cbf599e 100644
--- a/py/torch_tensorrt/dynamo/conversion/impl/topk.py
+++ b/py/torch_tensorrt/dynamo/conversion/impl/topk.py
@@ -209,10 +209,6 @@ def topk(
         get_axes_for_reduce_op(get_positive_dim(dim, len(input.shape))),
     )
 
-    # topk layer supports dynamic k value but we cannot dertermin supported dynamic topk value at
-    # compile time.
-    assert k != DYNAMIC_DIM, "k value cannot be dynamic!"
-
     # TensorRT ITopKLayer does not have a sorted flag, it is always returning the sorted topk elements
     # so here no matter sorted is True or False the returned the topk Tensor object is always sorted
     set_layer_name(topk_layer, target, f"{name}_topk", source_ir)

From 850aa9e06fe90cf23f86ec59bab6280abc618570 Mon Sep 17 00:00:00 2001
From: cehongwang
Date: Mon, 17 Nov 2025 23:58:33 +0000
Subject: [PATCH 3/3] changed the warning message

---
 .../dynamo/conversion/aten_ops_converters.py | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)

diff --git a/py/torch_tensorrt/dynamo/conversion/aten_ops_converters.py b/py/torch_tensorrt/dynamo/conversion/aten_ops_converters.py
index f73f2ee7ab..02b6eb6377 100644
--- a/py/torch_tensorrt/dynamo/conversion/aten_ops_converters.py
+++ b/py/torch_tensorrt/dynamo/conversion/aten_ops_converters.py
@@ -2762,14 +2762,14 @@ def topk_sort_validator(k: int) -> bool:
 
     # topk layer supports dynamic k value but we cannot determine supported dynamic topk value at
     # compile time.
-    if k == DYNAMIC_DIM:
-        _LOGGER.debug(
-            "[top_k validator] Converter does not support k being a dynamic value. Therefore, aten::topk will run in PyTorch"
+    if k == DYNAMIC_DIM or not isinstance(k, int):
+        _LOGGER.warning(
+            "[top_k validator] It's not expected for k to be a dynamic or data-dependent value. aten::topk will run in PyTorch"
         )
         return False
 
     if k > 3840:
-        _LOGGER.debug(
+        _LOGGER.warning(
             f"[top_k validator] Currently only topk values up to 3840 are supported, got k={k}. Therefore, aten::topk will run in PyTorch"
         )
         return False
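Reviewer note (not part of the patch series): taken together, the three patches replace a hard assert in the topk converter with a capability check, so a graph whose k is dynamic, data-dependent, or above TensorRT's limit falls back to running aten::topk in PyTorch instead of failing at compile time. Below is a minimal, self-contained sketch of the validator's final behavior after PATCH 3/3. The DYNAMIC_DIM value of -1 is an assumption standing in for the sentinel exported by torch_tensorrt.dynamo.utils, used here only so the sketch runs without Torch-TensorRT installed.

import logging

logging.basicConfig(level=logging.WARNING)
_LOGGER = logging.getLogger("topk_sort_validator_sketch")

# Assumption: stand-in for torch_tensorrt.dynamo.utils.DYNAMIC_DIM; the real
# sentinel lives in that module and is not imported in this standalone sketch.
DYNAMIC_DIM = -1


def topk_sort_validator(k: int) -> bool:
    # A dynamic or data-dependent k cannot be proven supportable at compile
    # time, so the node is rejected and aten::topk runs in PyTorch.
    if k == DYNAMIC_DIM or not isinstance(k, int):
        _LOGGER.warning(
            "[top_k validator] It's not expected for k to be a dynamic or data-dependent value. aten::topk will run in PyTorch"
        )
        return False

    # TensorRT's ITopKLayer caps k at 3840; larger static values also fall back.
    if k > 3840:
        _LOGGER.warning(
            f"[top_k validator] Currently only topk values up to 3840 are supported, got k={k}. Therefore, aten::topk will run in PyTorch"
        )
        return False
    return True


if __name__ == "__main__":
    print(topk_sort_validator(10))           # True  -> eligible for TensorRT conversion
    print(topk_sort_validator(4096))         # False -> exceeds the 3840 cap
    print(topk_sort_validator(DYNAMIC_DIM))  # False -> dynamic k, PyTorch fallback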