From 33db28636c6cf0b554cb38ad39948515bff80a04 Mon Sep 17 00:00:00 2001
From: Adithyan Madhu
Date: Thu, 29 Jan 2026 11:04:50 +0530
Subject: [PATCH 1/8] Refactor module access to use PyTorch get/set_submodule API

---
 auto_round/utils/model.py | 29 ++++++++++-------------------
 1 file changed, 10 insertions(+), 19 deletions(-)

diff --git a/auto_round/utils/model.py b/auto_round/utils/model.py
index f4c6e7b75..56ead72d2 100644
--- a/auto_round/utils/model.py
+++ b/auto_round/utils/model.py
@@ -1055,38 +1055,29 @@ def _to_model_dtype(model, model_dtype):
     return model
 
 
+
+
+
+
 def get_module(module, key):
-    """Get module from model by key name.
+    """Get module from model by key name using PyTorch native API.
 
     Args:
         module (torch.nn.Module): original model
-        key (str): module name to be replaced
+        key (str): module name
     """
-    name_list = key.split(".")
-    for name in name_list:
-        module = getattr(module, name, None)
-    return module
+    return module.get_submodule(key)
 
 
 def set_module(model, key, new_module):
-    """Set new module into model by key name.
+    """Set new module into model by key name using PyTorch native API.
 
     Args:
         model (torch.nn.Module): original model
-        key (str): module name to be replaced
+        key (str): module name
         new_module (torch.nn.Module): new module to be inserted
     """
-    module = model
-    name_list = key.split(".")
-    for name in name_list[:-1]:
-        if hasattr(module, name):
-            module = getattr(module, name)
-    setattr(module, name_list[-1], new_module)
-
-
-# For getting and setting attribution, such as 'lm_head.weight'
-get_attr = get_module
-set_attr = set_module
+    model.set_submodule(key, new_module)
 
 
 def get_layer_features(layer):

From 348db37d8e009fb8a64a8c9de1aa2d405ae858db Mon Sep 17 00:00:00 2001
From: "pre-commit-ci[bot]"
 <66853113+pre-commit-ci[bot]@users.noreply.github.com>
Date: Thu, 29 Jan 2026 05:40:50 +0000
Subject: [PATCH 2/8] [pre-commit.ci] auto fixes from pre-commit.com hooks

for more information, see https://pre-commit.ci
---
 auto_round/utils/model.py | 4 ----
 1 file changed, 4 deletions(-)

diff --git a/auto_round/utils/model.py b/auto_round/utils/model.py
index 56ead72d2..8f4a1bb87 100644
--- a/auto_round/utils/model.py
+++ b/auto_round/utils/model.py
@@ -1055,10 +1055,6 @@ def _to_model_dtype(model, model_dtype):
     return model
 
 
-
-
-
-
 def get_module(module, key):
     """Get module from model by key name using PyTorch native API.
 

From ac968d91064df4ff39e0499d37d3dfbfbb3c4ee2 Mon Sep 17 00:00:00 2001
From: Adithyan Madhu
Date: Fri, 30 Jan 2026 10:15:35 +0530
Subject: [PATCH 3/8] restored get_attr and set_attr

---
 auto_round/utils/model.py | 34 ++++++++++++++++++++++++++++++++++
 1 file changed, 34 insertions(+)

diff --git a/auto_round/utils/model.py b/auto_round/utils/model.py
index 56ead72d2..35ec2479f 100644
--- a/auto_round/utils/model.py
+++ b/auto_round/utils/model.py
@@ -1058,6 +1058,40 @@ def _to_model_dtype(model, model_dtype):
 
 
+def get_attr(module, key):
+    """Get attribute from module by key name.
+
+    This function can access both modules and their attributes (like weight, bias).
+    For accessing only modules, prefer using get_module which uses PyTorch's native API.
+
+    Args:
+        module (torch.nn.Module): original model
+        key (str): attribute name (e.g., "layer.weight", "layer.bias")
+    """
+    name_list = key.split(".")
+    for name in name_list:
+        module = getattr(module, name, None)
+    return module
+
+
+def set_attr(model, key, new_attr):
+    """Set attribute into model by key name.
+
+    This function can set both modules and their attributes (like weight, bias).
+    For setting only modules, prefer using set_module which uses PyTorch's native API.
+
+    Args:
+        model (torch.nn.Module): original model
+        key (str): attribute name (e.g., "layer.weight", "layer.bias")
+        new_attr (object): new attribute to be inserted
+    """
+    module = model
+    name_list = key.split(".")
+    for name in name_list[:-1]:
+        if hasattr(module, name):
+            module = getattr(module, name)
+    setattr(module, name_list[-1], new_attr)
+
 
 def get_module(module, key):
     """Get module from model by key name using PyTorch native API.
 

From cc41dd8334f054610729c2fbed6834ea1c826f18 Mon Sep 17 00:00:00 2001
From: Adithyan Madhu
Date: Fri, 30 Jan 2026 10:20:42 +0530
Subject: [PATCH 4/8] restored get_attr and set_attr

---
 auto_round/utils/model.py | 19 ++++++++++++++++---
 1 file changed, 16 insertions(+), 3 deletions(-)

diff --git a/auto_round/utils/model.py b/auto_round/utils/model.py
index 35ec2479f..2d04dd0ac 100644
--- a/auto_round/utils/model.py
+++ b/auto_round/utils/model.py
@@ -1067,10 +1067,17 @@ def get_attr(module, key):
     Args:
         module (torch.nn.Module): original model
         key (str): attribute name (e.g., "layer.weight", "layer.bias")
+
+    Raises:
+        AttributeError: If any attribute in the path is missing
     """
     name_list = key.split(".")
     for name in name_list:
-        module = getattr(module, name, None)
+        if not hasattr(module, name):
+            raise AttributeError(
+                f"Attribute '{name}' not found while resolving '{key}'"
+            )
+        module = getattr(module, name)
     return module
 
 
@@ -1084,12 +1091,18 @@ def set_attr(model, key, new_attr):
         model (torch.nn.Module): original model
         key (str): attribute name (e.g., "layer.weight", "layer.bias")
        new_attr (object): new attribute to be inserted
+
+    Raises:
+        AttributeError: If any intermediate attribute in the path is missing
     """
     module = model
     name_list = key.split(".")
     for name in name_list[:-1]:
-        if hasattr(module, name):
-            module = getattr(module, name)
+        if not hasattr(module, name):
+            raise AttributeError(
+                f"Attribute '{name}' not found while resolving '{key}'"
+            )
+        module = getattr(module, name)
     setattr(module, name_list[-1], new_attr)
 
 

From 7ea2a8b704d77d7fe688208188f1b4dc7fbfb28b Mon Sep 17 00:00:00 2001
From: Adithyan Madhu
Date: Fri, 30 Jan 2026 10:28:03 +0530
Subject: [PATCH 5/8] restored get_attr and set_attr

---
 test_fp8_ignore.py | 77 ----------------------------------------------
 1 file changed, 77 deletions(-)
 delete mode 100644 test_fp8_ignore.py

diff --git a/test_fp8_ignore.py b/test_fp8_ignore.py
deleted file mode 100644
index fc834324b..000000000
--- a/test_fp8_ignore.py
+++ /dev/null
@@ -1,77 +0,0 @@
-import torch
-import torch.nn as nn
-
-# -----------------------------
-# Mock FP8 Linear layer
-# -----------------------------
-class FP8Linear(nn.Linear):
-    """Simulates a FP8-native linear layer"""
-    def __init__(self, in_features, out_features):
-        super().__init__(in_features, out_features)
-        # pretend this layer is FP8
-        self.fp8 = True
-
-# -----------------------------
-# Mock model
-# -----------------------------
-class MockModel(nn.Module):
-    def __init__(self):
-        super().__init__()
-        self.fc1 = nn.Linear(10, 10)
-        self.attn1 = FP8Linear(10, 10)  # should be auto-detected
-        self.mlp1 = FP8Linear(10, 10)  # should be ignored via ignore_layers
-        self.fc2 = nn.Linear(10, 10)
-
-    def forward(self, x):
-        x = self.fc1(x)
-        x = self.attn1(x)
-        x = self.mlp1(x)
-        x = self.fc2(x)
-        return x
-
-# -----------------------------
-# Mock helper for FP8 detection
-# -----------------------------
-def is_fp8_linear(layer):
-    return hasattr(layer, "fp8") and layer.fp8
-
-# -----------------------------
-# Your revised get_fp_layer_names function
-# -----------------------------
-def get_fp_layer_names(model: nn.Module, ignore_layers: str):
-    not_to_quantized_layers = []
-
-    # Auto-detect FP8 layers
-    for n, m in model.named_modules():
-        if is_fp8_linear(m):
-            not_to_quantized_layers.append(n)
-            print(f"Auto-detected FP8 layer to ignore: {n}")
-
-    # this Processes user-specified ignore_layers
-    if ignore_layers:
-        ignore_list = ignore_layers.replace(" ", "").split(",")
-        for fp_layer in ignore_list:
-            if not fp_layer:
-                continue
-            # matching any layer whose name has the pattern
-            for n, _ in model.named_modules():  # match by name only
-                match_name = fp_layer
-                if fp_layer[-1].isdigit():
-                    match_name += "."
-                if match_name in n:
-                    if n not in not_to_quantized_layers:  # avoiding duplicates
-                        not_to_quantized_layers.append(n)
-                        print(f"User-specified ignore layer matched: {n}")
-
-    print(f"Final not_to_quantized_layers: {not_to_quantized_layers}")
-    return not_to_quantized_layers
-
-# -----------------------------
-# Test the function
-# -----------------------------
-model = MockModel()
-ignored_layers = get_fp_layer_names(model, ignore_layers="mlp1")
-
-# Expected output:
-# - attn1 (auto-detected FP8)
-# - mlp1 (ignored by user)

From d698bcaa8f6c5f97c3106b89186fd5fb15203cec Mon Sep 17 00:00:00 2001
From: "pre-commit-ci[bot]"
 <66853113+pre-commit-ci[bot]@users.noreply.github.com>
Date: Fri, 30 Jan 2026 04:59:19 +0000
Subject: [PATCH 6/8] [pre-commit.ci] auto fixes from pre-commit.com hooks

for more information, see https://pre-commit.ci
---
 auto_round/utils/model.py | 19 ++++++-------------
 1 file changed, 6 insertions(+), 13 deletions(-)

diff --git a/auto_round/utils/model.py b/auto_round/utils/model.py
index 2d04dd0ac..8916f404a 100644
--- a/auto_round/utils/model.py
+++ b/auto_round/utils/model.py
@@ -1055,35 +1055,30 @@ def _to_model_dtype(model, model_dtype):
     return model
 
 
-
-
-
 def get_attr(module, key):
     """Get attribute from module by key name.
-    
+
     This function can access both modules and their attributes (like weight, bias).
     For accessing only modules, prefer using get_module which uses PyTorch's native API.
 
     Args:
         module (torch.nn.Module): original model
         key (str): attribute name (e.g., "layer.weight", "layer.bias")
-    
+
     Raises:
         AttributeError: If any attribute in the path is missing
     """
     name_list = key.split(".")
     for name in name_list:
         if not hasattr(module, name):
-            raise AttributeError(
-                f"Attribute '{name}' not found while resolving '{key}'"
-            )
+            raise AttributeError(f"Attribute '{name}' not found while resolving '{key}'")
         module = getattr(module, name)
     return module
 
 
 def set_attr(model, key, new_attr):
     """Set attribute into model by key name.
-    
+
     This function can set both modules and their attributes (like weight, bias).
     For setting only modules, prefer using set_module which uses PyTorch's native API.
 
@@ -1091,7 +1086,7 @@ def set_attr(model, key, new_attr):
         model (torch.nn.Module): original model
         key (str): attribute name (e.g., "layer.weight", "layer.bias")
         new_attr (object): new attribute to be inserted
-    
+
     Raises:
         AttributeError: If any intermediate attribute in the path is missing
     """
@@ -1099,9 +1094,7 @@ def set_attr(model, key, new_attr):
     module = model
     name_list = key.split(".")
     for name in name_list[:-1]:
         if not hasattr(module, name):
-            raise AttributeError(
-                f"Attribute '{name}' not found while resolving '{key}'"
-            )
+            raise AttributeError(f"Attribute '{name}' not found while resolving '{key}'")
         module = getattr(module, name)
     setattr(module, name_list[-1], new_attr)

From 9ba1488ebf81d33cf722b5f1d10dbe8b26ee0652 Mon Sep 17 00:00:00 2001
From: Adithyan Madhu
Date: Mon, 9 Feb 2026 10:06:33 +0530
Subject: [PATCH 7/8] Adopt PyTorch native get_submodule/set_submodule for
 module operations

---
 auto_round/utils/model.py | 18 ++++++------------
 1 file changed, 6 insertions(+), 12 deletions(-)

diff --git a/auto_round/utils/model.py b/auto_round/utils/model.py
index 9c8c6b641..0a2b86dca 100644
--- a/auto_round/utils/model.py
+++ b/auto_round/utils/model.py
@@ -930,15 +930,13 @@ def get_attr(module, key):
     Args:
         module (torch.nn.Module): original model
         key (str): attribute name (e.g., "layer.weight", "layer.bias")
-
-    Raises:
-        AttributeError: If any attribute in the path is missing
+
+    Returns:
+        The attribute value, or None if not found
     """
     name_list = key.split(".")
     for name in name_list:
-        if not hasattr(module, name):
-            raise AttributeError(f"Attribute '{name}' not found while resolving '{key}'")
-        module = getattr(module, name)
+        module = getattr(module, name, None)
     return module
 
 
@@ -952,16 +950,12 @@ def set_attr(model, key, new_attr):
         model (torch.nn.Module): original model
         key (str): attribute name (e.g., "layer.weight", "layer.bias")
         new_attr (object): new attribute to be inserted
-
-    Raises:
-        AttributeError: If any intermediate attribute in the path is missing
     """
     module = model
     name_list = key.split(".")
     for name in name_list[:-1]:
-        if not hasattr(module, name):
-            raise AttributeError(f"Attribute '{name}' not found while resolving '{key}'")
-        module = getattr(module, name)
+        if hasattr(module, name):
+            module = getattr(module, name)
     setattr(module, name_list[-1], new_attr)

From 84655d7e101bef5f57cba9d0e3ce1b9f2071eb3f Mon Sep 17 00:00:00 2001
From: "pre-commit-ci[bot]"
 <66853113+pre-commit-ci[bot]@users.noreply.github.com>
Date: Mon, 9 Feb 2026 04:43:50 +0000
Subject: [PATCH 8/8] [pre-commit.ci] auto fixes from pre-commit.com hooks

for more information, see https://pre-commit.ci
---
 auto_round/utils/model.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/auto_round/utils/model.py b/auto_round/utils/model.py
index 0a2b86dca..0067ba9e6 100644
--- a/auto_round/utils/model.py
+++ b/auto_round/utils/model.py
@@ -930,7 +930,7 @@ def get_attr(module, key):
     Args:
         module (torch.nn.Module): original model
         key (str): attribute name (e.g., "layer.weight", "layer.bias")
-    
+
     Returns:
         The attribute value, or None if not found
     """
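--

A minimal usage sketch of the split this series lands on (not part of the
patches; git am ignores text after the last hunk). The two helpers below are
re-typed copies of the final get_attr/get_module bodies from
auto_round/utils/model.py; the toy model and key names are illustrative
assumptions only, not code from the repository:

    import torch.nn as nn

    def get_module(module, key):
        # Modules only: delegates to PyTorch's native API, which raises
        # AttributeError for unknown paths and for non-module targets.
        return module.get_submodule(key)

    def get_attr(module, key):
        # Modules *and* plain attributes (weight, bias, ...); returns None
        # instead of raising when any component of the path is missing.
        for name in key.split("."):
            module = getattr(module, name, None)
        return module

    model = nn.Module()
    model.head = nn.Linear(4, 2)  # registered as a submodule by __setattr__

    print(get_module(model, "head"))             # Linear(in_features=4, out_features=2, bias=True)
    print(get_attr(model, "head.weight").shape)  # torch.Size([2, 4]) -- a Parameter, not a Module
    print(get_attr(model, "head.missing"))       # None
    # get_module(model, "head.weight") raises AttributeError, because
    # get_submodule resolves only nn.Module children, never parameters.

This also illustrates why set_attr keeps its manual getattr walk instead of
set_submodule: assigning to a dotted path like "head.weight" must be able to
target a non-module attribute, which the native module API rejects.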