From bccda5244824f98814819ae150bdbc2d9fc25295 Mon Sep 17 00:00:00 2001
From: Aryan
Date: Thu, 24 Jul 2025 22:01:30 +0200
Subject: [PATCH 1/2] update

---
 src/diffusers/hooks/group_offloading.py | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/src/diffusers/hooks/group_offloading.py b/src/diffusers/hooks/group_offloading.py
index 6c89101f5e98..78993ca91311 100644
--- a/src/diffusers/hooks/group_offloading.py
+++ b/src/diffusers/hooks/group_offloading.py
@@ -367,7 +367,8 @@ def __init__(self):
     def initialize_hook(self, module):
         def make_execution_order_update_callback(current_name, current_submodule):
             def callback():
-                logger.debug(f"Adding {current_name} to the execution order")
+                if not torch.compiler.is_compiling():
+                    logger.debug(f"Adding {current_name} to the execution order")
                 self.execution_order.append((current_name, current_submodule))

             return callback

From 66e17e65bb28df8088dad4c127003719e94fac46 Mon Sep 17 00:00:00 2001
From: Aryan
Date: Thu, 24 Jul 2025 22:08:58 +0200
Subject: [PATCH 2/2] update

---
 src/diffusers/hooks/group_offloading.py | 16 +++++++++-------
 1 file changed, 9 insertions(+), 7 deletions(-)

diff --git a/src/diffusers/hooks/group_offloading.py b/src/diffusers/hooks/group_offloading.py
index 78993ca91311..1248bedf861c 100644
--- a/src/diffusers/hooks/group_offloading.py
+++ b/src/diffusers/hooks/group_offloading.py
@@ -405,12 +405,13 @@ def post_forward(self, module, output):
         # if the missing layers end up being executed in the future.
         if execution_order_module_names != self._layer_execution_tracker_module_names:
             unexecuted_layers = list(self._layer_execution_tracker_module_names - execution_order_module_names)
-            logger.warning(
-                "It seems like some layers were not executed during the forward pass. This may lead to problems when "
-                "applying lazy prefetching with automatic tracing and lead to device-mismatch related errors. Please "
-                "make sure that all layers are executed during the forward pass. The following layers were not executed:\n"
-                f"{unexecuted_layers=}"
-            )
+            if not torch.compiler.is_compiling():
+                logger.warning(
+                    "It seems like some layers were not executed during the forward pass. This may lead to problems when "
+                    "applying lazy prefetching with automatic tracing and lead to device-mismatch related errors. Please "
+                    "make sure that all layers are executed during the forward pass. The following layers were not executed:\n"
+                    f"{unexecuted_layers=}"
+                )

         # Remove the layer execution tracker hooks from the submodules
         base_module_registry = module._diffusers_hook
@@ -438,7 +439,8 @@ def post_forward(self, module, output):
         for i in range(num_executed - 1):
             name1, _ = self.execution_order[i]
             name2, _ = self.execution_order[i + 1]
-            logger.debug(f"Applying lazy prefetch group offloading from {name1} to {name2}")
+            if not torch.compiler.is_compiling():
+                logger.debug(f"Applying lazy prefetch group offloading from {name1} to {name2}")
             group_offloading_hooks[i].next_group = group_offloading_hooks[i + 1].group
             group_offloading_hooks[i].next_group.onload_self = False