# NOTE(review): commented-out dead code (module-replacement loop), reformatted
# from a single collapsed line for readability; presumably superseded by a live
# implementation elsewhere — confirm, then delete rather than keep as a comment.
# for name, child in list(model.named_children()):
#     if required_layers and name not in required_layers:
#         continue
#     if len(list(child.children())) > 1:
#         self.replace_module_with_wrapper(child, required_layers)
#         continue
#     if (
#         not isinstance(child, torch.nn.Sequential)
#         and len(list(child.children())) == 0
#     ):
#         wrapped = ModuleReplacements.get_replacement(child)
#         if not wrapped:
#             self.logger.info(f"Please register {type(child)}")
#             continue
#         setattr(child, "activation_dtype", global_activation_dtype)
#         setattr(child, "parameter_dtype", global_param_dtype)
#         setattr(child, "parameter_observer", global_weight_observer)
#         setattr(child, "activation_observer", global_activation_observer)
#         set_nested_attr(model, name, wrapped(old_module=child))