1 parent 04fb8eb commit 0d80f62
torchtitan/experiments/transformers_backend/model/model.py
@@ -171,7 +171,8 @@ def _init_weights_patched(self, module):
 
     if isinstance(module, layer_idx_classes):
         if not hasattr(module, "layer_idx"):
-            return
+            raise ValueError(f"Module {module} does not have a layer_idx attribute")
+
         layer_idx = module.layer_idx
 
     if hasattr(config, "depth_init") and config.depth_init:
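
For context, the diff turns a silent skip into a hard failure: a module belonging to one of the layer-indexed classes that lacks a layer_idx attribute previously fell through weight initialization unchanged, whereas it now raises a ValueError. Below is a minimal, self-contained sketch of that guard; the helper name _require_layer_idx and the use of nn.Linear as the layer-indexed class are illustrative assumptions, not code from torchtitan.

# Minimal sketch of the guard added in this commit (helper name and
# layer classes are illustrative, not taken from torchtitan itself).
import torch.nn as nn

def _require_layer_idx(module, layer_idx_classes):
    # Modules of a layer-indexed class must carry layer_idx; raising
    # here replaces the old silent `return` that quietly skipped
    # depth-scaled weight initialization for such modules.
    if isinstance(module, layer_idx_classes):
        if not hasattr(module, "layer_idx"):
            raise ValueError(f"Module {module} does not have a layer_idx attribute")
        return module.layer_idx
    return None

# Example: a layer tagged with its depth index passes the check.
layer = nn.Linear(4, 4)
layer.layer_idx = 3
assert _require_layer_idx(layer, (nn.Linear,)) == 3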