
Commit a187b85

Remove tied weights from internal attribute if they are not tied (#42871)
fix
1 parent: 64c12fd

2 files changed: 3 additions, 3 deletions

src/transformers/modeling_utils.py

Lines changed: 3 additions & 1 deletion
@@ -2398,13 +2398,15 @@ def tie_weights(self, missing_keys: Optional[set[str]] = None, recompute_mapping
             source_is_there = source_param_name not in missing_keys
             target_is_there = target_param_name not in missing_keys
             # Both are already present -> it means the config is wrong and do not reflect the actual
-            # checkpoint -> let's raise a warning and do nothing
+            # checkpoint -> let's raise a warning and NOT tie them
             if source_is_there and target_is_there:
                 logger.warning(
                     f"The tied weights mapping and config for this model specifies to tie {source_param_name} to "
                     f"{target_param_name}, but both are present in the checkpoints, so we will NOT tie them. "
                     "You should update the config with `tie_word_embeddings=False` to silence this warning"
                 )
+                # Remove from internal attribute to correctly reflect actual tied weights
+                self.all_tied_weights_keys.pop(target_param_name)
                 # Skip to next iteration
                 continue
             # We're missing the source but we have the target -> we swap them, tying the parameter that exists
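
For illustration, a minimal standalone sketch of the behavior after this fix, assuming `all_tied_weights_keys` maps a target parameter name to its source parameter name and `missing_keys` is the set of parameters absent from the loaded checkpoint; `prune_untied_weights` and the example parameter names are hypothetical helpers for this sketch, not the actual `tie_weights` implementation:

import logging

logger = logging.getLogger(__name__)

def prune_untied_weights(all_tied_weights_keys: dict, missing_keys: set) -> None:
    # Hypothetical sketch of the commit's logic, not the real tie_weights method.
    # Iterate over a snapshot so entries can be popped while looping.
    for target_param_name, source_param_name in list(all_tied_weights_keys.items()):
        source_is_there = source_param_name not in missing_keys
        target_is_there = target_param_name not in missing_keys
        if source_is_there and target_is_there:
            # Both weights were loaded from the checkpoint, so the config's
            # tying claim is wrong: warn and leave the parameters independent.
            logger.warning(
                f"Config says to tie {source_param_name} to {target_param_name}, "
                "but both are present in the checkpoint, so they will NOT be tied."
            )
            # The fix in this commit: also drop the entry so the internal
            # mapping reflects the weights that are actually tied.
            all_tied_weights_keys.pop(target_param_name)

# Example: both keys present in the checkpoint -> the entry is removed.
tied = {"lm_head.weight": "model.embed_tokens.weight"}
prune_untied_weights(tied, missing_keys=set())
assert tied == {}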

src/transformers/models/sam3_video/modeling_sam3_video.py

Lines changed: 0 additions & 2 deletions
@@ -505,8 +505,6 @@ class Sam3VideoPreTrainedModel(PreTrainedModel):
 
 @auto_docstring
 class Sam3VideoModel(Sam3VideoPreTrainedModel):
-    all_tied_weights_keys = {}
-
     def __init__(self, config: Sam3VideoConfig):
         super().__init__(config)
         self.config = config
