Fix _tied_weights_keys mapping for Transformers v5

#10
Files changed (1)
  1. modeling_exaone.py (+1 −1)
modeling_exaone.py CHANGED
@@ -988,7 +988,7 @@ class ExaoneForCausalLM(ExaonePreTrainedModel, GenerationMixin):
     EXAONE_START_DOCSTRING,
 )
 class ExaoneForCausalLM(ExaonePreTrainedModel, GenerationMixin):
-    _tied_weights_keys = ["lm_head.weight"]
+    _tied_weights_keys = {"lm_head.weight": "transformer.wte.weight"}
 
     def __init__(self, config):
         super().__init__(config)