2 changes: 1 addition & 1 deletion src/transformers/configuration_utils.py
@@ -368,7 +368,7 @@ def __init__(self, **kwargs):
 
     @property
     def name_or_path(self) -> str:
-        return self._name_or_path
+        return getattr(self, "_name_or_path", None)
 
     @name_or_path.setter
     def name_or_path(self, value):
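The fallback matters for configs whose `__init__` never calls `super().__init__()`, so `_name_or_path` is never set on the instance. A minimal sketch of the failure mode (the `BrokenConfig` name is hypothetical, not part of this PR):

    from transformers import PretrainedConfig

    class BrokenConfig(PretrainedConfig):
        model_type = "custom"

        def __init__(self, attribute=1, **kwargs):
            # forgets to call super().__init__(**kwargs), so _name_or_path is never set
            self.attribute = attribute

    config = BrokenConfig()
    print(config.name_or_path)  # before this fix: AttributeError; after: None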
4 changes: 2 additions & 2 deletions src/transformers/modeling_utils.py
@@ -621,10 +621,10 @@ def tie_weights(self):
         weights instead.
         """
         output_embeddings = self.get_output_embeddings()
-        if output_embeddings is not None and self.config.tie_word_embeddings:
+        if output_embeddings is not None and getattr(self.config, "tie_word_embeddings", True):
             self._tie_or_clone_weights(output_embeddings, self.get_input_embeddings())
 
-        if self.config.is_encoder_decoder and self.config.tie_encoder_decoder:
+        if getattr(self.config, "is_encoder_decoder", False) and getattr(self.config, "tie_encoder_decoder", False):
            if hasattr(self, self.base_model_prefix):
                self = getattr(self, self.base_model_prefix)
            self._tie_encoder_decoder_weights(self.encoder, self.decoder, self.base_model_prefix)
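The `getattr` fallbacks match what `PretrainedConfig.__init__` would have set anyway, so behavior is unchanged for well-formed configs. A quick sanity check, assuming the current `PretrainedConfig` defaults:

    from transformers import PretrainedConfig

    config = PretrainedConfig()
    assert config.tie_word_embeddings is True   # fallback used above is True
    assert config.is_encoder_decoder is False   # fallback is False
    assert config.tie_encoder_decoder is False  # fallback is False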
13 changes: 11 additions & 2 deletions tests/test_modeling_common.py
@@ -59,14 +59,14 @@
 
 sys.path.append(str(Path(__file__).parent.parent / "utils"))
 
-from test_module.custom_configuration import CustomConfig  # noqa E402
+from test_module.custom_configuration import CustomConfig, NoSuperInitConfig  # noqa E402
 
 
 if is_torch_available():
     import torch
     from torch import nn
 
-    from test_module.custom_modeling import CustomModel
+    from test_module.custom_modeling import CustomModel, NoSuperInitModel
     from transformers import (
         BERT_PRETRAINED_MODEL_ARCHIVE_LIST,
         MODEL_FOR_CAUSAL_IMAGE_MODELING_MAPPING,
@@ -2091,6 +2091,15 @@ def test_model_from_pretrained_torch_dtype(self):
         model = AutoModel.from_pretrained(TINY_T5, torch_dtype=torch.float16)
         self.assertEqual(model.dtype, torch.float16)
 
+    def test_no_super_init_config_and_model(self):
+        config = NoSuperInitConfig(attribute=32)
+        model = NoSuperInitModel(config)
+
+        with tempfile.TemporaryDirectory() as tmp_dir:
+            model.save_pretrained(tmp_dir)
+
+            model = NoSuperInitModel.from_pretrained(tmp_dir)
+
 
 @require_torch
 @is_staging_test
7 changes: 7 additions & 0 deletions utils/test_module/custom_configuration.py
@@ -7,3 +7,10 @@ class CustomConfig(PretrainedConfig):
     def __init__(self, attribute=1, **kwargs):
         self.attribute = attribute
         super().__init__(**kwargs)
+
+
+class NoSuperInitConfig(PretrainedConfig):
+    model_type = "custom"
+
+    def __init__(self, attribute=1, **kwargs):
+        self.attribute = attribute
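`NoSuperInitConfig` deliberately skips the `super().__init__()` call to reproduce user-written configs that forget it, so none of the usual instance attributes exist. A small demonstration of why the `getattr` guards above are needed (assuming, as in the test suite, that `utils/` is on `sys.path` and that these attributes are only set in `PretrainedConfig.__init__`):

    from test_module.custom_configuration import NoSuperInitConfig

    config = NoSuperInitConfig(attribute=32)
    assert not hasattr(config, "_name_or_path")        # the name_or_path getter cannot assume it
    assert not hasattr(config, "tie_word_embeddings")  # nor can tie_weights()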
17 changes: 16 additions & 1 deletion utils/test_module/custom_modeling.py
@@ -2,7 +2,7 @@
 
 from transformers import PreTrainedModel
 
-from .custom_configuration import CustomConfig
+from .custom_configuration import CustomConfig, NoSuperInitConfig
 
 
 class CustomModel(PreTrainedModel):
@@ -18,3 +18,18 @@ def forward(self, x):
 
     def _init_weights(self, module):
         pass
+
+
+class NoSuperInitModel(PreTrainedModel):
+    config_class = NoSuperInitConfig
+    base_model_prefix = "custom"
+
+    def __init__(self, config):
+        super().__init__(config)
+        self.linear = torch.nn.Linear(config.attribute, config.attribute)
+
+    def forward(self, x):
+        return self.linear(x)
+
+    def _init_weights(self, module):
+        pass