Skip to content
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 4 additions & 0 deletions src/transformers/models/stablelm/configuration_stablelm.py
Original file line number Diff line number Diff line change
Expand Up @@ -87,6 +87,8 @@ class StableLmConfig(PreTrainedConfig, RotaryEmbeddingConfigMixin):
The id of the `BOS` token in the vocabulary.
eos_token_id (int, *optional*, defaults to 0):
The id of the `EOS` token in the vocabulary.
pad_token_id (int, *optional*):
The id of the `PAD` token in the vocabulary.

Example:

Expand Down Expand Up @@ -122,6 +124,7 @@ def __init__(
attention_dropout: float | None = 0.0,
bos_token_id: int | None = 0,
eos_token_id: int | None = 0,
pad_token_id: int | None = None,
**kwargs,
):
self.vocab_size = vocab_size
Expand All @@ -147,6 +150,7 @@ def __init__(

self.bos_token_id = bos_token_id
self.eos_token_id = eos_token_id
self.pad_token_id = pad_token_id
self.tie_word_embeddings = tie_word_embeddings
super().__init__(**kwargs)

Expand Down
10 changes: 10 additions & 0 deletions tests/models/stablelm/test_modeling_stablelm.py
Original file line number Diff line number Diff line change
Expand Up @@ -47,6 +47,16 @@ class StableLmModelTester(CausalLMModelTester):
@require_torch
class StableLmModelTest(CausalLMModelTest, unittest.TestCase):
model_tester_class = StableLmModelTester
def test_config_has_pad_token_id(self):
    """Regression test for #43572: StableLmConfig must expose a pad_token_id attribute."""
    from transformers import StableLmConfig

    cfg = StableLmConfig()
    self.assertTrue(hasattr(cfg, "pad_token_id"))

    # Instantiating the model from the config must not raise AttributeError.
    instance = StableLmModel(cfg)
    self.assertIsNotNone(instance)


@require_torch
Expand Down