Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions setup.py
Original file line number Diff line number Diff line change
Expand Up @@ -132,6 +132,7 @@
"pandas<2.3.0", # `datasets` requires `pandas` while `pandas==2.3.0` has issues with CircleCI on 2025/06/05
"packaging>=20.0",
"parameterized>=0.9", # older version of parameterized cause pytest collection to fail on .expand
"peft>=0.18.0",
"phonemizer",
"protobuf",
"psutil",
Expand Down
1 change: 1 addition & 0 deletions src/transformers/dependency_versions_table.py
Original file line number Diff line number Diff line change
Expand Up @@ -42,6 +42,7 @@
"pandas": "pandas<2.3.0",
"packaging": "packaging>=20.0",
"parameterized": "parameterized>=0.9",
"peft": "peft>=0.18.0",
"phonemizer": "phonemizer",
"protobuf": "protobuf",
"psutil": "psutil",
Expand Down
76 changes: 7 additions & 69 deletions src/transformers/integrations/peft.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,15 +12,12 @@
# See the License for the specific language governing permissions and
# limitations under the License.

import importlib.metadata
import inspect
import json
import os
import re
from typing import Any, Literal

from packaging import version

from ..utils import (
CONFIG_NAME,
cached_file,
Expand All @@ -43,7 +40,7 @@
from accelerate.utils import get_balanced_memory, infer_auto_device_map

# Minimum PEFT version supported for the integration
MIN_PEFT_VERSION = "0.5.0"
MIN_PEFT_VERSION = "0.18.0"


logger = logging.get_logger(__name__)
Expand Down Expand Up @@ -79,7 +76,7 @@ class PeftAdapterMixin:
prompt tuning, prompt learning are out of scope as these adapters are not "injectable" into a torch module. For
using these methods, please refer to the usage guide of PEFT library.

With this mixin, if the correct PEFT version is installed, it is possible to:
With this mixin, if the correct PEFT version is installed (>= 0.18.0), it is possible to:

- Load an adapter stored on a local path or in a remote Hub repository, and inject it in the model
- Attach new adapters in the model and train them with Trainer or by your own.
Expand Down Expand Up @@ -157,7 +154,6 @@ def load_adapter(
dicts.
low_cpu_mem_usage (`bool`, *optional*, defaults to `False`):
Reduce memory usage while loading the PEFT adapter. This should also speed up the loading process.
Requires PEFT version 0.13.0 or higher.
is_trainable (`bool`, *optional*, defaults to `False`):
Whether the adapter should be trainable or not. If `False`, the adapter will be frozen and can only be
used for inference.
Expand Down Expand Up @@ -208,9 +204,6 @@ def load_adapter(
hotswap = hotswap_enabled and not_first_adapter

if hotswap:
min_version_hotswap = "0.15.0"
if version.parse(importlib.metadata.version("peft")) < version.parse(min_version_hotswap):
raise ValueError(f"To hotswap the adapter, you need PEFT >= v{min_version_hotswap}.")
if (not self._hf_peft_config_loaded) or (adapter_name not in self.peft_config):
raise ValueError(
"To hotswap an adapter, there must already be an existing adapter with the same adapter name."
Expand All @@ -223,15 +216,7 @@ def load_adapter(
key_mapping = adapter_kwargs.pop("key_mapping", None) if adapter_kwargs is not None else None
if key_mapping is None and any(allowed_name in self.__class__.__name__.lower() for allowed_name in VLMS):
key_mapping = self._checkpoint_conversion_mapping
if low_cpu_mem_usage:
min_version_lcmu = "0.13.0"
if version.parse(importlib.metadata.version("peft")) >= version.parse(min_version_lcmu):
peft_load_kwargs["low_cpu_mem_usage"] = low_cpu_mem_usage
else:
raise ValueError(
"The version of PEFT you are using does not support `low_cpu_mem_usage` yet, "
f"please install PEFT >= {min_version_lcmu}."
)
peft_load_kwargs["low_cpu_mem_usage"] = low_cpu_mem_usage

adapter_name = adapter_name if adapter_name is not None else "default"
if adapter_kwargs is None:
Expand Down Expand Up @@ -427,10 +412,6 @@ def enable_peft_hotswap(
- "warn": issue a warning
- "ignore": do nothing
"""
min_version_hotswap = "0.15.0"
if version.parse(importlib.metadata.version("peft")) < version.parse(min_version_hotswap):
raise ValueError(f"To hotswap the adapter, you need PEFT >= v{min_version_hotswap}.")

if getattr(self, "peft_config", {}):
if check_compiled == "error":
raise RuntimeError("Call `enable_peft_hotswap` before loading the first adapter.")
Expand Down Expand Up @@ -519,11 +500,7 @@ def set_adapter(self, adapter_name: list[str] | str) -> None:

for _, module in self.named_modules():
if isinstance(module, (BaseTunerLayer, ModulesToSaveWrapper)):
# For backward compatibility with previous PEFT versions
if hasattr(module, "set_adapter"):
module.set_adapter(adapter_name)
else:
module.active_adapter = adapter_name
module.set_adapter(adapter_name)
_adapters_has_been_set = True

if not _adapters_has_been_set:
Expand All @@ -548,11 +525,7 @@ def disable_adapters(self) -> None:

for _, module in self.named_modules():
if isinstance(module, (BaseTunerLayer, ModulesToSaveWrapper)):
# The recent version of PEFT need to call `enable_adapters` instead
if hasattr(module, "enable_adapters"):
module.enable_adapters(enabled=False)
else:
module.disable_adapters = True
module.enable_adapters(enabled=False)

def enable_adapters(self) -> None:
"""
Expand All @@ -570,11 +543,7 @@ def enable_adapters(self) -> None:

for _, module in self.named_modules():
if isinstance(module, BaseTunerLayer):
# The recent version of PEFT need to call `enable_adapters` instead
if hasattr(module, "enable_adapters"):
module.enable_adapters(enabled=True)
else:
module.disable_adapters = False
module.enable_adapters(enabled=True)

def active_adapters(self) -> list[str]:
"""
Expand All @@ -589,9 +558,6 @@ def active_adapters(self) -> list[str]:
"""
check_peft_version(min_version=MIN_PEFT_VERSION)

if not is_peft_available():
Copy link
Copy Markdown
Member Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I deleted this check because `check_peft_version` already verifies that PEFT is available, making this check redundant. This change is not strictly related to the minimum version bump.

raise ImportError("PEFT is not available. Please install PEFT to use this function: `pip install peft`.")

if not self._hf_peft_config_loaded:
raise ValueError("No adapter loaded. Please load an adapter first.")

Expand Down Expand Up @@ -703,39 +669,11 @@ def delete_adapter(self, adapter_names: list[str] | str) -> None:
"""

check_peft_version(min_version=MIN_PEFT_VERSION)
min_version_delete_adapter = "0.18.0"

if not self._hf_peft_config_loaded:
raise ValueError("No adapter loaded. Please load an adapter first.")

# TODO: delete old version once support for PEFT < 0.18.0 is dropped
def old_delete_adapter(model, adapter_name, prefix=None):
from peft.tuners.tuners_utils import BaseTunerLayer
from peft.utils import ModulesToSaveWrapper

has_modules_to_save = False
for module in model.modules():
if isinstance(module, ModulesToSaveWrapper):
has_modules_to_save |= True
continue
if isinstance(module, BaseTunerLayer):
if hasattr(module, "delete_adapter"):
module.delete_adapter(adapter_name)
else:
raise ValueError(
"The version of PEFT you are using is not compatible, please use a version that is greater than 0.6.1"
)

if has_modules_to_save:
logger.warning(
"The deleted adapter contains modules_to_save, which could not be deleted. For this to work, PEFT version "
f">= {min_version_delete_adapter} is required."
)

if version.parse(importlib.metadata.version("peft")) >= version.parse(min_version_delete_adapter):
from peft.functional import delete_adapter
else:
delete_adapter = old_delete_adapter
from peft.functional import delete_adapter

if isinstance(adapter_names, str):
adapter_names = [adapter_names]
Expand Down
52 changes: 18 additions & 34 deletions src/transformers/trainer.py
Original file line number Diff line number Diff line change
Expand Up @@ -63,6 +63,7 @@
from .hyperparameter_search import ALL_HYPERPARAMETER_SEARCH_BACKENDS, default_hp_search_backend
from .image_processing_utils import BaseImageProcessor
from .integrations.deepspeed import deepspeed_init, deepspeed_load_checkpoint, is_deepspeed_available
from .integrations.peft import MIN_PEFT_VERSION
from .integrations.tpu import tpu_spmd_dataloader
from .modelcard import TrainingSummary
from .modeling_utils import PreTrainedModel, unwrap_model
Expand Down Expand Up @@ -205,7 +206,7 @@
from .trainer_pt_utils import smp_forward_backward, smp_forward_only, smp_gather, smp_nested_concat

if is_peft_available():
from peft import PeftModel
from peft import PeftMixedModel, PeftModel

if is_accelerate_available():
from accelerate import Accelerator, skip_first_batches
Expand All @@ -228,12 +229,7 @@

def _is_peft_model(model):
if is_peft_available():
classes_to_check = (PeftModel,)
# Here we also check if the model is an instance of `PeftMixedModel` introduced in peft>=0.7.0: https://github.com/huggingface/transformers/pull/28321
if version.parse(importlib.metadata.version("peft")) >= version.parse("0.7.0"):
from peft import PeftMixedModel

classes_to_check = (*classes_to_check, PeftMixedModel)
classes_to_check = (PeftModel, PeftMixedModel)
return isinstance(model, classes_to_check)
return False

Expand Down Expand Up @@ -2809,20 +2805,13 @@ def _load_from_checkpoint(self, resume_from_checkpoint, model=None):

# Load adapters following PR # 24096
elif _is_peft_model(model):
# If train a model using PEFT & LoRA, assume that adapter have been saved properly.
# TODO: in the future support only specific min PEFT versions
if (hasattr(model, "active_adapter") or hasattr(model, "active_adapters")) and hasattr(
model, "load_adapter"
):
# If training a model using PEFT, assume that adapter have been saved properly.
if hasattr(model, "active_adapters") and hasattr(model, "load_adapter"):
if os.path.exists(resume_from_checkpoint):
# For BC for older PEFT versions
if hasattr(model, "active_adapters"):
active_adapters = model.active_adapters
if len(active_adapters) > 1:
logger.warning("Multiple active adapters detected will only consider the first adapter")
active_adapter = active_adapters[0]
else:
active_adapter = model.active_adapter
active_adapters = model.active_adapters
if len(active_adapters) > 1:
logger.warning("Multiple active adapters detected will only consider the first adapter")
active_adapter = active_adapters[0]

if adapter_subdirs:
for subdir_name in adapter_subdirs:
Expand All @@ -2838,7 +2827,7 @@ def _load_from_checkpoint(self, resume_from_checkpoint, model=None):
"Check some examples here: https://github.com/huggingface/peft/issues/96"
)
else:
logger.warning("Could not load adapter model, make sure to have `peft>=0.3.0` installed")
logger.warning(f"Could not load adapter model, make sure to have PEFT >= {MIN_PEFT_VERSION} installed")
else:
# We load the sharded checkpoint
load_result = load_sharded_checkpoint(
Expand Down Expand Up @@ -2885,18 +2874,11 @@ def _load_best_model(self):
)
else:
if _is_peft_model(model):
# If train a model using PEFT & LoRA, assume that adapter have been saved properly.
# TODO: in the future support only specific min PEFT versions
if (hasattr(model, "active_adapter") or hasattr(model, "active_adapters")) and hasattr(
model, "load_adapter"
):
# For BC for older PEFT versions
if hasattr(model, "active_adapters"):
active_adapter = model.active_adapters[0]
if len(model.active_adapters) > 1:
logger.warning("Detected multiple active adapters, will only consider the first one")
else:
active_adapter = model.active_adapter
# If training a model using PEFT, assume that adapter have been saved properly.
if hasattr(model, "active_adapters") and hasattr(model, "load_adapter"):
active_adapter = model.active_adapters[0]
if len(model.active_adapters) > 1:
logger.warning("Detected multiple active adapters, will only consider the first one")

if os.path.exists(best_adapter_model_path) or os.path.exists(best_safe_adapter_model_path):
try:
Expand Down Expand Up @@ -2927,7 +2909,9 @@ def _load_best_model(self):
)
has_been_loaded = False
else:
logger.warning("Could not load adapter model, make sure to have `peft>=0.3.0` installed")
logger.warning(
f"Could not load adapter model, make sure to have PEFT >= {MIN_PEFT_VERSION} installed"
)
has_been_loaded = False
else:
# We load the model state dict on the CPU to avoid an OOM error.
Expand Down
5 changes: 1 addition & 4 deletions src/transformers/utils/peft_utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -113,7 +113,4 @@ def check_peft_version(min_version: str) -> None:
is_peft_version_compatible = version.parse(importlib.metadata.version("peft")) >= version.parse(min_version)

if not is_peft_version_compatible:
raise ValueError(
f"The version of PEFT you are using is not compatible, please use a version that is greater"
f" than {min_version}"
)
raise ValueError(f"The version of PEFT you are using is not compatible, please use a version >= {min_version}")
Loading