2 changes: 1 addition & 1 deletion setup.py
@@ -15,7 +15,7 @@
from setuptools import find_packages, setup


-VERSION = "0.15.2.dev0"
+VERSION = "0.16.0"

extras = {}
extras["quality"] = [
2 changes: 1 addition & 1 deletion src/peft/__init__.py
@@ -12,7 +12,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.

-__version__ = "0.15.2.dev0"
+__version__ = "0.16.0"

from .auto import (
    MODEL_TYPE_TO_PEFT_MODEL_MAPPING,
2 changes: 1 addition & 1 deletion tests/test_gpu_examples.py
@@ -3807,7 +3807,7 @@ def setUp(self):
        # torchao breaks with fp16 and if a previous test uses fp16, transformers will set this env var, which affects
        # subsequent tests, therefore the env var needs to be cleared explicitly
        #
-        # TODO: remove this once https://github.com/huggingface/transformers/pull/34886 is merged
+        # TODO: remove this once https://github.com/huggingface/transformers/pull/37259 is merged
        os.environ.pop("ACCELERATE_MIXED_PRECISION", None)

    def tearDown(self):
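For context on the setUp change above: pytest runs the whole suite in a single process, so an environment variable set as a side effect of one test (here, transformers setting ACCELERATE_MIXED_PRECISION when fp16 is used) stays visible to every later test unless it is cleared. The following is a minimal, self-contained sketch of that pattern, not part of this PR; the class and test names are made up for illustration.

# Sketch: why popping the env var in setUp isolates tests from each other.
import os
import unittest


class EnvLeakExample(unittest.TestCase):
    def setUp(self):
        # Clear any value left behind by a previously executed test.
        os.environ.pop("ACCELERATE_MIXED_PRECISION", None)

    def test_fp16_run(self):
        # Simulates a library setting the env var as a side effect of an fp16 run.
        os.environ["ACCELERATE_MIXED_PRECISION"] = "fp16"
        self.assertEqual(os.environ["ACCELERATE_MIXED_PRECISION"], "fp16")

    def test_unaffected_run(self):
        # Runs after test_fp16_run (alphabetical order); without the pop in
        # setUp it would still see "fp16" from the previous test and fail.
        self.assertNotIn("ACCELERATE_MIXED_PRECISION", os.environ)


if __name__ == "__main__":
    unittest.main()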
3 changes: 0 additions & 3 deletions tests/testing_common.py
@@ -1393,9 +1393,6 @@ def _test_peft_model_device_map(self, model_id, config_cls, config_kwargs):
    def _test_training_prompt_learning_tasks(self, model_id, config_cls, config_kwargs):
        if not issubclass(config_cls, PromptLearningConfig):
            return pytest.skip(f"Test not applicable for {config_cls}")
-        if ("gemma" in model_id.lower()) and (config_cls == PrefixTuningConfig):
-            # TODO might be caused by the 4d causal attention mask of gemma
-            return pytest.skip("Prefix tuning + gemma is currently failing")

        with hub_online_once(model_id):
            model = self.transformers_class.from_pretrained(model_id)