
Commit 93fdaed
Revert "cherry pick dpotrainer (PaddlePaddle#9117)" (PaddlePaddle#9123)
This reverts commit 333c29e.
lugimzzz authored Sep 11, 2024
1 parent 333c29e commit 93fdaed
Showing 12 changed files with 6 additions and 1,844 deletions.
1 change: 0 additions & 1 deletion paddlenlp/__init__.py
@@ -48,7 +48,6 @@
     seq2vec,
     trainer,
     transformers,
-    trl,
     utils,
     version,
 )
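For context, this hunk removes trl from the package's eager submodule imports, so importing paddlenlp no longer binds the attribute as a side effect. A minimal sketch of the observable difference, assuming an installation built at this commit (not verified against it):

import paddlenlp

# Before the revert, paddlenlp.trl was bound by the eager import list in
# paddlenlp/__init__.py; after it, the attribute is expected to be absent
# unless the submodule is imported explicitly (and still exists in the tree).
print(hasattr(paddlenlp, "trl"))  # expected: False at this commit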
12 changes: 6 additions & 6 deletions paddlenlp/peft/lora/lora_model.py
@@ -70,7 +70,7 @@ class RowSequenceParallelLinear:
     RowSequenceParallelLoRALinear,
 )
 
-AVAILABLE_LAYERS = [
+AVALIABLE_LAYERS = [
     ColumnParallelLoRALinear,
     ColumnSequenceParallelLoRALinear,
     LoRAConv2D,
@@ -90,7 +90,7 @@ class RowSequenceParallelLinear:
     RowParallelQuantizationLoRALinear,
 )
 
-AVAILABLE_LAYERS += [
+AVALIABLE_LAYERS += [
     ColumnParallelQuantizationLoRALinear,
     QuantizationLoRALinear,
     RowParallelQuantizationLoRALinear,
@@ -815,20 +815,20 @@ def save_to_aistudio(
 
     def disable_lora(self):
         for _, layer in self.model.named_sublayers():
-            if any(isinstance(layer, lora_layer) for lora_layer in AVAILABLE_LAYERS):
+            if any(isinstance(layer, lora_layer) for lora_layer in AVALIABLE_LAYERS):
                 layer.disable_lora = True
 
     def enable_lora(self):
         for _, layer in self.model.named_sublayers():
-            if any(isinstance(layer, lora_layer) for lora_layer in AVAILABLE_LAYERS):
+            if any(isinstance(layer, lora_layer) for lora_layer in AVALIABLE_LAYERS):
                 layer.disable_lora = False
 
     def merge(self):
         for _, layer in self.model.named_sublayers():
-            if any(isinstance(layer, lora_layer) for lora_layer in AVAILABLE_LAYERS):
+            if any(isinstance(layer, lora_layer) for lora_layer in AVALIABLE_LAYERS):
                 layer.merge()
 
     def unmerge(self):
         for _, layer in self.model.named_sublayers():
-            if any(isinstance(layer, lora_layer) for lora_layer in AVAILABLE_LAYERS):
+            if any(isinstance(layer, lora_layer) for lora_layer in AVALIABLE_LAYERS):
                 layer.unmerge()
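The four methods in this hunk share one pattern: walk every sublayer of the wrapped model and act only on layers whose type is registered in the module-level list. A minimal self-contained sketch of that registry-and-isinstance pattern (the classes below are illustrative stand-ins, not the real paddlenlp layers):

# Sketch of the pattern behind disable_lora/enable_lora/merge/unmerge.
# LoRALinear and PlainLinear are stand-ins, not the actual paddlenlp classes.
class LoRALinear:
    def __init__(self):
        self.disable_lora = False

class PlainLinear:
    pass

# Spelling kept as in the reverted code (AVALIABLE, not AVAILABLE).
AVALIABLE_LAYERS = [LoRALinear]

def disable_lora(sublayers):
    # Only layers whose type appears in the registry are touched.
    for layer in sublayers:
        if any(isinstance(layer, cls) for cls in AVALIABLE_LAYERS):
            layer.disable_lora = True

layers = [LoRALinear(), PlainLinear()]
disable_lora(layers)
print(layers[0].disable_lora)  # True; PlainLinear is left untouched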
2 changes: 0 additions & 2 deletions paddlenlp/transformers/__init__.py
@@ -29,8 +29,6 @@
 from .feature_extraction_utils import BatchFeature, FeatureExtractionMixin
 from .image_processing_utils import ImageProcessingMixin
 from .attention_utils import create_bigbird_rand_mask_idx_list
-from .sequence_parallel_utils import AllGatherVarlenOp, sequence_parallel_sparse_mask_labels
-from .tensor_parallel_utils import parallel_matmul, parallel_linear, fused_head_and_loss_fn
 
 try:
     from paddle.distributed.fleet.utils.sequence_parallel_utils import (
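The surviving try: block is the standard guarded-import idiom for optional framework features: use the symbol when the installed Paddle provides it, and degrade gracefully otherwise. A generic sketch of the idiom (optional_dep and fast_op are placeholders; the actual import list is truncated above and left as-is):

# Generic guarded-import idiom; optional_dep and fast_op are placeholders,
# not the real (truncated) paddle import list shown above.
try:
    from optional_dep import fast_op
except ImportError:
    fast_op = None  # dependency missing: feature unavailable in this build

def run(x):
    if fast_op is None:
        raise RuntimeError("fast_op requires optional_dep to be installed")
    return fast_op(x)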
139 changes: 0 additions & 139 deletions paddlenlp/transformers/sequence_parallel_utils.py

This file was deleted.

