【auto_parallel】Add checkpoint convertor #8847

Merged · 31 commits · Aug 22, 2024 · Changes shown from 25 commits
42 changes: 27 additions & 15 deletions paddlenlp/trainer/auto_trainer.py
@@ -28,6 +28,7 @@
 from ..utils.log import logger
 from .argparser import strtobool
+from .ckpt_converter import CheckpointConverter
 from .trainer import SCALER_NAME, SCHEDULER_NAME, TRAINER_STATE_NAME, TRAINING_ARGS_NAME
 from .trainer_callback import TrainerState
 from .trainer_utils import (  # set_hyrbid_parallel_seed,
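For context, a hedged sketch of the interface this new import is expected to expose, inferred only from the call site in the hunk further down; the real implementation lives in paddlenlp/trainer/ckpt_converter.py and is not part of this file's diff, and the argument names below are illustrative:

    # Sketch only: shape inferred from the call
    # CheckpointConverter(resume_from_checkpoint, state_dict,
    #     parameter_to_structured_name).load_from_hybrid_parallel_checkpoint()
    class CheckpointConverter:
        def __init__(self, hybrid_parallel_ckpt_path, state_dict, parameter_to_structured_name):
            # Directory written by the hybrid-parallel (dygraph) trainer.
            self.ckpt_path = hybrid_parallel_ckpt_path
            # Target {MODEL_NAME: ..., OPTIMIZER_NAME: ...} dict to fill in place.
            self.state_dict = state_dict
            # Maps a tensor's internal name to its structured state-dict key.
            self.mapping = parameter_to_structured_name

        def load_from_hybrid_parallel_checkpoint(self):
            # Renames/reshards the checkpointed tensors and loads them into
            # self.state_dict; see ckpt_converter.py for the actual logic.
            ...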
@@ -695,20 +696,16 @@
                 )
             )
 
-        ckpt_path = os.path.join(resume_from_checkpoint, DIST_CKPT_PATH)
-
-        if not os.path.isdir(ckpt_path):
-            raise ValueError(f"Can't find a valid checkpoint at {resume_from_checkpoint}")
-
         if self.args.to_static:
-            opt_state_dict = {
+            model_state_dict = {
                 key: value
-                for key, value in self.model_wrapped.state_dict("opt").items()
+                for key, value in self.model_wrapped.state_dict("param").items()
                 if not any(keyword in key for keyword in FREE_SVAE_LOAD_KEY_PATTERNS)
             }
-            state_dict = {
-                MODEL_NAME: self.model_wrapped.state_dict("param"),
-                OPTIMIZER_NAME: opt_state_dict,
+            optim_state_dict = {
+                key: value
+                for key, value in self.model_wrapped.state_dict("opt").items()
+                if not any(keyword in key for keyword in FREE_SVAE_LOAD_KEY_PATTERNS)
             }
         else:
             model_state_dict = self.model_wrapped.state_dict()
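An aside on the filter used in the static branch above: keys are dropped when they contain any substring from FREE_SVAE_LOAD_KEY_PATTERNS (spelling as in the source). A minimal self-contained sketch, with hypothetical patterns since the real list is defined elsewhere in auto_trainer.py:

    # Hypothetical stand-ins for the real FREE_SVAE_LOAD_KEY_PATTERNS list.
    FREE_SVAE_LOAD_KEY_PATTERNS = ["learning_rate_", "@GRAD"]

    raw_state = {
        "linear_0.w_0": "parameter tensor",
        "linear_0.w_0_moment1_0": "optimizer moment tensor",
        "learning_rate_0": "scheduler tensor, filtered out",
    }
    # Keep only entries whose key matches none of the patterns.
    filtered = {
        key: value
        for key, value in raw_state.items()
        if not any(keyword in key for keyword in FREE_SVAE_LOAD_KEY_PATTERNS)
    }
    assert "learning_rate_0" not in filtered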
@@ -721,12 +718,27 @@
             optim_state_dict = self.optimizer.state_dict()
             optim_state_dict.pop("LR_Scheduler", None)
 
-            state_dict = {
-                MODEL_NAME: model_state_dict,
-                OPTIMIZER_NAME: optim_state_dict,
-            }
+        state_dict = {
+            MODEL_NAME: model_state_dict,
+            OPTIMIZER_NAME: optim_state_dict,
+        }
 
-        self._load_ckpt_func(state_dict, ckpt_path)
+        parameter_to_structured_name = {}
+        if self.args.to_static:
+            parameter_to_structured_name = self.model_wrapped._parameter_to_structured_name
+        else:
+            for state_name, state_value in self.model_wrapped.state_dict().items():
+                parameter_to_structured_name[state_value.name] = state_name
+
+        if self.args.resume_form_hybrid_parallel:
+            CheckpointConverter(
+                resume_from_checkpoint, state_dict, parameter_to_structured_name
+            ).load_from_hybrid_parallel_checkpoint()
+        else:
+            ckpt_path = os.path.join(resume_from_checkpoint, DIST_CKPT_PATH)
+            if not os.path.isdir(ckpt_path):
+                raise ValueError(f"Can't find a valid checkpoint at {resume_from_checkpoint}")
+            self._load_ckpt_func(state_dict, ckpt_path)
 
         # release memory
         del state_dict
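As a standalone illustration of how the dynamic-mode branch builds parameter_to_structured_name: Paddle's state_dict() keys are structured names such as "fc.weight", while each tensor also carries an auto-generated internal .name such as "linear_0.w_0", which is what the converter must translate between. A minimal sketch with a toy model (the exact generated names will vary):

    import paddle

    # Toy model whose state_dict() keys are structured names.
    model = paddle.nn.Sequential(("fc", paddle.nn.Linear(4, 2)))

    parameter_to_structured_name = {}
    for state_name, state_value in model.state_dict().items():
        # state_value.name is Paddle's internal unique tensor name.
        parameter_to_structured_name[state_value.name] = state_name

    print(parameter_to_structured_name)
    # e.g. {"linear_0.w_0": "fc.weight", "linear_0.b_0": "fc.bias"}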
Codecov / codecov/patch: added lines #L31, #L700, #L705, #L721, #L726-L728, #L730-L731, #L733-L734 and #L738-L741 in paddlenlp/trainer/auto_trainer.py were not covered by tests.