-
Notifications
You must be signed in to change notification settings - Fork 66
Fix: Removal of transformers logger and addition of python native logger #270
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
Changes from all commits
2b4ce8d
76fffc5
773f0d1
2e36147
8846bc2
4ed3878
efb8363
2ecfaf7
f2e8afb
c44b1dc
abd8abc
5d08efb
a4cc9c2
7fffda7
ba8a972
f1159f9
756d097
bf30ed2
cb846ca
da7acc6
8af5792
697056c
768d93a
ba489b5
dc9a521
f3a984a
024b12e
cfeb709
bf36b36
fe4b6d5
c544f47
f0fcfdb
e65cb2d
4841119
fe8bb05
5ecf4dd
89124ac
bddad06
0777460
3068f51
0866bce
File filter
Filter by extension
Conversations
Jump to
Diff view
Diff view
There are no files selected for viewing
| Original file line number | Diff line number | Diff line change |
|---|---|---|
| @@ -0,0 +1,84 @@ | ||
| # Copyright The FMS HF Tuning Authors | ||
| # | ||
| # Licensed under the Apache License, Version 2.0 (the "License"); | ||
| # you may not use this file except in compliance with the License. | ||
| # You may obtain a copy of the License at | ||
| # | ||
| # http://www.apache.org/licenses/LICENSE-2.0 | ||
| # | ||
| # Unless required by applicable law or agreed to in writing, software | ||
| # distributed under the License is distributed on an "AS IS" BASIS, | ||
| # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | ||
| # See the License for the specific language governing permissions and | ||
| # limitations under the License. | ||
|
|
||
| # SPDX-License-Identifier: Apache-2.0 | ||
| # https://spdx.dev/learn/handling-license-info/ | ||
|
|
||
| # Standard | ||
| from unittest import mock | ||
| import copy | ||
| import logging | ||
| import os | ||
|
|
||
| # First Party | ||
| from tests.test_sft_trainer import TRAIN_ARGS | ||
|
|
||
| # Local | ||
| from tuning.utils.logging import set_log_level | ||
|
|
||
|
|
||
| @mock.patch.dict(os.environ, {}, clear=True) | ||
| def test_set_log_level_for_logger_default(): | ||
| """ | ||
| Ensure that the correct log level is being set for python native logger and | ||
| transformers logger when no env var or CLI flag is passed | ||
| """ | ||
|
|
||
| train_args = copy.deepcopy(TRAIN_ARGS) | ||
| training_args, logger = set_log_level(train_args) | ||
| assert logger.getEffectiveLevel() == logging.WARNING | ||
| assert training_args.log_level == "passive" | ||
|
|
||
|
|
||
| @mock.patch.dict(os.environ, {"LOG_LEVEL": "info"}, clear=True) | ||
| def test_set_log_level_for_logger_with_env_var(): | ||
| """ | ||
| Ensure that the correct log level is being set for python native logger and | ||
| transformers logger when env var LOG_LEVEL is used | ||
| """ | ||
|
|
||
| train_args_env = copy.deepcopy(TRAIN_ARGS) | ||
| training_args, logger = set_log_level(train_args_env) | ||
| assert logger.getEffectiveLevel() == logging.INFO | ||
| assert training_args.log_level == "info" | ||
|
|
||
|
|
||
| @mock.patch.dict(os.environ, {"TRANSFORMERS_VERBOSITY": "info"}, clear=True) | ||
| def test_set_log_level_for_logger_with_set_verbosity_and_cli(): | ||
| """ | ||
| Ensure that the correct log level is being set for python native logger and | ||
| log_level of transformers logger is unchanged when env var TRANSFORMERS_VERBOSITY is used | ||
| and CLI flag is passed | ||
| """ | ||
|
|
||
| train_args = copy.deepcopy(TRAIN_ARGS) | ||
| train_args.log_level = "error" | ||
| training_args, logger = set_log_level(train_args) | ||
| assert logger.getEffectiveLevel() == logging.ERROR | ||
| assert training_args.log_level == "error" | ||
|
|
||
|
|
||
| @mock.patch.dict(os.environ, {"LOG_LEVEL": "info"}, clear=True) | ||
| def test_set_log_level_for_logger_with_env_var_and_cli(): | ||
| """ | ||
| Ensure that the correct log level is being set for python native logger and | ||
| transformers logger when env var LOG_LEVEL is used and CLI flag is passed. | ||
| In this case, CLI arg takes precedence over the set env var LOG_LEVEL. | ||
| """ | ||
|
|
||
| train_args = copy.deepcopy(TRAIN_ARGS) | ||
| train_args.log_level = "error" | ||
| training_args, logger = set_log_level(train_args) | ||
| assert logger.getEffectiveLevel() == logging.ERROR | ||
| assert training_args.log_level == "error" |
| Original file line number | Diff line number | Diff line change |
|---|---|---|
|
|
@@ -14,11 +14,11 @@ | |
|
|
||
| # Standard | ||
| import json | ||
| import logging | ||
| import os | ||
|
|
||
| # Third Party | ||
| from aim.hugging_face import AimCallback # pylint: disable=import-error | ||
| from transformers.utils import logging | ||
|
|
||
| # Local | ||
| from .tracker import Tracker | ||
|
|
@@ -99,7 +99,8 @@ def __init__(self, tracker_config: AimConfig): | |
| information about the repo or the server and port where aim db is present. | ||
| """ | ||
| super().__init__(name="aim", tracker_config=tracker_config) | ||
| self.logger = logging.get_logger("aimstack_tracker") | ||
| # Get logger with root log level | ||
| self.logger = logging.getLogger() | ||
|
Collaborator
There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. @Abhishek-TAMU any particular reason to remove the module name from the
Collaborator
There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. Same comment for
Collaborator
There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. Explained in the comment above — no need for further explanation.
Collaborator
Author
There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. Cool. Thank you!
Collaborator
There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. I still don't quite understand this. You're setting log_level at runtime to restrict the amount of logs that a user wants to see ( |
||
|
|
||
| def get_hf_callback(self): | ||
| """Returns the aim.hugging_face.AimCallback object associated with this tracker. | ||
|
|
||
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
Let's also add a comment here that CLI arg takes precedence over env var. And if neither is set, we use default "WARNING"
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
Thank you! This is done