Skip to content

Commit

Permalink
fix code climate error
Browse files Browse the repository at this point in the history
Co-authored-by: Washington <[email protected]>
  • Loading branch information
emysdias and WashingtonBispo committed Feb 9, 2022
1 parent 5c3876c commit 9db1876
Show file tree
Hide file tree
Showing 2 changed files with 10 additions and 23 deletions.
23 changes: 0 additions & 23 deletions rasa/nlu/selectors/response_selector.py
Original file line number Diff line number Diff line change
Expand Up @@ -90,7 +90,6 @@
RESPONSE_SELECTOR_RANKING_KEY,
RESPONSE_SELECTOR_UTTER_ACTION_KEY,
RESPONSE_SELECTOR_DEFAULT_INTENT,
DEFAULT_TRANSFORMER_SIZE,
)
from rasa.shared.nlu.constants import (
TEXT,
Expand Down Expand Up @@ -382,27 +381,6 @@ def _warn_about_transformer_and_hidden_layers_enabled(
category=UserWarning,
)

def _warn_and_correct_transformer_size(self, selector_name: Text) -> None:
    """Corrects transformer size so that training doesn't break; informs the user.

    If a transformer is used, the default `transformer_size` breaks things.
    We need to set a reasonable default value so that the model works fine.
    """
    # A missing or non-positive size cannot be used to build the transformer,
    # so fall back to the library default after telling the user.
    configured_size = self.component_config[TRANSFORMER_SIZE]
    if configured_size is not None and configured_size >= 1:
        # Size is usable as-is; nothing to correct.
        return
    rasa.shared.utils.io.raise_warning(
        f"`{TRANSFORMER_SIZE}` is set to "
        f"`{self.component_config[TRANSFORMER_SIZE]}` for "
        f"{selector_name}, but a positive size is required when using "
        f"`{NUM_TRANSFORMER_LAYERS} > 0`. {selector_name} will proceed, using "
        f"`{TRANSFORMER_SIZE}={DEFAULT_TRANSFORMER_SIZE}`. "
        f"Alternatively, specify a different value in the component's config.",
        category=UserWarning,
    )
    self.component_config[TRANSFORMER_SIZE] = DEFAULT_TRANSFORMER_SIZE

def _check_config_params_when_transformer_enabled(self) -> None:
"""Checks & corrects config parameters when the transformer is enabled.
Expand All @@ -414,7 +392,6 @@ def _check_config_params_when_transformer_enabled(self) -> None:
f"({self.retrieval_intent})" if self.retrieval_intent else ""
)
self._warn_about_transformer_and_hidden_layers_enabled(selector_name)
self._warn_and_correct_transformer_size(selector_name)

def _check_config_parameters(self) -> None:
"""Checks that component configuration makes sense; corrects it where needed."""
Expand Down
10 changes: 10 additions & 0 deletions rasa/utils/tensorflow/rasa_layers.py
Original file line number Diff line number Diff line change
Expand Up @@ -26,6 +26,7 @@
SENTENCE,
)
from rasa.utils.tensorflow import layers
import rasa.shared.utils.io
from rasa.utils.tensorflow.exceptions import TFLayerConfigException
from rasa.utils.tensorflow.transformer import TransformerEncoder
from rasa.nlu.constants import DEFAULT_TRANSFORMER_SIZE
Expand Down Expand Up @@ -810,6 +811,15 @@ def _get_transformer_dimensions(
if isinstance(transformer_units, dict):
transformer_units = transformer_units[attribute]
if transformer_layers > 0 and (not transformer_units or transformer_units < 1):
rasa.shared.utils.io.raise_warning(
f"`{TRANSFORMER_SIZE}` is set to "
f"`{transformer_units}` for "
f"{attribute}, but a positive size is required when using "
f"`{NUM_TRANSFORMER_LAYERS} > 0`. {attribute} will proceed, using "
f"`{TRANSFORMER_SIZE}={DEFAULT_TRANSFORMER_SIZE}`. "
f"Alternatively, specify a different value in the component's config.",
category=UserWarning,
)
transformer_units = DEFAULT_TRANSFORMER_SIZE

return transformer_layers, transformer_units
Expand Down

0 comments on commit 9db1876

Please sign in to comment.