Merged

Changes from 3 commits
@@ -41,7 +41,7 @@ def build_backbone(self, input_shape: Tuple[int, ...]) -> None:
                 out_features=self.config["num_units_%d" % i],
                 blocks_per_group=self.config["blocks_per_group_%d" % i],
                 last_block_index=(i - 1) * self.config["blocks_per_group_%d" % i],
-                dropout=self.config['use_dropout']
+                dropout=self.config[f'dropout_{i}'] if self.config['use_dropout'] else None,
             )
         )

@@ -52,7 +52,7 @@ def build_backbone(self, input_shape: Tuple[int, ...]) -> None:
         return backbone

     def _add_group(self, in_features: int, out_features: int,
-                   blocks_per_group: int, last_block_index: int, dropout: bool
+                   blocks_per_group: int, last_block_index: int, dropout: Optional[float]
                    ) -> nn.Module:
"""
Adds a group into the main backbone.
Expand Down Expand Up @@ -206,7 +206,7 @@ def __init__(
         out_features: int,
         blocks_per_group: int,
         block_index: int,
-        dropout: bool,
+        dropout: Optional[float],
Contributor:
Can you please add a unit test where you create a pipeline with a config that sets dropout=0.123, and just check that the dropout of the layer is set to 0.123, to make sure this does not happen again?

Contributor Author:

sure
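
A minimal sketch of the requested test. The ResNetBackbone class name, the input_shape, and every config key not visible in this diff are assumptions about the surrounding code, as is the assumption that the block registers its dropout as an nn.Dropout module:

import torch.nn as nn

def test_dropout_value_is_propagated():
    # Hypothetical config: only 'use_dropout' and 'dropout_1' follow from
    # this diff; the other keys are placeholders for whatever
    # build_backbone actually requires.
    config = {
        'num_groups': 1,
        'blocks_per_group_1': 1,
        'num_units_0': 16,
        'num_units_1': 16,
        'use_dropout': True,
        'dropout_1': 0.123,  # the value under test
    }
    backbone = ResNetBackbone(config=config)  # hypothetical constructor
    model = backbone.build_backbone(input_shape=(10,))

    # Before this fix, the boolean use_dropout was passed as the rate;
    # after it, every nn.Dropout should carry p=0.123.
    probs = [m.p for m in model.modules() if isinstance(m, nn.Dropout)]
    assert probs, 'expected at least one nn.Dropout layer'
    assert all(p == 0.123 for p in probs)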

         activation: nn.Module
     ):
         super(ResBlock, self).__init__()

@@ -38,10 +38,14 @@ def build_backbone(self, input_shape: Tuple[int, ...]) -> None:
         self.config.update(
             {"num_units_%d" % (i): num for i, num in enumerate(neuron_counts)}
         )
-        if self.config['use_dropout'] and self.config["max_dropout"] > 0.05:
+        # We skip the last entry: get_shaped_neuron_counts is built for
+        # getting neuron counts, so it appends out_features as the last
+        # layer. For dropout we only want the shape, not the output size.
+        if self.config['use_dropout']:
             dropout_shape = get_shaped_neuron_counts(
-                self.config['resnet_shape'], 0, 0, 1000, self.config['num_groups']
-            )
+                self.config['resnet_shape'], 0, 0, 1000, self.config['num_groups'] + 1
+            )[:-1]
Contributor:
Hi, I did not immediately understand this. The comment is very clear and I get the intention... Can we add a unit test for this?

So that we call build_backbone and we get a model. Then we check that dropout was added as expected?

Contributor Author:

sure
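
Sketching that test under the same assumptions as above (hypothetical class name, hypothetical config keys beyond the ones visible in this hunk, dropout registered as nn.Dropout modules):

import torch.nn as nn

def test_build_backbone_adds_shaped_dropout():
    config = {
        'num_groups': 3,
        'blocks_per_group': 2,
        'resnet_shape': 'funnel',  # assumption: any supported shape name
        'use_dropout': True,
        'max_dropout': 0.5,
    }
    model = ResNetBackbone(config=config).build_backbone(input_shape=(10,))

    dropouts = [m for m in model.modules() if isinstance(m, nn.Dropout)]
    assert dropouts, 'use_dropout=True should add nn.Dropout layers'
    # Each rate comes from dropout / 1000 * max_dropout, so all of them
    # must fall inside [0, max_dropout].
    assert all(0.0 <= m.p <= config['max_dropout'] for m in dropouts)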


             dropout_shape = [
                 dropout / 1000 * self.config["max_dropout"] for dropout in dropout_shape
             ]
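
To make the rescaling above concrete with made-up numbers: 1000 is passed to get_shaped_neuron_counts as the maximum count, so dividing by it appears to normalise the shape to [0, 1] before scaling by max_dropout:

# Illustrative values only -- the real counts depend on resnet_shape.
dropout_shape = [250, 500, 1000]  # pretend output of get_shaped_neuron_counts(...)[:-1]
max_dropout = 0.8

rates = [d / 1000 * max_dropout for d in dropout_shape]
print(rates)  # [0.2, 0.4, 0.8] -- shaped rates, capped at max_dropout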
@@ -61,7 +65,7 @@ def build_backbone(self, input_shape: Tuple[int, ...]) -> None:
                 out_features=self.config["num_units_%d" % i],
                 blocks_per_group=self.config["blocks_per_group"],
                 last_block_index=(i - 1) * self.config["blocks_per_group"],
-                dropout=self.config['use_dropout']
+                dropout=self.config[f'dropout_{i}'] if self.config['use_dropout'] else None
             )
         )
