Skip to content
Merged
Show file tree
Hide file tree
Changes from 1 commit
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
29 changes: 29 additions & 0 deletions tests/optim/helpers/models.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,29 @@
from typing import Type

import torch


def _check_layer_in_model(
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

leading _ is a convention for private. As we are exporting these 2 functions to be used by others, we can remove this leading _

Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Oops, yeah I'm not sure how I missed removing the underscore!

self,
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

passing self here is a little weird, as these functions are not class method. How about updating these 2 functions to simply return the bool and doing the assert in the tests? This also makes the 2 utils more generic to other use cases.

Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

The original idea was to follow after assertTensorAlmostEqual's input variable setup, but it does appear simpler to return bool outputs instead.

model: torch.nn.Module,
layer: Type[torch.nn.Module],
) -> None:
def check_for_layer_in_model(model, layer) -> bool:
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

If you agree with the above suggestion, we don't need another nested inner function here.

def check_layer_in_model(model, layer):
    """Return True if any submodule of ``model`` (searched recursively)
    is an instance of ``layer``, otherwise False."""
    return any(
        isinstance(child, layer) or check_layer_in_model(child, layer)
        for child in model._modules.values()
        if child is not None
    )

Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Yeah, we can just use the one function with bool outputs to test for the presence and absence of certain layers.

for name, child in model._modules.items():
if child is not None:
if isinstance(child, layer):
return True
if check_for_layer_in_model(child, layer):
return True
return False

self.assertTrue(check_for_layer_in_model(model, layer))


def _check_layer_not_in_model(
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

we may not even need this layer_not_in_model function, just not check_layer_in_model(model, layer)

Copy link
Contributor Author

@ProGamerGov ProGamerGov Mar 17, 2022

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

@aobo-y

In addition to using not, we can also use self.assertFalse

self.assertFalse(check_layer_in_model(model, layer))
self.assertTrue(check_layer_in_model(model, layer))

But yeah, it looks like we don't need _check_layer_not_in_model after switching to bool outputs.

self, model: torch.nn.Module, layer: Type[torch.nn.Module]
) -> None:
for name, child in model._modules.items():
if child is not None:
self.assertNotIsInstance(child, layer)
_check_layer_not_in_model(self, child, layer)
Original file line number Diff line number Diff line change
@@ -1,38 +1,12 @@
#!/usr/bin/env python3
import unittest
from typing import Type

import torch

from captum.optim.models import googlenet
from captum.optim.models._common import RedirectedReluLayer, SkipLayer
from tests.helpers.basic import BaseTest, assertTensorAlmostEqual


def _check_layer_in_model(
self,
model: torch.nn.Module,
layer: Type[torch.nn.Module],
) -> None:
def check_for_layer_in_model(model, layer) -> bool:
for name, child in model._modules.items():
if child is not None:
if isinstance(child, layer):
return True
if check_for_layer_in_model(child, layer):
return True
return False

self.assertTrue(check_for_layer_in_model(model, layer))


def _check_layer_not_in_model(
self, model: torch.nn.Module, layer: Type[torch.nn.Module]
) -> None:
for name, child in model._modules.items():
if child is not None:
self.assertNotIsInstance(child, layer)
_check_layer_not_in_model(self, child, layer)
from tests.optim.helpers.models import _check_layer_in_model, _check_layer_not_in_model


class TestInceptionV1(BaseTest):
Expand Down