diff --git a/.gitignore b/.gitignore index d56fc37..33475f2 100644 --- a/.gitignore +++ b/.gitignore @@ -49,3 +49,4 @@ sdist/ target/ var/ venv/ +.vscode \ No newline at end of file diff --git a/README.rst b/README.rst index ac8ed7b..c38352a 100644 --- a/README.rst +++ b/README.rst @@ -12,21 +12,21 @@ A tantalizing preview of Keras-ResNet simplicity: .. code-block:: python - >>> import keras + >>> import tensorflow.keras >>> import keras_resnet.models >>> shape, classes = (32, 32, 3), 10 - >>> x = keras.layers.Input(shape) + >>> x = tensorflow.keras.layers.Input(shape) >>> model = keras_resnet.models.ResNet50(x, classes=classes) >>> model.compile("adam", "categorical_crossentropy", ["accuracy"]) - >>> (training_x, training_y), (_, _) = keras.datasets.cifar10.load_data() + >>> (training_x, training_y), (_, _) = tensorflow.keras.datasets.cifar10.load_data() - >>> training_y = keras.utils.np_utils.to_categorical(training_y) + >>> training_y = tensorflow.keras.utils.to_categorical(training_y) >>> model.fit(training_x, training_y) diff --git a/keras_resnet/benchmarks/__init__.py b/keras_resnet/benchmarks/__init__.py index 415aa58..0cdedd1 100644 --- a/keras_resnet/benchmarks/__init__.py +++ b/keras_resnet/benchmarks/__init__.py @@ -1,8 +1,8 @@ import os.path import click -import keras -import keras.preprocessing.image +import tensorflow.keras +import tensorflow.keras.preprocessing.image import numpy import pkg_resources import sklearn.model_selection @@ -12,9 +12,9 @@ import keras_resnet.models _benchmarks = { - "CIFAR-10": keras.datasets.cifar10, - "CIFAR-100": keras.datasets.cifar100, - "MNIST": keras.datasets.mnist + "CIFAR-10": tensorflow.keras.datasets.cifar10, + "CIFAR-100": tensorflow.keras.datasets.cifar100, + "MNIST": tensorflow.keras.datasets.mnist } @@ -65,7 +65,7 @@ def __main__(benchmark, device, name): session = tensorflow.Session(config=configuration) - keras.backend.set_session(session) + tensorflow.keras.backend.set_session(session) (training_x, training_y), _ = _benchmarks[benchmark].load_data() @@ -74,14 +74,14 @@ def __main__(benchmark, device, name): if benchmark is "MNIST": training_x = numpy.expand_dims(training_x, -1) - training_y = keras.utils.np_utils.to_categorical(training_y) + training_y = tensorflow.keras.utils.to_categorical(training_y) training_x, validation_x, training_y, validation_y = sklearn.model_selection.train_test_split( training_x, training_y ) - generator = keras.preprocessing.image.ImageDataGenerator( + generator = tensorflow.keras.preprocessing.image.ImageDataGenerator( horizontal_flip=True ) @@ -93,7 +93,7 @@ def __main__(benchmark, device, name): batch_size=256 ) - validation_data = keras.preprocessing.image.ImageDataGenerator() + validation_data = tensorflow.keras.preprocessing.image.ImageDataGenerator() validation_data.fit(validation_x) @@ -105,7 +105,7 @@ def __main__(benchmark, device, name): shape, classes = training_x.shape[1:], training_y.shape[-1] - x = keras.layers.Input(shape) + x = tensorflow.keras.layers.Input(shape) model = _names[name](inputs=x, classes=classes) @@ -120,13 +120,13 @@ def __main__(benchmark, device, name): pathname = pkg_resources.resource_filename("keras_resnet", pathname) - model_checkpoint = keras.callbacks.ModelCheckpoint(pathname) + model_checkpoint = tensorflow.keras.callbacks.ModelCheckpoint(pathname) pathname = os.path.join("data", "logs", benchmark, "{}.csv".format(name)) pathname = pkg_resources.resource_filename("keras_resnet", pathname) - csv_logger = keras.callbacks.CSVLogger(pathname) + csv_logger = tensorflow.keras.callbacks.CSVLogger(pathname) callbacks = [ csv_logger,
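One subtlety in the migration above: `np_utils` is not exposed under `tensorflow.keras.utils`, so one-hot encoding has to go through `tensorflow.keras.utils.to_categorical` directly. A minimal sketch of the migrated benchmark input pipeline, assuming CIFAR-10 and the benchmark's 256-sample batches (the variable names are illustrative):

.. code-block:: python

    import sklearn.model_selection
    import tensorflow.keras

    # Load the data and one-hot encode the labels with the tf.keras helper.
    (training_x, training_y), _ = tensorflow.keras.datasets.cifar10.load_data()
    training_y = tensorflow.keras.utils.to_categorical(training_y)

    # Hold out a validation split, as the benchmark does.
    training_x, validation_x, training_y, validation_y = \
        sklearn.model_selection.train_test_split(training_x, training_y)

    # Augmented training batches, mirroring the benchmark's generator.
    generator = tensorflow.keras.preprocessing.image.ImageDataGenerator(
        horizontal_flip=True
    )
    generator.fit(training_x)
    batches = generator.flow(training_x, training_y, batch_size=256)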
diff --git a/keras_resnet/blocks/_1d.py b/keras_resnet/blocks/_1d.py index f220396..beed232 100644 --- a/keras_resnet/blocks/_1d.py +++ b/keras_resnet/blocks/_1d.py @@ -9,281 +9,291 @@ import keras.layers import keras.regularizers - import keras_resnet.layers -parameters = { - "kernel_initializer": "he_normal" -} - +parameters = {"kernel_initializer": "he_normal"} -def basic_1d( - filters, - stage=0, - block=0, - kernel_size=3, - numerical_name=False, - stride=None, - freeze_bn=False -): +class Basic1D(keras.layers.Layer): """ A one-dimensional basic block. :param filters: the output’s feature space :param stage: int representing the stage of this block (starting from 0) :param block: int representing this block (starting from 0) :param kernel_size: size of the kernel :param numerical_name: if true, uses numbers to represent blocks instead of chars (ResNet{101, 152, 200}) :param stride: int representing the stride used in the shortcut and the first conv layer, default derives stride from block id :param freeze_bn: if true, freezes BatchNormalization layers (i.e. no updates are done in these layers) - - Usage: - - >>> import keras_resnet.blocks - - >>> keras_resnet.blocks.basic_1d(64) """ - if stride is None: - if block != 0 or stage == 0: - stride = 1 - else: - stride = 2 - if keras.backend.image_data_format() == "channels_last": - axis = -1 - else: - axis = 1 + def __init__(self, + filters, + stage=0, + block=0, + kernel_size=3, + numerical_name=False, + stride=None, + freeze_bn=False, + **kwargs): + super(Basic1D, self).__init__(**kwargs) + + self.filters = filters + self.stage = stage + self.block = block + self.kernel_size = kernel_size + self.freeze_bn = freeze_bn + self.stride = stride + + if stride is None: + if block != 0 or stage == 0: + self.stride = 1 + else: + self.stride = 2 + + if keras.backend.image_data_format() == "channels_last": + self.axis = -1 + else: + self.axis = 1 - if block > 0 and numerical_name: - block_char = "b{}".format(block) - else: - block_char = chr(ord('a') + block) + if block > 0 and numerical_name: + self.block_char = "b{}".format(block) + else: + self.block_char = chr(ord('a') + block) - stage_char = str(stage + 2) + self.stage_char = str(stage + 2) - def f(x): - y = keras.layers.ZeroPadding1D( - padding=1, - name="padding{}{}_branch2a".format(stage_char, block_char) - )(x) - y = keras.layers.Conv1D( - filters, - kernel_size, - strides=stride, + self.zeropadding1da = keras.layers.ZeroPadding1D( + padding=1, + name="padding{}{}_branch2a".format(self.stage_char, self.block_char) + ) + self.conv1da = keras.layers.Conv1D( + self.filters, + self.kernel_size, + strides=self.stride, use_bias=False, - name="res{}{}_branch2a".format(stage_char, block_char), + name="res{}{}_branch2a".format(self.stage_char, self.block_char), **parameters - )(y) - - y = keras_resnet.layers.BatchNormalization( - axis=axis, + ) + self.batchnormalizationa = keras_resnet.layers.BatchNormalization( + axis=self.axis, epsilon=1e-5, - freeze=freeze_bn, - name="bn{}{}_branch2a".format(stage_char, block_char) - )(y) - - y = keras.layers.Activation( + freeze=self.freeze_bn, + name="bn{}{}_branch2a".format(self.stage_char, self.block_char) + ) + self.activationa = keras.layers.Activation( "relu", - name="res{}{}_branch2a_relu".format(stage_char, block_char) - )(y) - - y = keras.layers.ZeroPadding1D( + name="res{}{}_branch2a_relu".format(self.stage_char, self.block_char) + ) + 
self.zeropadding1db = keras.layers.ZeroPadding1D( padding=1, - name="padding{}{}_branch2b".format(stage_char, block_char) - )(y) - - y = keras.layers.Conv1D( - filters, - kernel_size, + name="padding{}{}_branch2b".format(self.stage_char, self.block_char) + ) + self.conv1db = keras.layers.Conv1D( + self.filters, + self.kernel_size, use_bias=False, - name="res{}{}_branch2b".format(stage_char, block_char), + name="res{}{}_branch2b".format(self.stage_char, self.block_char), **parameters - )(y) - - y = keras_resnet.layers.BatchNormalization( - axis=axis, + ) + self.batchnormalizationb = keras_resnet.layers.BatchNormalization( + axis=self.axis, epsilon=1e-5, - freeze=freeze_bn, - name="bn{}{}_branch2b".format(stage_char, block_char) - )(y) - - if block == 0: - shortcut = keras.layers.Conv1D( - filters, - 1, - strides=stride, - use_bias=False, - name="res{}{}_branch1".format(stage_char, block_char), - **parameters - )(x) - - shortcut = keras_resnet.layers.BatchNormalization( - axis=axis, - epsilon=1e-5, - freeze=freeze_bn, - name="bn{}{}_branch1".format(stage_char, block_char) - )(shortcut) - else: - shortcut = x - - y = keras.layers.Add( - name="res{}{}".format(stage_char, block_char) - )([y, shortcut]) - - y = keras.layers.Activation( + freeze=self.freeze_bn, + name="bn{}{}_branch2b".format(self.stage_char, self.block_char) + ) + if self.block == 0:  # dotted-line (projection) shortcuts in the ResNet paper + self.conv1dc = keras.layers.Conv1D( + self.filters, + 1, + strides=self.stride, + use_bias=False, + name="res{}{}_branch1".format(self.stage_char, self.block_char), + **parameters + ) + self.batchnormalizationc = keras_resnet.layers.BatchNormalization( + axis=self.axis, + epsilon=1e-5, + freeze=self.freeze_bn, + name="bn{}{}_branch1".format(self.stage_char, self.block_char) + ) + self.add = keras.layers.Add( + name="res{}{}".format(self.stage_char, self.block_char) + ) + self.activationb = keras.layers.Activation( "relu", - name="res{}{}_relu".format(stage_char, block_char) - )(y) + name="res{}{}_relu".format(self.stage_char, self.block_char) + ) + + def call(self, inputs): + y = self.zeropadding1da(inputs)  # padding{stage}{block}_branch2a + y = self.conv1da(y)  # res{stage}{block}_branch2a + y = self.batchnormalizationa(y)  # bn{stage}{block}_branch2a + y = self.activationa(y)  # res{stage}{block}_branch2a_relu + y = self.zeropadding1db(y)  # padding{stage}{block}_branch2b + y = self.conv1db(y)  # res{stage}{block}_branch2b + y = self.batchnormalizationb(y)  # bn{stage}{block}_branch2b + + if self.block == 0:  # dotted-line (projection) shortcut in the ResNet paper + shortcut = self.conv1dc(inputs)  # res{stage}{block}_branch1 + shortcut = self.batchnormalizationc(shortcut) + else:  # solid-line (identity) shortcut in the ResNet paper + shortcut = inputs + + y = self.add([y, shortcut]) + y = self.activationb(y) return y - return f - -def bottleneck_1d( - filters, - stage=0, - block=0, - kernel_size=3, - numerical_name=False, - stride=None, - freeze_bn=False -): +class Bottleneck1D(keras.layers.Layer): """ A one-dimensional bottleneck block. :param filters: the output’s feature space :param stage: int representing the stage of this block (starting from 0) :param block: int representing this block (starting from 0) :param kernel_size: size of the kernel :param numerical_name: if true, uses numbers to represent blocks instead of chars (ResNet{101, 152, 200}) :param stride: int representing the stride used in the shortcut and the first conv layer, default derives stride from block id :param freeze_bn: if true, freezes BatchNormalization layers (i.e. 
no updates are done in these layers) - - Usage: - - >>> import keras_resnet.blocks - - >>> keras_resnet.blocks.bottleneck_1d(64) """ - if stride is None: - stride = 1 if block != 0 or stage == 0 else 2 - - if keras.backend.image_data_format() == "channels_last": - axis = -1 - else: - axis = 1 + def __init__(self, + filters, + stage=0, + block=0, + kernel_size=3, + numerical_name=False, + stride=None, + freeze_bn=False, + **kwargs): + super(Bottleneck1D, self).__init__(**kwargs) + + self.filters = filters + self.stage = stage + self.block = block + self.kernel_size = kernel_size + self.freeze_bn = freeze_bn + self.stride = stride + + if stride is None: + self.stride = 1 if block != 0 or stage == 0 else 2 + + if keras.backend.image_data_format() == "channels_last": + self.axis = -1 + else: + self.axis = 1 - if block > 0 and numerical_name: - block_char = "b{}".format(block) - else: - block_char = chr(ord('a') + block) + if block > 0 and numerical_name: + self.block_char = "b{}".format(block) + else: + self.block_char = chr(ord('a') + block) - stage_char = str(stage + 2) + self.stage_char = str(stage + 2) - def f(x): - y = keras.layers.Conv1D( - filters, + + self.conv1da = keras.layers.Conv1D( + self.filters, 1, - strides=stride, + strides=self.stride, use_bias=False, - name="res{}{}_branch2a".format(stage_char, block_char), + name="res{}{}_branch2a".format(self.stage_char, self.block_char), **parameters - )(x) + ) - y = keras_resnet.layers.BatchNormalization( - axis=axis, + self.batchnormalizationa = keras_resnet.layers.BatchNormalization( + axis=self.axis, epsilon=1e-5, - freeze=freeze_bn, - name="bn{}{}_branch2a".format(stage_char, block_char) - )(y) + freeze=self.freeze_bn, + name="bn{}{}_branch2a".format(self.stage_char, self.block_char) + ) - y = keras.layers.Activation( + self.activationa = keras.layers.Activation( "relu", - name="res{}{}_branch2a_relu".format(stage_char, block_char) - )(y) + name="res{}{}_branch2a_relu".format(self.stage_char, self.block_char) + ) - y = keras.layers.ZeroPadding1D( + self.zeropadding1da = keras.layers.ZeroPadding1D( padding=1, - name="padding{}{}_branch2b".format(stage_char, block_char) - )(y) + name="padding{}{}_branch2b".format(self.stage_char, self.block_char) + ) - y = keras.layers.Conv1D( - filters, - kernel_size, + self.conv1db = keras.layers.Conv1D( + self.filters, + self.kernel_size, use_bias=False, - name="res{}{}_branch2b".format(stage_char, block_char), + name="res{}{}_branch2b".format(self.stage_char, self.block_char), **parameters - )(y) + ) - y = keras_resnet.layers.BatchNormalization( - axis=axis, + self.batchnormalizationb = keras_resnet.layers.BatchNormalization( + axis=self.axis, epsilon=1e-5, - freeze=freeze_bn, - name="bn{}{}_branch2b".format(stage_char, block_char) - )(y) + freeze=self.freeze_bn, + name="bn{}{}_branch2b".format(self.stage_char, self.block_char) + ) - y = keras.layers.Activation( + self.activationb = keras.layers.Activation( "relu", - name="res{}{}_branch2b_relu".format(stage_char, block_char) - )(y) + name="res{}{}_branch2b_relu".format(self.stage_char, self.block_char) + ) - y = keras.layers.Conv1D( - filters * 4, + self.conv1dc = keras.layers.Conv1D( + self.filters * 4, 1, use_bias=False, - name="res{}{}_branch2c".format(stage_char, block_char), + name="res{}{}_branch2c".format(self.stage_char, self.block_char), **parameters - )(y) + ) - y = keras_resnet.layers.BatchNormalization( - axis=axis, + self.batchnormalizationc = keras_resnet.layers.BatchNormalization( + axis=self.axis, epsilon=1e-5, - freeze=freeze_bn, 
- name="bn{}{}_branch2c".format(stage_char, block_char) - )(y) - - if block == 0: - shortcut = keras.layers.Conv1D( - filters * 4, - 1, - strides=stride, - use_bias=False, - name="res{}{}_branch1".format(stage_char, block_char), - **parameters - )(x) - - shortcut = keras_resnet.layers.BatchNormalization( - axis=axis, - epsilon=1e-5, - freeze=freeze_bn, - name="bn{}{}_branch1".format(stage_char, block_char) - )(shortcut) - else: - shortcut = x + freeze=self.freeze_bn, + name="bn{}{}_branch2c".format(self.stage_char, self.block_char) + ) - y = keras.layers.Add( - name="res{}{}".format(stage_char, block_char) - )([y, shortcut]) + self.conv1dd = keras.layers.Conv1D( + self.filters * 4, + 1, + strides=self.stride, + use_bias=False, + name="res{}{}_branch1".format(self.stage_char, self.block_char), + **parameters + ) - y = keras.layers.Activation( - "relu", - name="res{}{}_relu".format(stage_char, block_char) - )(y) + self.batchnormalizationd = keras_resnet.layers.BatchNormalization( + axis=self.axis, + epsilon=1e-5, + freeze=self.freeze_bn, + name="bn{}{}_branch1".format(self.stage_char, self.block_char) + ) - return y + self.add = keras.layers.Add( + name="res{}{}".format(self.stage_char, self.block_char) + ) + + self.activationc = keras.layers.Activation( + "relu", + name="res{}{}_relu".format(self.stage_char, self.block_char) + ) + + def call(self, inputs): + y = self.conv1da(inputs) + y = self.batchnormalizationa(y) + y = self.activationa(y) + y = self.zeropadding1da(y) + y = self.conv1db(y) + y = self.batchnormalizationb(y) + y = self.activationb(y) + y = self.conv1dc(y) + y = self.batchnormalizationc(y) + + if self.block == 0: #Dotted line connections in ResNet paper + shortcut = self.conv1dd(inputs) + shortcut = self.batchnormalizationd(shortcut) + else: #Solid line connections in ResNet paper + shortcut = inputs + + y = self.add([y, shortcut]) + y = self.activationc(y) - return f + return y \ No newline at end of file diff --git a/keras_resnet/blocks/_2d.py b/keras_resnet/blocks/_2d.py index ce774fb..e2b6a36 100644 --- a/keras_resnet/blocks/_2d.py +++ b/keras_resnet/blocks/_2d.py @@ -7,8 +7,8 @@ This module implements a number of popular two-dimensional residual blocks. 
""" -import keras.layers -import keras.regularizers +import tensorflow.keras.layers +import tensorflow.keras.regularizers import keras_resnet.layers @@ -55,7 +55,7 @@ def basic_2d( else: stride = 2 - if keras.backend.image_data_format() == "channels_last": + if tensorflow.keras.backend.image_data_format() == "channels_last": axis = 3 else: axis = 1 @@ -68,30 +68,30 @@ def basic_2d( stage_char = str(stage + 2) def f(x): - y = keras.layers.ZeroPadding2D(padding=1, name="padding{}{}_branch2a".format(stage_char, block_char))(x) + y = tensorflow.keras.layers.ZeroPadding2D(padding=1, name="padding{}{}_branch2a".format(stage_char, block_char))(x) - y = keras.layers.Conv2D(filters, kernel_size, strides=stride, use_bias=False, name="res{}{}_branch2a".format(stage_char, block_char), **parameters)(y) + y = tensorflow.keras.layers.Conv2D(filters, kernel_size, strides=stride, use_bias=False, name="res{}{}_branch2a".format(stage_char, block_char), **parameters)(y) y = keras_resnet.layers.BatchNormalization(axis=axis, epsilon=1e-5, freeze=freeze_bn, name="bn{}{}_branch2a".format(stage_char, block_char))(y) - y = keras.layers.Activation("relu", name="res{}{}_branch2a_relu".format(stage_char, block_char))(y) + y = tensorflow.keras.layers.Activation("relu", name="res{}{}_branch2a_relu".format(stage_char, block_char))(y) - y = keras.layers.ZeroPadding2D(padding=1, name="padding{}{}_branch2b".format(stage_char, block_char))(y) + y = tensorflow.keras.layers.ZeroPadding2D(padding=1, name="padding{}{}_branch2b".format(stage_char, block_char))(y) - y = keras.layers.Conv2D(filters, kernel_size, use_bias=False, name="res{}{}_branch2b".format(stage_char, block_char), **parameters)(y) + y = tensorflow.keras.layers.Conv2D(filters, kernel_size, use_bias=False, name="res{}{}_branch2b".format(stage_char, block_char), **parameters)(y) y = keras_resnet.layers.BatchNormalization(axis=axis, epsilon=1e-5, freeze=freeze_bn, name="bn{}{}_branch2b".format(stage_char, block_char))(y) if block == 0: - shortcut = keras.layers.Conv2D(filters, (1, 1), strides=stride, use_bias=False, name="res{}{}_branch1".format(stage_char, block_char), **parameters)(x) + shortcut = tensorflow.keras.layers.Conv2D(filters, (1, 1), strides=stride, use_bias=False, name="res{}{}_branch1".format(stage_char, block_char), **parameters)(x) shortcut = keras_resnet.layers.BatchNormalization(axis=axis, epsilon=1e-5, freeze=freeze_bn, name="bn{}{}_branch1".format(stage_char, block_char))(shortcut) else: shortcut = x - y = keras.layers.Add(name="res{}{}".format(stage_char, block_char))([y, shortcut]) + y = tensorflow.keras.layers.Add(name="res{}{}".format(stage_char, block_char))([y, shortcut]) - y = keras.layers.Activation("relu", name="res{}{}_relu".format(stage_char, block_char))(y) + y = tensorflow.keras.layers.Activation("relu", name="res{}{}_relu".format(stage_char, block_char))(y) return y @@ -136,7 +136,7 @@ def bottleneck_2d( else: stride = 2 - if keras.backend.image_data_format() == "channels_last": + if tensorflow.keras.backend.image_data_format() == "channels_last": axis = 3 else: axis = 1 @@ -149,34 +149,34 @@ def bottleneck_2d( stage_char = str(stage + 2) def f(x): - y = keras.layers.Conv2D(filters, (1, 1), strides=stride, use_bias=False, name="res{}{}_branch2a".format(stage_char, block_char), **parameters)(x) + y = tensorflow.keras.layers.Conv2D(filters, (1, 1), strides=stride, use_bias=False, name="res{}{}_branch2a".format(stage_char, block_char), **parameters)(x) y = keras_resnet.layers.BatchNormalization(axis=axis, epsilon=1e-5, 
freeze=freeze_bn, name="bn{}{}_branch2a".format(stage_char, block_char))(y) - y = keras.layers.Activation("relu", name="res{}{}_branch2a_relu".format(stage_char, block_char))(y) + y = tensorflow.keras.layers.Activation("relu", name="res{}{}_branch2a_relu".format(stage_char, block_char))(y) - y = keras.layers.ZeroPadding2D(padding=1, name="padding{}{}_branch2b".format(stage_char, block_char))(y) + y = tensorflow.keras.layers.ZeroPadding2D(padding=1, name="padding{}{}_branch2b".format(stage_char, block_char))(y) - y = keras.layers.Conv2D(filters, kernel_size, use_bias=False, name="res{}{}_branch2b".format(stage_char, block_char), **parameters)(y) + y = tensorflow.keras.layers.Conv2D(filters, kernel_size, use_bias=False, name="res{}{}_branch2b".format(stage_char, block_char), **parameters)(y) y = keras_resnet.layers.BatchNormalization(axis=axis, epsilon=1e-5, freeze=freeze_bn, name="bn{}{}_branch2b".format(stage_char, block_char))(y) - y = keras.layers.Activation("relu", name="res{}{}_branch2b_relu".format(stage_char, block_char))(y) + y = tensorflow.keras.layers.Activation("relu", name="res{}{}_branch2b_relu".format(stage_char, block_char))(y) - y = keras.layers.Conv2D(filters * 4, (1, 1), use_bias=False, name="res{}{}_branch2c".format(stage_char, block_char), **parameters)(y) + y = tensorflow.keras.layers.Conv2D(filters * 4, (1, 1), use_bias=False, name="res{}{}_branch2c".format(stage_char, block_char), **parameters)(y) y = keras_resnet.layers.BatchNormalization(axis=axis, epsilon=1e-5, freeze=freeze_bn, name="bn{}{}_branch2c".format(stage_char, block_char))(y) if block == 0: - shortcut = keras.layers.Conv2D(filters * 4, (1, 1), strides=stride, use_bias=False, name="res{}{}_branch1".format(stage_char, block_char), **parameters)(x) + shortcut = tensorflow.keras.layers.Conv2D(filters * 4, (1, 1), strides=stride, use_bias=False, name="res{}{}_branch1".format(stage_char, block_char), **parameters)(x) shortcut = keras_resnet.layers.BatchNormalization(axis=axis, epsilon=1e-5, freeze=freeze_bn, name="bn{}{}_branch1".format(stage_char, block_char))(shortcut) else: shortcut = x - y = keras.layers.Add(name="res{}{}".format(stage_char, block_char))([y, shortcut]) + y = tensorflow.keras.layers.Add(name="res{}{}".format(stage_char, block_char))([y, shortcut]) - y = keras.layers.Activation("relu", name="res{}{}_relu".format(stage_char, block_char))(y) + y = tensorflow.keras.layers.Activation("relu", name="res{}{}_relu".format(stage_char, block_char))(y) return y diff --git a/keras_resnet/blocks/_3d.py b/keras_resnet/blocks/_3d.py index ba11f6e..0255131 100644 --- a/keras_resnet/blocks/_3d.py +++ b/keras_resnet/blocks/_3d.py @@ -7,8 +7,8 @@ This module implements a number of popular three-dimensional residual blocks. 
""" -import keras.layers -import keras.regularizers +import tensorflow.keras.layers +import tensorflow.keras.regularizers import keras_resnet.layers @@ -55,7 +55,7 @@ def basic_3d( else: stride = 2 - if keras.backend.image_data_format() == "channels_last": + if tensorflow.keras.backend.image_data_format() == "channels_last": axis = 3 else: axis = 1 @@ -68,30 +68,30 @@ def basic_3d( stage_char = str(stage + 2) def f(x): - y = keras.layers.ZeroPadding3D(padding=1, name="padding{}{}_branch2a".format(stage_char, block_char))(x) + y = tensorflow.keras.layers.ZeroPadding3D(padding=1, name="padding{}{}_branch2a".format(stage_char, block_char))(x) - y = keras.layers.Conv3D(filters, kernel_size, strides=stride, use_bias=False, name="res{}{}_branch2a".format(stage_char, block_char), **parameters)(y) + y = tensorflow.keras.layers.Conv3D(filters, kernel_size, strides=stride, use_bias=False, name="res{}{}_branch2a".format(stage_char, block_char), **parameters)(y) y = keras_resnet.layers.BatchNormalization(axis=axis, epsilon=1e-5, freeze=freeze_bn, name="bn{}{}_branch2a".format(stage_char, block_char))(y) - y = keras.layers.Activation("relu", name="res{}{}_branch2a_relu".format(stage_char, block_char))(y) + y = tensorflow.keras.layers.Activation("relu", name="res{}{}_branch2a_relu".format(stage_char, block_char))(y) - y = keras.layers.ZeroPadding3D(padding=1, name="padding{}{}_branch2b".format(stage_char, block_char))(y) + y = tensorflow.keras.layers.ZeroPadding3D(padding=1, name="padding{}{}_branch2b".format(stage_char, block_char))(y) - y = keras.layers.Conv3D(filters, kernel_size, use_bias=False, name="res{}{}_branch2b".format(stage_char, block_char), **parameters)(y) + y = tensorflow.keras.layers.Conv3D(filters, kernel_size, use_bias=False, name="res{}{}_branch2b".format(stage_char, block_char), **parameters)(y) y = keras_resnet.layers.BatchNormalization(axis=axis, epsilon=1e-5, freeze=freeze_bn, name="bn{}{}_branch2b".format(stage_char, block_char))(y) if block == 0: - shortcut = keras.layers.Conv3D(filters, (1, 1), strides=stride, use_bias=False, name="res{}{}_branch1".format(stage_char, block_char), **parameters)(x) + shortcut = tensorflow.keras.layers.Conv3D(filters, (1, 1), strides=stride, use_bias=False, name="res{}{}_branch1".format(stage_char, block_char), **parameters)(x) shortcut = keras_resnet.layers.BatchNormalization(axis=axis, epsilon=1e-5, freeze=freeze_bn, name="bn{}{}_branch1".format(stage_char, block_char))(shortcut) else: shortcut = x - y = keras.layers.Add(name="res{}{}".format(stage_char, block_char))([y, shortcut]) + y = tensorflow.keras.layers.Add(name="res{}{}".format(stage_char, block_char))([y, shortcut]) - y = keras.layers.Activation("relu", name="res{}{}_relu".format(stage_char, block_char))(y) + y = tensorflow.keras.layers.Activation("relu", name="res{}{}_relu".format(stage_char, block_char))(y) return y @@ -136,7 +136,7 @@ def bottleneck_3d( else: stride = 2 - if keras.backend.image_data_format() == "channels_last": + if tensorflow.keras.backend.image_data_format() == "channels_last": axis = 3 else: axis = 1 @@ -149,34 +149,34 @@ def bottleneck_3d( stage_char = str(stage + 2) def f(x): - y = keras.layers.Conv3D(filters, (1, 1), strides=stride, use_bias=False, name="res{}{}_branch2a".format(stage_char, block_char), **parameters)(x) + y = tensorflow.keras.layers.Conv3D(filters, (1, 1), strides=stride, use_bias=False, name="res{}{}_branch2a".format(stage_char, block_char), **parameters)(x) y = keras_resnet.layers.BatchNormalization(axis=axis, epsilon=1e-5, 
freeze=freeze_bn, name="bn{}{}_branch2a".format(stage_char, block_char))(y) - y = keras.layers.Activation("relu", name="res{}{}_branch2a_relu".format(stage_char, block_char))(y) + y = tensorflow.keras.layers.Activation("relu", name="res{}{}_branch2a_relu".format(stage_char, block_char))(y) - y = keras.layers.ZeroPadding3D(padding=1, name="padding{}{}_branch2b".format(stage_char, block_char))(y) + y = tensorflow.keras.layers.ZeroPadding3D(padding=1, name="padding{}{}_branch2b".format(stage_char, block_char))(y) - y = keras.layers.Conv3D(filters, kernel_size, use_bias=False, name="res{}{}_branch2b".format(stage_char, block_char), **parameters)(y) + y = tensorflow.keras.layers.Conv3D(filters, kernel_size, use_bias=False, name="res{}{}_branch2b".format(stage_char, block_char), **parameters)(y) y = keras_resnet.layers.BatchNormalization(axis=axis, epsilon=1e-5, freeze=freeze_bn, name="bn{}{}_branch2b".format(stage_char, block_char))(y) - y = keras.layers.Activation("relu", name="res{}{}_branch2b_relu".format(stage_char, block_char))(y) + y = tensorflow.keras.layers.Activation("relu", name="res{}{}_branch2b_relu".format(stage_char, block_char))(y) - y = keras.layers.Conv3D(filters * 4, (1, 1), use_bias=False, name="res{}{}_branch2c".format(stage_char, block_char), **parameters)(y) + y = tensorflow.keras.layers.Conv3D(filters * 4, (1, 1), use_bias=False, name="res{}{}_branch2c".format(stage_char, block_char), **parameters)(y) y = keras_resnet.layers.BatchNormalization(axis=axis, epsilon=1e-5, freeze=freeze_bn, name="bn{}{}_branch2c".format(stage_char, block_char))(y) if block == 0: - shortcut = keras.layers.Conv3D(filters * 4, (1, 1), strides=stride, use_bias=False, name="res{}{}_branch1".format(stage_char, block_char), **parameters)(x) + shortcut = tensorflow.keras.layers.Conv3D(filters * 4, (1, 1), strides=stride, use_bias=False, name="res{}{}_branch1".format(stage_char, block_char), **parameters)(x) shortcut = keras_resnet.layers.BatchNormalization(axis=axis, epsilon=1e-5, freeze=freeze_bn, name="bn{}{}_branch1".format(stage_char, block_char))(shortcut) else: shortcut = x - y = keras.layers.Add(name="res{}{}".format(stage_char, block_char))([y, shortcut]) + y = tensorflow.keras.layers.Add(name="res{}{}".format(stage_char, block_char))([y, shortcut]) - y = keras.layers.Activation("relu", name="res{}{}_relu".format(stage_char, block_char))(y) + y = tensorflow.keras.layers.Activation("relu", name="res{}{}_relu".format(stage_char, block_char))(y) return y diff --git a/keras_resnet/blocks/__init__.py b/keras_resnet/blocks/__init__.py index 11799d0..ea20781 100644 --- a/keras_resnet/blocks/__init__.py +++ b/keras_resnet/blocks/__init__.py @@ -8,8 +8,8 @@ """ from ._1d import ( - basic_1d, - bottleneck_1d + Basic1D, + Bottleneck1D ) from ._2d import ( diff --git a/keras_resnet/blocks/_time_distributed_2d.py b/keras_resnet/blocks/_time_distributed_2d.py index 31f5bcd..dc1a2fb 100644 --- a/keras_resnet/blocks/_time_distributed_2d.py +++ b/keras_resnet/blocks/_time_distributed_2d.py @@ -7,8 +7,8 @@ This module implements a number of popular time distributed two-dimensional residual blocks. 
""" -import keras.layers -import keras.regularizers +import tensorflow.keras.layers +import tensorflow.keras.regularizers import keras_resnet.layers @@ -57,7 +57,7 @@ def time_distributed_basic_2d( else: stride = 2 - if keras.backend.image_data_format() == "channels_last": + if tensorflow.keras.backend.image_data_format() == "channels_last": axis = 3 else: axis = 1 @@ -70,30 +70,30 @@ def time_distributed_basic_2d( stage_char = str(stage + 2) def f(x): - y = keras.layers.TimeDistributed(keras.layers.ZeroPadding2D(padding=1), name="padding{}{}_branch2a".format(stage_char, block_char))(x) + y = tensorflow.keras.layers.TimeDistributed(tensorflow.keras.layers.ZeroPadding2D(padding=1), name="padding{}{}_branch2a".format(stage_char, block_char))(x) - y = keras.layers.TimeDistributed(keras.layers.Conv2D(filters, kernel_size, strides=stride, use_bias=False, **parameters), name="res{}{}_branch2a".format(stage_char, block_char))(y) + y = tensorflow.keras.layers.TimeDistributed(tensorflow.keras.layers.Conv2D(filters, kernel_size, strides=stride, use_bias=False, **parameters), name="res{}{}_branch2a".format(stage_char, block_char))(y) - y = keras.layers.TimeDistributed(keras_resnet.layers.BatchNormalization(axis=axis, epsilon=1e-5, freeze=freeze_bn), name="bn{}{}_branch2a".format(stage_char, block_char))(y) + y = tensorflow.keras.layers.TimeDistributed(keras_resnet.layers.BatchNormalization(axis=axis, epsilon=1e-5, freeze=freeze_bn), name="bn{}{}_branch2a".format(stage_char, block_char))(y) - y = keras.layers.TimeDistributed(keras.layers.Activation("relu"), name="res{}{}_branch2a_relu".format(stage_char, block_char))(y) + y = tensorflow.keras.layers.TimeDistributed(tensorflow.keras.layers.Activation("relu"), name="res{}{}_branch2a_relu".format(stage_char, block_char))(y) - y = keras.layers.TimeDistributed(keras.layers.ZeroPadding2D(padding=1), name="padding{}{}_branch2b".format(stage_char, block_char))(y) + y = tensorflow.keras.layers.TimeDistributed(tensorflow.keras.layers.ZeroPadding2D(padding=1), name="padding{}{}_branch2b".format(stage_char, block_char))(y) - y = keras.layers.TimeDistributed(keras.layers.Conv2D(filters, kernel_size, use_bias=False, **parameters), name="res{}{}_branch2b".format(stage_char, block_char))(y) + y = tensorflow.keras.layers.TimeDistributed(tensorflow.keras.layers.Conv2D(filters, kernel_size, use_bias=False, **parameters), name="res{}{}_branch2b".format(stage_char, block_char))(y) - y = keras.layers.TimeDistributed(keras_resnet.layers.BatchNormalization(axis=axis, epsilon=1e-5, freeze=freeze_bn), name="bn{}{}_branch2b".format(stage_char, block_char))(y) + y = tensorflow.keras.layers.TimeDistributed(keras_resnet.layers.BatchNormalization(axis=axis, epsilon=1e-5, freeze=freeze_bn), name="bn{}{}_branch2b".format(stage_char, block_char))(y) if block == 0: - shortcut = keras.layers.TimeDistributed(keras.layers.Conv2D(filters, (1, 1), strides=stride, use_bias=False, **parameters), name="res{}{}_branch1".format(stage_char, block_char))(x) + shortcut = tensorflow.keras.layers.TimeDistributed(tensorflow.keras.layers.Conv2D(filters, (1, 1), strides=stride, use_bias=False, **parameters), name="res{}{}_branch1".format(stage_char, block_char))(x) - shortcut = keras.layers.TimeDistributed(keras_resnet.layers.BatchNormalization(axis=axis, epsilon=1e-5, freeze=freeze_bn), name="bn{}{}_branch1".format(stage_char, block_char))(shortcut) + shortcut = tensorflow.keras.layers.TimeDistributed(keras_resnet.layers.BatchNormalization(axis=axis, epsilon=1e-5, freeze=freeze_bn), 
name="bn{}{}_branch1".format(stage_char, block_char))(shortcut) else: shortcut = x - y = keras.layers.Add(name="res{}{}".format(stage_char, block_char))([y, shortcut]) + y = tensorflow.keras.layers.Add(name="res{}{}".format(stage_char, block_char))([y, shortcut]) - y = keras.layers.TimeDistributed(keras.layers.Activation("relu"), name="res{}{}_relu".format(stage_char, block_char))(y) + y = tensorflow.keras.layers.TimeDistributed(tensorflow.keras.layers.Activation("relu"), name="res{}{}_relu".format(stage_char, block_char))(y) return y @@ -140,7 +140,7 @@ def time_distributed_bottleneck_2d( else: stride = 2 - if keras.backend.image_data_format() == "channels_last": + if tensorflow.keras.backend.image_data_format() == "channels_last": axis = 3 else: axis = 1 @@ -153,34 +153,34 @@ def time_distributed_bottleneck_2d( stage_char = str(stage + 2) def f(x): - y = keras.layers.TimeDistributed(keras.layers.Conv2D(filters, (1, 1), strides=stride, use_bias=False, **parameters), name="res{}{}_branch2a".format(stage_char, block_char))(x) + y = tensorflow.keras.layers.TimeDistributed(tensorflow.keras.layers.Conv2D(filters, (1, 1), strides=stride, use_bias=False, **parameters), name="res{}{}_branch2a".format(stage_char, block_char))(x) - y = keras.layers.TimeDistributed(keras_resnet.layers.BatchNormalization(axis=axis, epsilon=1e-5, freeze=freeze_bn), name="bn{}{}_branch2a".format(stage_char, block_char))(y) + y = tensorflow.keras.layers.TimeDistributed(keras_resnet.layers.BatchNormalization(axis=axis, epsilon=1e-5, freeze=freeze_bn), name="bn{}{}_branch2a".format(stage_char, block_char))(y) - y = keras.layers.TimeDistributed(keras.layers.Activation("relu"), name="res{}{}_branch2a_relu".format(stage_char, block_char))(y) + y = tensorflow.keras.layers.TimeDistributed(tensorflow.keras.layers.Activation("relu"), name="res{}{}_branch2a_relu".format(stage_char, block_char))(y) - y = keras.layers.TimeDistributed(keras.layers.ZeroPadding2D(padding=1), name="padding{}{}_branch2b".format(stage_char, block_char))(y) + y = tensorflow.keras.layers.TimeDistributed(tensorflow.keras.layers.ZeroPadding2D(padding=1), name="padding{}{}_branch2b".format(stage_char, block_char))(y) - y = keras.layers.TimeDistributed(keras.layers.Conv2D(filters, kernel_size, use_bias=False, **parameters), name="res{}{}_branch2b".format(stage_char, block_char))(y) + y = tensorflow.keras.layers.TimeDistributed(tensorflow.keras.layers.Conv2D(filters, kernel_size, use_bias=False, **parameters), name="res{}{}_branch2b".format(stage_char, block_char))(y) - y = keras.layers.TimeDistributed(keras_resnet.layers.BatchNormalization(axis=axis, epsilon=1e-5, freeze=freeze_bn), name="bn{}{}_branch2b".format(stage_char, block_char))(y) + y = tensorflow.keras.layers.TimeDistributed(keras_resnet.layers.BatchNormalization(axis=axis, epsilon=1e-5, freeze=freeze_bn), name="bn{}{}_branch2b".format(stage_char, block_char))(y) - y = keras.layers.TimeDistributed(keras.layers.Activation("relu"), name="res{}{}_branch2b_relu".format(stage_char, block_char))(y) + y = tensorflow.keras.layers.TimeDistributed(tensorflow.keras.layers.Activation("relu"), name="res{}{}_branch2b_relu".format(stage_char, block_char))(y) - y = keras.layers.TimeDistributed(keras.layers.Conv2D(filters * 4, (1, 1), use_bias=False, **parameters), name="res{}{}_branch2c".format(stage_char, block_char))(y) + y = tensorflow.keras.layers.TimeDistributed(tensorflow.keras.layers.Conv2D(filters * 4, (1, 1), use_bias=False, **parameters), name="res{}{}_branch2c".format(stage_char, block_char))(y) - y = 
keras.layers.TimeDistributed(keras_resnet.layers.BatchNormalization(axis=axis, epsilon=1e-5, freeze=freeze_bn), name="bn{}{}_branch2c".format(stage_char, block_char))(y) + y = tensorflow.keras.layers.TimeDistributed(keras_resnet.layers.BatchNormalization(axis=axis, epsilon=1e-5, freeze=freeze_bn), name="bn{}{}_branch2c".format(stage_char, block_char))(y) if block == 0: - shortcut = keras.layers.TimeDistributed(keras.layers.Conv2D(filters * 4, (1, 1), strides=stride, use_bias=False, **parameters), name="res{}{}_branch1".format(stage_char, block_char))(x) + shortcut = tensorflow.keras.layers.TimeDistributed(tensorflow.keras.layers.Conv2D(filters * 4, (1, 1), strides=stride, use_bias=False, **parameters), name="res{}{}_branch1".format(stage_char, block_char))(x) - shortcut = keras.layers.TimeDistributed(keras.layers.BatchNormalization(axis=axis, epsilon=1e-5, freeze=freeze_bn), name="bn{}{}_branch1".format(stage_char, block_char))(shortcut) + shortcut = tensorflow.keras.layers.TimeDistributed(keras_resnet.layers.BatchNormalization(axis=axis, epsilon=1e-5, freeze=freeze_bn), name="bn{}{}_branch1".format(stage_char, block_char))(shortcut) else: shortcut = x - y = keras.layers.Add(name="res{}{}".format(stage_char, block_char))([y, shortcut]) + y = tensorflow.keras.layers.Add(name="res{}{}".format(stage_char, block_char))([y, shortcut]) - y = keras.layers.TimeDistributed(keras.layers.Activation("relu"), name="res{}{}_relu".format(stage_char, block_char))(y) + y = tensorflow.keras.layers.TimeDistributed(tensorflow.keras.layers.Activation("relu"), name="res{}{}_relu".format(stage_char, block_char))(y) return y diff --git a/keras_resnet/classifiers/_2d.py b/keras_resnet/classifiers/_2d.py index b9ac7a3..46d2a75 100644 --- a/keras_resnet/classifiers/_2d.py +++ b/keras_resnet/classifiers/_2d.py @@ -7,19 +7,19 @@ This module implements popular residual two-dimensional classifiers. """ -import keras.backend -import keras.layers -import keras.models -import keras.regularizers +import tensorflow.keras.backend +import tensorflow.keras.layers +import tensorflow.keras.models +import tensorflow.keras.regularizers import keras_resnet.models -class ResNet18(keras.models.Model): +class ResNet18(tensorflow.keras.models.Model): """ A :class:`ResNet18 ` object. - :param inputs: input tensor (e.g. an instance of `keras.layers.Input`) + :param inputs: input tensor (e.g. an instance of `tensorflow.keras.layers.Input`) Usage: @@ -27,7 +27,7 @@ class ResNet18(keras.models.Model): >>> shape, classes = (224, 224, 3), 1000 - >>> x = keras.layers.Input(shape) + >>> x = tensorflow.keras.layers.Input(shape) >>> model = keras_resnet.classifiers.ResNet18(x) @@ -36,18 +36,18 @@ class ResNet18(keras.models.Model): def __init__(self, inputs, classes): outputs = keras_resnet.models.ResNet18(inputs) - outputs = keras.layers.Flatten()(outputs.output) + outputs = tensorflow.keras.layers.Flatten()(outputs.output) - outputs = keras.layers.Dense(classes, activation="softmax")(outputs) + outputs = tensorflow.keras.layers.Dense(classes, activation="softmax")(outputs) super(ResNet18, self).__init__(inputs, outputs) -class ResNet34(keras.models.Model): +class ResNet34(tensorflow.keras.models.Model): """ A :class:`ResNet34 ` object. - :param inputs: input tensor (e.g. an instance of `keras.layers.Input`) + :param inputs: input tensor (e.g. 
an instance of `tensorflow.keras.layers.Input`) Usage: @@ -55,7 +55,7 @@ class ResNet34(keras.models.Model): >>> shape, classes = (224, 224, 3), 1000 - >>> x = keras.layers.Input(shape) + >>> x = tensorflow.keras.layers.Input(shape) >>> model = keras_resnet.classifiers.ResNet34(x) @@ -64,18 +64,18 @@ class ResNet34(keras.models.Model): def __init__(self, inputs, classes): outputs = keras_resnet.models.ResNet34(inputs) - outputs = keras.layers.Flatten()(outputs.output) + outputs = tensorflow.keras.layers.Flatten()(outputs.output) - outputs = keras.layers.Dense(classes, activation="softmax")(outputs) + outputs = tensorflow.keras.layers.Dense(classes, activation="softmax")(outputs) super(ResNet34, self).__init__(inputs, outputs) -class ResNet50(keras.models.Model): +class ResNet50(tensorflow.keras.models.Model): """ A :class:`ResNet50 ` object. - :param inputs: input tensor (e.g. an instance of `keras.layers.Input`) + :param inputs: input tensor (e.g. an instance of `tensorflow.keras.layers.Input`) Usage: @@ -83,7 +83,7 @@ class ResNet50(keras.models.Model): >>> shape, classes = (224, 224, 3), 1000 - >>> x = keras.layers.Input(shape) + >>> x = tensorflow.keras.layers.Input(shape) >>> model = keras_resnet.classifiers.ResNet50(x) @@ -92,18 +92,18 @@ class ResNet50(keras.models.Model): def __init__(self, inputs, classes): outputs = keras_resnet.models.ResNet50(inputs) - outputs = keras.layers.Flatten()(outputs.output) + outputs = tensorflow.keras.layers.Flatten()(outputs.output) - outputs = keras.layers.Dense(classes, activation="softmax")(outputs) + outputs = tensorflow.keras.layers.Dense(classes, activation="softmax")(outputs) super(ResNet50, self).__init__(inputs, outputs) -class ResNet101(keras.models.Model): +class ResNet101(tensorflow.keras.models.Model): """ A :class:`ResNet101 ` object. - :param inputs: input tensor (e.g. an instance of `keras.layers.Input`) + :param inputs: input tensor (e.g. an instance of `tensorflow.keras.layers.Input`) Usage: @@ -111,7 +111,7 @@ class ResNet101(keras.models.Model): >>> shape, classes = (224, 224, 3), 1000 - >>> x = keras.layers.Input(shape) + >>> x = tensorflow.keras.layers.Input(shape) >>> model = keras_resnet.classifiers.ResNet101(x) @@ -120,18 +120,18 @@ class ResNet101(keras.models.Model): def __init__(self, inputs, classes): outputs = keras_resnet.models.ResNet101(inputs) - outputs = keras.layers.Flatten()(outputs.output) + outputs = tensorflow.keras.layers.Flatten()(outputs.output) - outputs = keras.layers.Dense(classes, activation="softmax")(outputs) + outputs = tensorflow.keras.layers.Dense(classes, activation="softmax")(outputs) super(ResNet101, self).__init__(inputs, outputs) -class ResNet152(keras.models.Model): +class ResNet152(tensorflow.keras.models.Model): """ A :class:`ResNet152 ` object. - :param inputs: input tensor (e.g. an instance of `keras.layers.Input`) + :param inputs: input tensor (e.g. 
an instance of `tensorflow.keras.layers.Input`) Usage: @@ -139,7 +139,7 @@ class ResNet152(keras.models.Model): >>> shape, classes = (224, 224, 3), 1000 - >>> x = keras.layers.Input(shape) + >>> x = tensorflow.keras.layers.Input(shape) >>> model = keras_resnet.classifiers.ResNet152(x) @@ -149,18 +149,18 @@ class ResNet152(keras.models.Model): def __init__(self, inputs, classes): outputs = keras_resnet.models.ResNet152(inputs) - outputs = keras.layers.Flatten()(outputs.output) + outputs = tensorflow.keras.layers.Flatten()(outputs.output) - outputs = keras.layers.Dense(classes, activation="softmax")(outputs) + outputs = tensorflow.keras.layers.Dense(classes, activation="softmax")(outputs) super(ResNet152, self).__init__(inputs, outputs) -class ResNet200(keras.models.Model): +class ResNet200(tensorflow.keras.models.Model): """ A :class:`ResNet200 ` object. - :param inputs: input tensor (e.g. an instance of `keras.layers.Input`) + :param inputs: input tensor (e.g. an instance of `tensorflow.keras.layers.Input`) Usage: @@ -168,7 +168,7 @@ class ResNet200(keras.models.Model): >>> shape, classes = (224, 224, 3), 1000 - >>> x = keras.layers.Input(shape) + >>> x = tensorflow.keras.layers.Input(shape) >>> model = keras_resnet.classifiers.ResNet200(x) @@ -177,8 +177,8 @@ class ResNet200(keras.models.Model): def __init__(self, inputs, classes): outputs = keras_resnet.models.ResNet200(inputs) - outputs = keras.layers.Flatten()(outputs.output) + outputs = tensorflow.keras.layers.Flatten()(outputs.output) - outputs = keras.layers.Dense(classes, activation="softmax")(outputs) + outputs = tensorflow.keras.layers.Dense(classes, activation="softmax")(outputs) super(ResNet200, self).__init__(inputs, outputs) diff --git a/keras_resnet/layers/_batch_normalization.py b/keras_resnet/layers/_batch_normalization.py index 1946a3f..b8367b4 100644 --- a/keras_resnet/layers/_batch_normalization.py +++ b/keras_resnet/layers/_batch_normalization.py @@ -1,9 +1,9 @@ -import keras - - -class BatchNormalization(keras.layers.BatchNormalization): +import tensorflow.keras + + +class BatchNormalization(tensorflow.keras.layers.BatchNormalization): """ - Identical to keras.layers.BatchNormalization, but adds the option to freeze parameters. + Identical to tensorflow.keras.layers.BatchNormalization, but adds the option to freeze parameters. """ def __init__(self, freeze, *args, **kwargs): self.freeze = freeze @@ -12,11 +12,11 @@ def __init__(self, freeze, *args, **kwargs): # set to non-trainable if freeze is true self.trainable = not self.freeze - def call(self, *args, **kwargs): + def call(self, inputs, *args, **kwargs): # Force test mode if frozen, otherwise use default behaviour (i.e., training=None). if self.freeze: kwargs['training'] = False - return super(BatchNormalization, self).call(*args, **kwargs) + return super(BatchNormalization, self).call(inputs, *args, **kwargs) def get_config(self): config = super(BatchNormalization, self).get_config()
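The `freeze` flag is what gives this layer its fine-tuning behaviour: a frozen layer is marked non-trainable and is always called with `training=False`, so neither its scale/offset weights nor its moving statistics update. A minimal sketch of its use (shapes hypothetical):

.. code-block:: python

    import tensorflow.keras
    import keras_resnet.layers

    # Frozen BN: excluded from gradient updates and always run in inference
    # mode, which is the usual setting when fine-tuning a pretrained backbone.
    x = tensorflow.keras.layers.Input((224, 3))
    y = keras_resnet.layers.BatchNormalization(freeze=True, axis=-1)(x)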
diff --git a/keras_resnet/metrics.py b/keras_resnet/metrics.py index f642619..1b1a343 100644 --- a/keras_resnet/metrics.py +++ b/keras_resnet/metrics.py @@ -1,9 +1,9 @@ -import keras.metrics +import tensorflow.keras.metrics def top_1_categorical_error(y_true, y_pred): - return 1.0 - keras.metrics.top_k_categorical_accuracy(y_true, y_pred, 1) + return 1.0 - tensorflow.keras.metrics.top_k_categorical_accuracy(y_true, y_pred, 1) def top_5_categorical_error(y_true, y_pred): - return 1.0 - keras.metrics.top_k_categorical_accuracy(y_true, y_pred, 5) + return 1.0 - tensorflow.keras.metrics.top_k_categorical_accuracy(y_true, y_pred, 5) diff --git a/keras_resnet/models/_1d.py b/keras_resnet/models/_1d.py index e7485c1..36cf30f 100644 --- a/keras_resnet/models/_1d.py +++ b/keras_resnet/models/_1d.py @@ -55,58 +55,79 @@ class ResNet1D(keras.Model): """ def __init__( self, - inputs, blocks, block, include_top=True, classes=1000, freeze_bn=True, numerical_names=None, + name="ResNet1D", *args, **kwargs ): + super(ResNet1D, self).__init__(name=name, *args, **kwargs) + self.blocks = blocks + self.block = block + self.include_top = include_top + self.classes = classes + self.freeze_bn = freeze_bn + self.numerical_names = numerical_names + if keras.backend.image_data_format() == "channels_last": - axis = 3 + self.axis = -1 else: - axis = 1 + self.axis = 1 if numerical_names is None: - numerical_names = [True] * len(blocks) - - x = keras.layers.ZeroPadding1D(padding=3, name="padding_conv1")(inputs) - x = keras.layers.Conv1D(64, (7, 7), strides=(2, 2), use_bias=False, name="conv1")(x) - x = keras_resnet.layers.BatchNormalization(axis=axis, epsilon=1e-5, freeze=freeze_bn, name="bn_conv1")(x) - x = keras.layers.Activation("relu", name="conv1_relu")(x) - x = keras.layers.MaxPooling1D((3, 3), strides=(2, 2), padding="same", name="pool1")(x) - + self.numerical_names = [True] * len(blocks) + + self.zeropadding1d = keras.layers.ZeroPadding1D(padding=3, name="padding_conv1") + self.conv1d = keras.layers.Conv1D(64, 7, strides=2, use_bias=False, name="conv1") + self.batchnormalization = keras_resnet.layers.BatchNormalization(axis=self.axis, epsilon=1e-5, freeze=self.freeze_bn, name="bn_conv1") + self.activation = keras.layers.Activation("relu", name="conv1_relu") + self.maxpooling1d = keras.layers.MaxPooling1D(3, strides=2, padding="same", name="pool1") + self.globalaveragepooling1d = keras.layers.GlobalAveragePooling1D(name="pool5") + self.dense = keras.layers.Dense(self.classes, activation="softmax", name="fc1000") + + self.stagelist = [] + self.blocklist = [] features = 64 - - outputs = [] - for stage_id, iterations in enumerate(blocks): for block_id in range(iterations): - x = block( - features, - stage_id, - block_id, - numerical_name=(block_id > 0 and numerical_names[stage_id]), - freeze_bn=freeze_bn - )(x) - + curr_block = block(features, + stage_id, + block_id, + numerical_name=(block_id > 0 and self.numerical_names[stage_id]), + freeze_bn=self.freeze_bn + ) + self.blocklist.append(curr_block) + self.stagelist.append(self.blocklist) + self.blocklist = [] features *= 2 - + + + def call(self, inputs, training=False): + x = self.zeropadding1d(inputs) + x = self.conv1d(x) + x = self.batchnormalization(x) + x = self.activation(x) + x = self.maxpooling1d(x) + + 
outputs = list() + + for stage in self.stagelist: + for block in stage: + x = block(x) outputs.append(x) - if include_top: - assert classes > 0 - - x = keras.layers.GlobalAveragePooling1D(name="pool5")(x) - x = keras.layers.Dense(classes, activation="softmax", name="fc1000")(x) - - super(ResNet1D, self).__init__(inputs=inputs, outputs=x, *args, **kwargs) + if self.include_top: + assert self.classes > 0 + x = self.globalaveragepooling1d(x) + x = self.dense(x) + return x else: - # Else output each stages features - super(ResNet1D, self).__init__(inputs=inputs, outputs=outputs, *args, **kwargs) + return outputs class ResNet1D18(ResNet1D): @@ -137,14 +158,13 @@ class ResNet1D18(ResNet1D): >>> model.compile("adam", "categorical_crossentropy", ["accuracy"]) """ - def __init__(self, inputs, blocks=None, include_top=True, classes=1000, freeze_bn=False, *args, **kwargs): + def __init__(self, blocks=None, include_top=True, classes=1000, freeze_bn=False, *args, **kwargs): if blocks is None: blocks = [2, 2, 2, 2] super(ResNet1D18, self).__init__( - inputs, blocks, - block=keras_resnet.blocks.basic_1d, + block=keras_resnet.blocks.Basic1D, include_top=include_top, classes=classes, freeze_bn=freeze_bn, @@ -152,6 +172,8 @@ def __init__(self, inputs, blocks=None, include_top=True, classes=1000, freeze_b **kwargs ) + def call(self, inputs, training=False): + return super(ResNet1D18, self).call(inputs, training=training) class ResNet1D34(ResNet1D): """ @@ -181,21 +203,21 @@ class ResNet1D34(ResNet1D): >>> model.compile("adam", "categorical_crossentropy", ["accuracy"]) """ - def __init__(self, inputs, blocks=None, include_top=True, classes=1000, freeze_bn=False, *args, **kwargs): + def __init__(self, blocks=None, include_top=True, classes=1000, freeze_bn=False, **kwargs): if blocks is None: blocks = [3, 4, 6, 3] super(ResNet1D34, self).__init__( - inputs, blocks, - block=keras_resnet.blocks.basic_1d, + block=keras_resnet.blocks.Basic1D, include_top=include_top, classes=classes, freeze_bn=freeze_bn, - *args, **kwargs ) + def call(self, inputs, training=False): + return super(ResNet1D34, self).call(inputs, training=training) class ResNet1D50(ResNet1D): """ @@ -225,24 +247,24 @@ class ResNet1D50(ResNet1D): >>> model.compile("adam", "categorical_crossentropy", ["accuracy"]) """ - def __init__(self, inputs, blocks=None, include_top=True, classes=1000, freeze_bn=False, *args, **kwargs): + def __init__(self, blocks=None, include_top=True, classes=1000, freeze_bn=False, **kwargs): if blocks is None: blocks = [3, 4, 6, 3] numerical_names = [False, False, False, False] super(ResNet1D50, self).__init__( - inputs, blocks, numerical_names=numerical_names, - block=keras_resnet.blocks.bottleneck_1d, + block=keras_resnet.blocks.Bottleneck1D, include_top=include_top, classes=classes, freeze_bn=freeze_bn, - *args, **kwargs ) + def call(self, inputs, training=False): + return super(ResNet1D50, self).call(inputs, training=training) class ResNet1D101(ResNet1D): """ @@ -272,25 +294,24 @@ class ResNet1D101(ResNet1D): >>> model.compile("adam", "categorical_crossentropy", ["accuracy"]) """ - def __init__(self, inputs, blocks=None, include_top=True, classes=1000, freeze_bn=False, *args, **kwargs): + def __init__(self, blocks=None, include_top=True, classes=1000, freeze_bn=False, **kwargs): if blocks is None: blocks = [3, 4, 23, 3] numerical_names = [False, True, True, False] super(ResNet1D101, self).__init__( - inputs, blocks, numerical_names=numerical_names, - block=keras_resnet.blocks.bottleneck_1d, + block=keras_resnet.blocks.Bottleneck1D, include_top=include_top, classes=classes, freeze_bn=freeze_bn, - *args, **kwargs ) - + def 
call(self, inputs, training=False): + return super(ResNet1D101, self).call(inputs, training=training) class ResNet1D152(ResNet1D): """ Constructs a `keras.models.Model` according to the ResNet152 specifications. @@ -319,25 +340,24 @@ >>> model.compile("adam", "categorical_crossentropy", ["accuracy"]) """ - def __init__(self, inputs, blocks=None, include_top=True, classes=1000, freeze_bn=False, *args, **kwargs): + def __init__(self, blocks=None, include_top=True, classes=1000, freeze_bn=False, **kwargs): if blocks is None: blocks = [3, 8, 36, 3] numerical_names = [False, True, True, False] super(ResNet1D152, self).__init__( - inputs, blocks, numerical_names=numerical_names, - block=keras_resnet.blocks.bottleneck_1d, + block=keras_resnet.blocks.Bottleneck1D, include_top=include_top, classes=classes, freeze_bn=freeze_bn, - *args, **kwargs ) - + def call(self, inputs, training=False): + return super(ResNet1D152, self).call(inputs, training=training) class ResNet1D200(ResNet1D): """ Constructs a `keras.models.Model` according to the ResNet200 specifications. @@ -366,20 +386,21 @@ >>> model.compile("adam", "categorical_crossentropy", ["accuracy"]) """ - def __init__(self, inputs, blocks=None, include_top=True, classes=1000, freeze_bn=False, *args, **kwargs): + def __init__(self, blocks=None, include_top=True, classes=1000, freeze_bn=False, **kwargs): if blocks is None: blocks = [3, 24, 36, 3] numerical_names = [False, True, True, False] super(ResNet1D200, self).__init__( - inputs, blocks, numerical_names=numerical_names, - block=keras_resnet.blocks.bottleneck_1d, + block=keras_resnet.blocks.Bottleneck1D, include_top=include_top, classes=classes, freeze_bn=freeze_bn, - *args, **kwargs ) + + def call(self, inputs, training=False): + return super(ResNet1D200, self).call(inputs, training=training) \ No newline at end of file diff --git a/keras_resnet/models/_2d.py b/keras_resnet/models/_2d.py index 8d8f874..e9e490b 100644 --- a/keras_resnet/models/_2d.py +++ b/keras_resnet/models/_2d.py @@ -7,20 +7,20 @@ This module implements popular two-dimensional residual models. """ -import keras.backend -import keras.layers -import keras.models -import keras.regularizers +import tensorflow.keras.backend +import tensorflow.keras.layers +import tensorflow.keras.models +import tensorflow.keras.regularizers import keras_resnet.blocks import keras_resnet.layers -class ResNet2D(keras.Model): +class ResNet2D(tensorflow.keras.Model): """ - Constructs a `keras.models.Model` object using the given block count. + Constructs a `tensorflow.keras.models.Model` object using the given block count. - :param inputs: input tensor (e.g. an instance of `keras.layers.Input`) + :param inputs: input tensor (e.g. 
an instance of `tensorflow.keras.layers.Input`) :param blocks: the network’s residual architecture @@ -43,7 +43,7 @@ class ResNet2D(keras.Model): >>> shape, classes = (224, 224, 3), 1000 - >>> x = keras.layers.Input(shape) + >>> x = tensorflow.keras.layers.Input(shape) >>> blocks = [2, 2, 2, 2] @@ -65,7 +65,7 @@ def __init__( *args, **kwargs ): - if keras.backend.image_data_format() == "channels_last": + if tensorflow.keras.backend.image_data_format() == "channels_last": axis = 3 else: axis = 1 @@ -73,10 +73,10 @@ def __init__( if numerical_names is None: numerical_names = [True] * len(blocks) - x = keras.layers.Conv2D(64, (7, 7), strides=(2, 2), use_bias=False, name="conv1", padding="same")(inputs) + x = tensorflow.keras.layers.Conv2D(64, (7, 7), strides=(2, 2), use_bias=False, name="conv1", padding="same")(inputs) x = keras_resnet.layers.BatchNormalization(axis=axis, epsilon=1e-5, freeze=freeze_bn, name="bn_conv1")(x) - x = keras.layers.Activation("relu", name="conv1_relu")(x) - x = keras.layers.MaxPooling2D((3, 3), strides=(2, 2), padding="same", name="pool1")(x) + x = tensorflow.keras.layers.Activation("relu", name="conv1_relu")(x) + x = tensorflow.keras.layers.MaxPooling2D((3, 3), strides=(2, 2), padding="same", name="pool1")(x) features = 64 @@ -99,8 +99,8 @@ def __init__( if include_top: assert classes > 0 - x = keras.layers.GlobalAveragePooling2D(name="pool5")(x) - x = keras.layers.Dense(classes, activation="softmax", name="fc1000")(x) + x = tensorflow.keras.layers.GlobalAveragePooling2D(name="pool5")(x) + x = tensorflow.keras.layers.Dense(classes, activation="softmax", name="fc1000")(x) super(ResNet2D, self).__init__(inputs=inputs, outputs=x, *args, **kwargs) else: @@ -110,9 +110,9 @@ def __init__( class ResNet2D18(ResNet2D): """ - Constructs a `keras.models.Model` according to the ResNet18 specifications. + Constructs a `tensorflow.keras.models.Model` according to the ResNet18 specifications. - :param inputs: input tensor (e.g. an instance of `keras.layers.Input`) + :param inputs: input tensor (e.g. an instance of `tensorflow.keras.layers.Input`) :param blocks: the network’s residual architecture @@ -130,7 +130,7 @@ class ResNet2D18(ResNet2D): >>> shape, classes = (224, 224, 3), 1000 - >>> x = keras.layers.Input(shape) + >>> x = tensorflow.keras.layers.Input(shape) >>> model = keras_resnet.models.ResNet18(x, classes=classes) @@ -154,9 +154,9 @@ def __init__(self, inputs, blocks=None, include_top=True, classes=1000, freeze_b class ResNet2D34(ResNet2D): """ - Constructs a `keras.models.Model` according to the ResNet34 specifications. + Constructs a `tensorflow.keras.models.Model` according to the ResNet34 specifications. - :param inputs: input tensor (e.g. an instance of `keras.layers.Input`) + :param inputs: input tensor (e.g. an instance of `tensorflow.keras.layers.Input`) :param blocks: the network’s residual architecture @@ -174,7 +174,7 @@ class ResNet2D34(ResNet2D): >>> shape, classes = (224, 224, 3), 1000 - >>> x = keras.layers.Input(shape) + >>> x = tensorflow.keras.layers.Input(shape) >>> model = keras_resnet.models.ResNet34(x, classes=classes) @@ -198,9 +198,9 @@ def __init__(self, inputs, blocks=None, include_top=True, classes=1000, freeze_b class ResNet2D50(ResNet2D): """ - Constructs a `keras.models.Model` according to the ResNet50 specifications. + Constructs a `tensorflow.keras.models.Model` according to the ResNet50 specifications. - :param inputs: input tensor (e.g. an instance of `keras.layers.Input`) + :param inputs: input tensor (e.g. 
an instance of `tensorflow.keras.layers.Input`) :param blocks: the network’s residual architecture @@ -218,7 +218,7 @@ class ResNet2D50(ResNet2D): >>> shape, classes = (224, 224, 3), 1000 - >>> x = keras.layers.Input(shape) + >>> x = tensorflow.keras.layers.Input(shape) >>> model = keras_resnet.models.ResNet50(x) @@ -245,9 +245,9 @@ def __init__(self, inputs, blocks=None, include_top=True, classes=1000, freeze_b class ResNet2D101(ResNet2D): """ - Constructs a `keras.models.Model` according to the ResNet101 specifications. + Constructs a `tensorflow.keras.models.Model` according to the ResNet101 specifications. - :param inputs: input tensor (e.g. an instance of `keras.layers.Input`) + :param inputs: input tensor (e.g. an instance of `tensorflow.keras.layers.Input`) :param blocks: the network’s residual architecture @@ -265,7 +265,7 @@ class ResNet2D101(ResNet2D): >>> shape, classes = (224, 224, 3), 1000 - >>> x = keras.layers.Input(shape) + >>> x = tensorflow.keras.layers.Input(shape) >>> model = keras_resnet.models.ResNet101(x, classes=classes) @@ -292,9 +292,9 @@ def __init__(self, inputs, blocks=None, include_top=True, classes=1000, freeze_b class ResNet2D152(ResNet2D): """ - Constructs a `keras.models.Model` according to the ResNet152 specifications. + Constructs a `tensorflow.keras.models.Model` according to the ResNet152 specifications. - :param inputs: input tensor (e.g. an instance of `keras.layers.Input`) + :param inputs: input tensor (e.g. an instance of `tensorflow.keras.layers.Input`) :param blocks: the network’s residual architecture @@ -312,7 +312,7 @@ class ResNet2D152(ResNet2D): >>> shape, classes = (224, 224, 3), 1000 - >>> x = keras.layers.Input(shape) + >>> x = tensorflow.keras.layers.Input(shape) >>> model = keras_resnet.models.ResNet152(x, classes=classes) @@ -339,9 +339,9 @@ def __init__(self, inputs, blocks=None, include_top=True, classes=1000, freeze_b class ResNet2D200(ResNet2D): """ - Constructs a `keras.models.Model` according to the ResNet200 specifications. + Constructs a `tensorflow.keras.models.Model` according to the ResNet200 specifications. - :param inputs: input tensor (e.g. an instance of `keras.layers.Input`) + :param inputs: input tensor (e.g. an instance of `tensorflow.keras.layers.Input`) :param blocks: the network’s residual architecture @@ -359,7 +359,7 @@ class ResNet2D200(ResNet2D): >>> shape, classes = (224, 224, 3), 1000 - >>> x = keras.layers.Input(shape) + >>> x = tensorflow.keras.layers.Input(shape) >>> model = keras_resnet.models.ResNet200(x, classes=classes) diff --git a/keras_resnet/models/_3d.py b/keras_resnet/models/_3d.py index cd98a50..0bc95e5 100644 --- a/keras_resnet/models/_3d.py +++ b/keras_resnet/models/_3d.py @@ -7,20 +7,20 @@ This module implements popular three-dimensional residual models. """ -import keras.backend -import keras.layers -import keras.models -import keras.regularizers +import tensorflow.keras.backend +import tensorflow.keras.layers +import tensorflow.keras.models +import tensorflow.keras.regularizers import keras_resnet.blocks import keras_resnet.layers -class ResNet3D(keras.Model): +class ResNet3D(tensorflow.keras.Model): """ - Constructs a `keras.models.Model` object using the given block count. + Constructs a `tensorflow.keras.models.Model` object using the given block count. - :param inputs: input tensor (e.g. an instance of `keras.layers.Input`) + :param inputs: input tensor (e.g. 
an instance of `tensorflow.keras.layers.Input`) :param blocks: the network’s residual architecture @@ -43,7 +43,7 @@ class ResNet3D(keras.Model): >>> shape, classes = (224, 224, 3), 1000 - >>> x = keras.layers.Input(shape) + >>> x = tensorflow.keras.layers.Input(shape) >>> blocks = [2, 2, 2, 2] @@ -65,7 +65,7 @@ def __init__( *args, **kwargs ): - if keras.backend.image_data_format() == "channels_last": + if tensorflow.keras.backend.image_data_format() == "channels_last": axis = 3 else: axis = 1 @@ -73,11 +73,11 @@ def __init__( if numerical_names is None: numerical_names = [True] * len(blocks) - x = keras.layers.ZeroPadding3D(padding=3, name="padding_conv1")(inputs) - x = keras.layers.Conv3D(64, (7, 7), strides=(2, 2), use_bias=False, name="conv1")(x) + x = tensorflow.keras.layers.ZeroPadding3D(padding=3, name="padding_conv1")(inputs) + x = tensorflow.keras.layers.Conv3D(64, (7, 7, 7), strides=(2, 2, 2), use_bias=False, name="conv1")(x) x = keras_resnet.layers.BatchNormalization(axis=axis, epsilon=1e-5, freeze=freeze_bn, name="bn_conv1")(x) - x = keras.layers.Activation("relu", name="conv1_relu")(x) - x = keras.layers.MaxPooling3D((3, 3), strides=(2, 2), padding="same", name="pool1")(x) + x = tensorflow.keras.layers.Activation("relu", name="conv1_relu")(x) + x = tensorflow.keras.layers.MaxPooling3D((3, 3, 3), strides=(2, 2, 2), padding="same", name="pool1")(x) features = 64 @@ -100,8 +100,8 @@ def __init__( if include_top: assert classes > 0 - x = keras.layers.GlobalAveragePooling3D(name="pool5")(x) - x = keras.layers.Dense(classes, activation="softmax", name="fc1000")(x) + x = tensorflow.keras.layers.GlobalAveragePooling3D(name="pool5")(x) + x = tensorflow.keras.layers.Dense(classes, activation="softmax", name="fc1000")(x) super(ResNet3D, self).__init__(inputs=inputs, outputs=x, *args, **kwargs) else: @@ -111,9 +111,9 @@ def __init__( class ResNet3D18(ResNet3D): """ - Constructs a `keras.models.Model` according to the ResNet18 specifications. + Constructs a `tensorflow.keras.models.Model` according to the ResNet18 specifications. - :param inputs: input tensor (e.g. an instance of `keras.layers.Input`) + :param inputs: input tensor (e.g. an instance of `tensorflow.keras.layers.Input`) :param blocks: the network’s residual architecture @@ -131,7 +131,7 @@ class ResNet3D18(ResNet3D): >>> shape, classes = (224, 224, 3), 1000 - >>> x = keras.layers.Input(shape) + >>> x = tensorflow.keras.layers.Input(shape) >>> model = keras_resnet.models.ResNet18(x, classes=classes) @@ -155,9 +155,9 @@ def __init__(self, inputs, blocks=None, include_top=True, classes=1000, freeze_b class ResNet3D34(ResNet3D): """ - Constructs a `keras.models.Model` according to the ResNet34 specifications. + Constructs a `tensorflow.keras.models.Model` according to the ResNet34 specifications. - :param inputs: input tensor (e.g. an instance of `keras.layers.Input`) + :param inputs: input tensor (e.g. an instance of `tensorflow.keras.layers.Input`) :param blocks: the network’s residual architecture @@ -175,7 +175,7 @@ class ResNet3D34(ResNet3D): >>> shape, classes = (224, 224, 3), 1000 - >>> x = keras.layers.Input(shape) + >>> x = tensorflow.keras.layers.Input(shape) >>> model = keras_resnet.models.ResNet34(x, classes=classes) @@ -199,9 +199,9 @@ def __init__(self, inputs, blocks=None, include_top=True, classes=1000, freeze_b class ResNet3D50(ResNet3D): """ - Constructs a `keras.models.Model` according to the ResNet50 specifications. + Constructs a `tensorflow.keras.models.Model` according to the ResNet50 specifications. 
- :param inputs: input tensor (e.g. an instance of `keras.layers.Input`) + :param inputs: input tensor (e.g. an instance of `tensorflow.keras.layers.Input`) :param blocks: the network’s residual architecture @@ -219,7 +219,7 @@ class ResNet3D50(ResNet3D): >>> shape, classes = (224, 224, 3), 1000 - >>> x = keras.layers.Input(shape) + >>> x = tensorflow.keras.layers.Input(shape) >>> model = keras_resnet.models.ResNet50(x) @@ -246,9 +246,9 @@ def __init__(self, inputs, blocks=None, include_top=True, classes=1000, freeze_b class ResNet3D101(ResNet3D): """ - Constructs a `keras.models.Model` according to the ResNet101 specifications. + Constructs a `tensorflow.keras.models.Model` according to the ResNet101 specifications. - :param inputs: input tensor (e.g. an instance of `keras.layers.Input`) + :param inputs: input tensor (e.g. an instance of `tensorflow.keras.layers.Input`) :param blocks: the network’s residual architecture @@ -266,7 +266,7 @@ class ResNet3D101(ResNet3D): >>> shape, classes = (224, 224, 3), 1000 - >>> x = keras.layers.Input(shape) + >>> x = tensorflow.keras.layers.Input(shape) >>> model = keras_resnet.models.ResNet101(x, classes=classes) @@ -293,9 +293,9 @@ def __init__(self, inputs, blocks=None, include_top=True, classes=1000, freeze_b class ResNet3D152(ResNet3D): """ - Constructs a `keras.models.Model` according to the ResNet152 specifications. + Constructs a `tensorflow.keras.models.Model` according to the ResNet152 specifications. - :param inputs: input tensor (e.g. an instance of `keras.layers.Input`) + :param inputs: input tensor (e.g. an instance of `tensorflow.keras.layers.Input`) :param blocks: the network’s residual architecture @@ -313,7 +313,7 @@ class ResNet3D152(ResNet3D): >>> shape, classes = (224, 224, 3), 1000 - >>> x = keras.layers.Input(shape) + >>> x = tensorflow.keras.layers.Input(shape) >>> model = keras_resnet.models.ResNet152(x, classes=classes) @@ -340,9 +340,9 @@ def __init__(self, inputs, blocks=None, include_top=True, classes=1000, freeze_b class ResNet3D200(ResNet3D): """ - Constructs a `keras.models.Model` according to the ResNet200 specifications. + Constructs a `tensorflow.keras.models.Model` according to the ResNet200 specifications. - :param inputs: input tensor (e.g. an instance of `keras.layers.Input`) + :param inputs: input tensor (e.g. an instance of `tensorflow.keras.layers.Input`) :param blocks: the network’s residual architecture @@ -360,7 +360,7 @@ class ResNet3D200(ResNet3D): >>> shape, classes = (224, 224, 3), 1000 - >>> x = keras.layers.Input(shape) + >>> x = tensorflow.keras.layers.Input(shape) >>> model = keras_resnet.models.ResNet200(x, classes=classes) diff --git a/keras_resnet/models/_feature_pyramid_2d.py b/keras_resnet/models/_feature_pyramid_2d.py index fe3ce1a..2ec67d5 100644 --- a/keras_resnet/models/_feature_pyramid_2d.py +++ b/keras_resnet/models/_feature_pyramid_2d.py @@ -7,16 +7,16 @@ This module implements popular two-dimensional feature pyramid networks (FPNs). 
""" -import keras.backend -import keras.layers -import keras.models -import keras.regularizers +import tensorflow.keras.backend +import tensorflow.keras.layers +import tensorflow.keras.models +import tensorflow.keras.regularizers import keras_resnet.blocks import keras_resnet.layers -class FPN2D(keras.Model): +class FPN2D(tensorflow.keras.Model): def __init__( self, inputs, @@ -27,7 +27,7 @@ def __init__( *args, **kwargs ): - if keras.backend.image_data_format() == "channels_last": + if tensorflow.keras.backend.image_data_format() == "channels_last": axis = 3 else: axis = 1 @@ -35,10 +35,10 @@ def __init__( if numerical_names is None: numerical_names = [True] * len(blocks) - x = keras.layers.Conv2D(64, (7, 7), strides=(2, 2), use_bias=False, name="conv1", padding="same")(inputs) + x = tensorflow.keras.layers.Conv2D(64, (7, 7), strides=(2, 2), use_bias=False, name="conv1", padding="same")(inputs) x = keras_resnet.layers.BatchNormalization(axis=axis, epsilon=1e-5, freeze=freeze_bn, name="bn_conv1")(x) - x = keras.layers.Activation("relu", name="conv1_relu")(x) - x = keras.layers.MaxPooling2D((3, 3), strides=(2, 2), padding="same", name="pool1")(x) + x = tensorflow.keras.layers.Activation("relu", name="conv1_relu")(x) + x = tensorflow.keras.layers.MaxPooling2D((3, 3), strides=(2, 2), padding="same", name="pool1")(x) features = 64 @@ -60,7 +60,7 @@ def __init__( c2, c3, c4, c5 = outputs - pyramid_5 = keras.layers.Conv2D( + pyramid_5 = tensorflow.keras.layers.Conv2D( filters=256, kernel_size=1, strides=1, @@ -68,13 +68,13 @@ def __init__( name="c5_reduced" )(c5) - upsampled_p5 = keras.layers.UpSampling2D( + upsampled_p5 = tensorflow.keras.layers.UpSampling2D( interpolation="bilinear", name="p5_upsampled", size=(2, 2) )(pyramid_5) - pyramid_4 = keras.layers.Conv2D( + pyramid_4 = tensorflow.keras.layers.Conv2D( filters=256, kernel_size=1, strides=1, @@ -82,17 +82,17 @@ def __init__( name="c4_reduced" )(c4) - pyramid_4 = keras.layers.Add( + pyramid_4 = tensorflow.keras.layers.Add( name="p4_merged" )([upsampled_p5, pyramid_4]) - upsampled_p4 = keras.layers.UpSampling2D( + upsampled_p4 = tensorflow.keras.layers.UpSampling2D( interpolation="bilinear", name="p4_upsampled", size=(2, 2) )(pyramid_4) - pyramid_4 = keras.layers.Conv2D( + pyramid_4 = tensorflow.keras.layers.Conv2D( filters=256, kernel_size=3, strides=1, @@ -100,7 +100,7 @@ def __init__( name="p4" )(pyramid_4) - pyramid_3 = keras.layers.Conv2D( + pyramid_3 = tensorflow.keras.layers.Conv2D( filters=256, kernel_size=1, strides=1, @@ -108,17 +108,17 @@ def __init__( name="c3_reduced" )(c3) - pyramid_3 = keras.layers.Add( + pyramid_3 = tensorflow.keras.layers.Add( name="p3_merged" )([upsampled_p4, pyramid_3]) - upsampled_p3 = keras.layers.UpSampling2D( + upsampled_p3 = tensorflow.keras.layers.UpSampling2D( interpolation="bilinear", name="p3_upsampled", size=(2, 2) )(pyramid_3) - pyramid_3 = keras.layers.Conv2D( + pyramid_3 = tensorflow.keras.layers.Conv2D( filters=256, kernel_size=3, strides=1, @@ -126,7 +126,7 @@ def __init__( name="p3" )(pyramid_3) - pyramid_2 = keras.layers.Conv2D( + pyramid_2 = tensorflow.keras.layers.Conv2D( filters=256, kernel_size=1, strides=1, @@ -134,11 +134,11 @@ def __init__( name="c2_reduced" )(c2) - pyramid_2 = keras.layers.Add( + pyramid_2 = tensorflow.keras.layers.Add( name="p2_merged" )([upsampled_p3, pyramid_2]) - pyramid_2 = keras.layers.Conv2D( + pyramid_2 = tensorflow.keras.layers.Conv2D( filters=256, kernel_size=3, strides=1, @@ -146,7 +146,7 @@ def __init__( name="p2" )(pyramid_2) - pyramid_6 = 
keras.layers.MaxPooling2D(strides=2, name="p6")(pyramid_5) + pyramid_6 = tensorflow.keras.layers.MaxPooling2D(strides=2, name="p6")(pyramid_5) outputs = [ pyramid_2, diff --git a/keras_resnet/models/_time_distributed_2d.py b/keras_resnet/models/_time_distributed_2d.py index bb4947d..a952d70 100644 --- a/keras_resnet/models/_time_distributed_2d.py +++ b/keras_resnet/models/_time_distributed_2d.py @@ -7,10 +7,10 @@ This module implements popular time distributed two-dimensional residual networks. """ -import keras.backend -import keras.layers -import keras.models -import keras.regularizers +import tensorflow.keras.backend +import tensorflow.keras.layers +import tensorflow.keras.models +import tensorflow.keras.regularizers import keras_resnet.blocks import keras_resnet.layers @@ -18,9 +18,9 @@ def TimeDistributedResNet(inputs, blocks, block, include_top=True, classes=1000, freeze_bn=True, *args, **kwargs): """ - Constructs a time distributed `keras.models.Model` object using the given block count. + Constructs a time distributed `tensorflow.keras.models.Model` object using the given block count. - :param inputs: input tensor (e.g. an instance of `keras.layers.Input`) + :param inputs: input tensor (e.g. an instance of `tensorflow.keras.layers.Input`) :param blocks: the network’s residual architecture @@ -41,7 +41,7 @@ def TimeDistributedResNet(inputs, blocks, block, include_top=True, classes=1000, >>> shape, classes = (224, 224, 3), 1000 - >>> x = keras.layers.Input(shape) + >>> x = tensorflow.keras.layers.Input(shape) >>> blocks = [2, 2, 2, 2] @@ -49,24 +49,24 @@ def TimeDistributedResNet(inputs, blocks, block, include_top=True, classes=1000, >>> y = keras_resnet.models.TimeDistributedResNet(x, classes, blocks, blocks) - >>> y = keras.layers.TimeDistributed(keras.layers.Flatten())(y.output) + >>> y = tensorflow.keras.layers.TimeDistributed(tensorflow.keras.layers.Flatten())(y.output) - >>> y = keras.layers.TimeDistributed(keras.layers.Dense(classes, activation="softmax"))(y) + >>> y = tensorflow.keras.layers.TimeDistributed(tensorflow.keras.layers.Dense(classes, activation="softmax"))(y) - >>> model = keras.models.Model(x, y) + >>> model = tensorflow.keras.models.Model(x, y) >>> model.compile("adam", "categorical_crossentropy", ["accuracy"]) """ - if keras.backend.image_data_format() == "channels_last": + if tensorflow.keras.backend.image_data_format() == "channels_last": axis = 3 else: axis = 1 - x = keras.layers.TimeDistributed(keras.layers.ZeroPadding2D(padding=3), name="padding_conv1")(inputs) - x = keras.layers.TimeDistributed(keras.layers.Conv2D(64, (7, 7), strides=(2, 2), use_bias=False), name="conv1")(x) - x = keras.layers.TimeDistributed(keras_resnet.layers.BatchNormalization(axis=axis, epsilon=1e-5, freeze=freeze_bn), name="bn_conv1")(x) - x = keras.layers.TimeDistributed(keras.layers.Activation("relu"), name="conv1_relu")(x) - x = keras.layers.TimeDistributed(keras.layers.MaxPooling2D((3, 3), strides=(2, 2), padding="same"), name="pool1")(x) + x = tensorflow.keras.layers.TimeDistributed(tensorflow.keras.layers.ZeroPadding2D(padding=3), name="padding_conv1")(inputs) + x = tensorflow.keras.layers.TimeDistributed(tensorflow.keras.layers.Conv2D(64, (7, 7), strides=(2, 2), use_bias=False), name="conv1")(x) + x = tensorflow.keras.layers.TimeDistributed(keras_resnet.layers.BatchNormalization(axis=axis, epsilon=1e-5, freeze=freeze_bn), name="bn_conv1")(x) + x = tensorflow.keras.layers.TimeDistributed(tensorflow.keras.layers.Activation("relu"), name="conv1_relu")(x) + x = 
tensorflow.keras.layers.TimeDistributed(tensorflow.keras.layers.MaxPooling2D((3, 3), strides=(2, 2), padding="same"), name="pool1")(x) features = 64 @@ -82,20 +82,20 @@ def TimeDistributedResNet(inputs, blocks, block, include_top=True, classes=1000, if include_top: assert classes > 0 - x = keras.layers.TimeDistributed(keras.layers.GlobalAveragePooling2D(), name="pool5")(x) - x = keras.layers.TimeDistributed(keras.layers.Dense(classes, activation="softmax"), name="fc1000")(x) + x = tensorflow.keras.layers.TimeDistributed(tensorflow.keras.layers.GlobalAveragePooling2D(), name="pool5")(x) + x = tensorflow.keras.layers.TimeDistributed(tensorflow.keras.layers.Dense(classes, activation="softmax"), name="fc1000")(x) - return keras.models.Model(inputs=inputs, outputs=x, *args, **kwargs) + return tensorflow.keras.models.Model(inputs=inputs, outputs=x, *args, **kwargs) else: # Else output each stages features - return keras.models.Model(inputs=inputs, outputs=outputs, *args, **kwargs) + return tensorflow.keras.models.Model(inputs=inputs, outputs=outputs, *args, **kwargs) def TimeDistributedResNet18(inputs, blocks=None, include_top=True, classes=1000, *args, **kwargs): """ - Constructs a time distributed `keras.models.Model` according to the ResNet18 specifications. + Constructs a time distributed `tensorflow.keras.models.Model` according to the ResNet18 specifications. - :param inputs: input tensor (e.g. an instance of `keras.layers.Input`) + :param inputs: input tensor (e.g. an instance of `tensorflow.keras.layers.Input`) :param blocks: the network’s residual architecture @@ -111,15 +111,15 @@ def TimeDistributedResNet18(inputs, blocks=None, include_top=True, classes=1000, >>> shape, classes = (224, 224, 3), 1000 - >>> x = keras.layers.Input(shape) + >>> x = tensorflow.keras.layers.Input(shape) >>> y = keras_resnet.models.TimeDistributedResNet18(x) - >>> y = keras.layers.TimeDistributed(keras.layers.Flatten())(y.output) + >>> y = tensorflow.keras.layers.TimeDistributed(tensorflow.keras.layers.Flatten())(y.output) - >>> y = keras.layers.TimeDistributed(keras.layers.Dense(classes, activation="softmax"))(y) + >>> y = tensorflow.keras.layers.TimeDistributed(tensorflow.keras.layers.Dense(classes, activation="softmax"))(y) - >>> model = keras.models.Model(x, y) + >>> model = tensorflow.keras.models.Model(x, y) >>> model.compile("adam", "categorical_crossentropy", ["accuracy"]) """ @@ -131,9 +131,9 @@ def TimeDistributedResNet18(inputs, blocks=None, include_top=True, classes=1000, def TimeDistributedResNet34(inputs, blocks=None, include_top=True, classes=1000, *args, **kwargs): """ - Constructs a time distributed `keras.models.Model` according to the ResNet34 specifications. + Constructs a time distributed `tensorflow.keras.models.Model` according to the ResNet34 specifications. - :param inputs: input tensor (e.g. an instance of `keras.layers.Input`) + :param inputs: input tensor (e.g. 
an instance of `tensorflow.keras.layers.Input`) :param blocks: the network’s residual architecture @@ -149,15 +149,15 @@ def TimeDistributedResNet34(inputs, blocks=None, include_top=True, classes=1000, >>> shape, classes = (224, 224, 3), 1000 - >>> x = keras.layers.Input(shape) + >>> x = tensorflow.keras.layers.Input(shape) >>> y = keras_resnet.models.TimeDistributedResNet34(x) - >>> y = keras.layers.TimeDistributed(keras.layers.Flatten())(y.output) + >>> y = tensorflow.keras.layers.TimeDistributed(tensorflow.keras.layers.Flatten())(y.output) - >>> y = keras.layers.TimeDistributed(keras.layers.Dense(classes, activation="softmax"))(y) + >>> y = tensorflow.keras.layers.TimeDistributed(tensorflow.keras.layers.Dense(classes, activation="softmax"))(y) - >>> model = keras.models.Model(x, y) + >>> model = tensorflow.keras.models.Model(x, y) >>> model.compile("adam", "categorical_crossentropy", ["accuracy"]) """ @@ -169,9 +169,9 @@ def TimeDistributedResNet34(inputs, blocks=None, include_top=True, classes=1000, def TimeDistributedResNet50(inputs, blocks=None, include_top=True, classes=1000, *args, **kwargs): """ - Constructs a time distributed `keras.models.Model` according to the ResNet50 specifications. + Constructs a time distributed `tensorflow.keras.models.Model` according to the ResNet50 specifications. - :param inputs: input tensor (e.g. an instance of `keras.layers.Input`) + :param inputs: input tensor (e.g. an instance of `tensorflow.keras.layers.Input`) :param blocks: the network’s residual architecture @@ -185,15 +185,15 @@ def TimeDistributedResNet50(inputs, blocks=None, include_top=True, classes=1000, >>> shape, classes = (224, 224, 3), 1000 - >>> x = keras.layers.Input(shape) + >>> x = tensorflow.keras.layers.Input(shape) >>> y = keras_resnet.models.TimeDistributedResNet50(x) - >>> y = keras.layers.TimeDistributed(keras.layers.Flatten())(y.output) + >>> y = tensorflow.keras.layers.TimeDistributed(tensorflow.keras.layers.Flatten())(y.output) - >>> y = keras.layers.TimeDistributed(keras.layers.Dense(classes, activation="softmax"))(y) + >>> y = tensorflow.keras.layers.TimeDistributed(tensorflow.keras.layers.Dense(classes, activation="softmax"))(y) - >>> model = keras.models.Model(x, y) + >>> model = tensorflow.keras.models.Model(x, y) >>> model.compile("adam", "categorical_crossentropy", ["accuracy"]) """ @@ -205,9 +205,9 @@ def TimeDistributedResNet50(inputs, blocks=None, include_top=True, classes=1000, def TimeDistributedResNet101(inputs, blocks=None, include_top=True, classes=1000, *args, **kwargs): """ - Constructs a time distributed `keras.models.Model` according to the ResNet101 specifications. + Constructs a time distributed `tensorflow.keras.models.Model` according to the ResNet101 specifications. - :param inputs: input tensor (e.g. an instance of `keras.layers.Input`) + :param inputs: input tensor (e.g. 
an instance of `tensorflow.keras.layers.Input`) :param blocks: the network’s residual architecture @@ -223,15 +223,15 @@ def TimeDistributedResNet101(inputs, blocks=None, include_top=True, classes=1000 >>> shape, classes = (224, 224, 3), 1000 - >>> x = keras.layers.Input(shape) + >>> x = tensorflow.keras.layers.Input(shape) >>> y = keras_resnet.models.TimeDistributedResNet101(x) - >>> y = keras.layers.TimeDistributed(keras.layers.Flatten())(y.output) + >>> y = tensorflow.keras.layers.TimeDistributed(tensorflow.keras.layers.Flatten())(y.output) - >>> y = keras.layers.TimeDistributed(keras.layers.Dense(classes, activation="softmax"))(y) + >>> y = tensorflow.keras.layers.TimeDistributed(tensorflow.keras.layers.Dense(classes, activation="softmax"))(y) - >>> model = keras.models.Model(x, y) + >>> model = tensorflow.keras.models.Model(x, y) >>> model.compile("adam", "categorical_crossentropy", ["accuracy"]) """ @@ -243,9 +243,9 @@ def TimeDistributedResNet101(inputs, blocks=None, include_top=True, classes=1000 def TimeDistributedResNet152(inputs, blocks=None, include_top=True, classes=1000, *args, **kwargs): """ - Constructs a time distributed `keras.models.Model` according to the ResNet152 specifications. + Constructs a time distributed `tensorflow.keras.models.Model` according to the ResNet152 specifications. - :param inputs: input tensor (e.g. an instance of `keras.layers.Input`) + :param inputs: input tensor (e.g. an instance of `tensorflow.keras.layers.Input`) :param blocks: the network’s residual architecture @@ -261,15 +261,15 @@ def TimeDistributedResNet152(inputs, blocks=None, include_top=True, classes=1000 >>> shape, classes = (224, 224, 3), 1000 - >>> x = keras.layers.Input(shape) + >>> x = tensorflow.keras.layers.Input(shape) >>> y = keras_resnet.models.TimeDistributedResNet152(x) - >>> y = keras.layers.TimeDistributed(keras.layers.Flatten())(y.output) + >>> y = tensorflow.keras.layers.TimeDistributed(tensorflow.keras.layers.Flatten())(y.output) - >>> y = keras.layers.TimeDistributed(keras.layers.Dense(classes, activation="softmax"))(y) + >>> y = tensorflow.keras.layers.TimeDistributed(tensorflow.keras.layers.Dense(classes, activation="softmax"))(y) - >>> model = keras.models.Model(x, y) + >>> model = tensorflow.keras.models.Model(x, y) >>> model.compile("adam", "categorical_crossentropy", ["accuracy"]) """ @@ -281,9 +281,9 @@ def TimeDistributedResNet152(inputs, blocks=None, include_top=True, classes=1000 def TimeDistributedResNet200(inputs, blocks=None, include_top=True, classes=1000, *args, **kwargs): """ - Constructs a time distributed `keras.models.Model` according to the ResNet200 specifications. + Constructs a time distributed `tensorflow.keras.models.Model` according to the ResNet200 specifications. - :param inputs: input tensor (e.g. an instance of `keras.layers.Input`) + :param inputs: input tensor (e.g. 
an instance of `tensorflow.keras.layers.Input`) :param blocks: the network’s residual architecture @@ -299,15 +299,15 @@ def TimeDistributedResNet200(inputs, blocks=None, include_top=True, classes=1000 >>> shape, classes = (224, 224, 3), 1000 - >>> x = keras.layers.Input(shape) + >>> x = tensorflow.keras.layers.Input(shape) >>> y = keras_resnet.models.TimeDistributedResNet200(x) - >>> y = keras.layers.TimeDistributed(keras.layers.Flatten())(y.output) + >>> y = tensorflow.keras.layers.TimeDistributed(tensorflow.keras.layers.Flatten())(y.output) - >>> y = keras.layers.TimeDistributed(keras.layers.Dense(classes, activation="softmax"))(y) + >>> y = tensorflow.keras.layers.TimeDistributed(tensorflow.keras.layers.Dense(classes, activation="softmax"))(y) - >>> model = keras.models.Model(x, y) + >>> model = tensorflow.keras.models.Model(x, y) >>> model.compile("adam", "categorical_crossentropy", ["accuracy"]) """ diff --git a/tests/conftest.py b/tests/conftest.py index ebfd7b8..e3ddd8a 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -1,4 +1,4 @@ -import keras.layers +import tensorflow.keras.layers import pytest @@ -6,4 +6,4 @@ def x(): shape = (224, 224, 3) - return keras.layers.Input(shape) + return tensorflow.keras.layers.Input(shape) diff --git a/tools/export-caffe-weights.py b/tools/export-caffe-weights.py old mode 100755 new mode 100644 diff --git a/tools/import-caffe-weights.py b/tools/import-caffe-weights.py old mode 100755 new mode 100644 index dd84351..c56b9b2 --- a/tools/import-caffe-weights.py +++ b/tools/import-caffe-weights.py @@ -1,7 +1,7 @@ #!/usr/bin/env python import keras_resnet.models -import keras +import tensorflow.keras import h5py import argparse @@ -21,7 +21,7 @@ def create_model(resnet): if resnet not in valid: raise ValueError("Invalid resnet argument (valid: {}) : '{}'".format(valid, resnet)) - image = keras.layers.Input((None, None, 3)) + image = tensorflow.keras.layers.Input((None, None, 3)) if resnet == "resnet50": return keras_resnet.models.ResNet50(image) elif resnet == "resnet101": @@ -50,11 +50,11 @@ def parse_args(): # port each layer for index, l in enumerate(model.layers): - if isinstance(l, keras.layers.Conv2D): + if isinstance(l, tensorflow.keras.layers.Conv2D): l.set_weights([convert_conv_weights(weights.get(l.name).get("0"))]) - elif isinstance(l, keras.layers.Dense): + elif isinstance(l, tensorflow.keras.layers.Dense): l.set_weights(convert_dense_weights(weights.get(l.name).get("0"), weights.get(l.name).get("1"))) - elif isinstance(l, keras.layers.BatchNormalization): + elif isinstance(l, tensorflow.keras.layers.BatchNormalization): scale_name = l.name.replace("bn", "scale") bn_weights = weights.get(l.name) scale_weights = weights.get(scale_name)
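A note on the 1-D models converted earlier in this patch: with `inputs` removed from the constructors and the forward pass moved into `call()`, they now follow the subclass-style `keras.Model` API, so weights are created on the first call rather than at construction time. The following is a minimal usage sketch under that reading of the change; the class count, the input length, and the assumption that `keras_resnet.models` re-exports `ResNet1D152` are all illustrative:

.. code-block:: python

    >>> import numpy

    >>> import keras_resnet.models

    >>> # no input tensor at construction; the network is assembled in call()
    >>> model = keras_resnet.models.ResNet1D152(classes=10)

    >>> model.compile("adam", "categorical_crossentropy", ["accuracy"])

    >>> # weights are built on the first call; input is (batch, steps, channels)
    >>> y = model(numpy.zeros((1, 224, 1), dtype="float32"))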
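One detail in the three-dimensional stem is worth spelling out: `Conv3D` and `MaxPooling3D` require one kernel and stride entry per spatial dimension, which is why the migrated stem uses length-3 tuples such as `(7, 7, 7)` and `(2, 2, 2)`; the 2-tuples in the pre-migration lines would raise a `ValueError` at layer construction. A short self-contained check (the volumetric input shape is arbitrary):

.. code-block:: python

    >>> import tensorflow.keras

    >>> x = tensorflow.keras.layers.Input((32, 32, 32, 3))

    >>> y = tensorflow.keras.layers.Conv3D(64, (7, 7, 7), strides=(2, 2, 2), padding="same")(x)

    >>> y = tensorflow.keras.layers.MaxPooling3D((3, 3, 3), strides=(2, 2, 2), padding="same")(y)

    >>> tensorflow.keras.backend.int_shape(y)
    (None, 8, 8, 8, 64)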