
Add netx support for rf.Dense and rf_iz.Dense #110

Merged
10 commits, merged Nov 17, 2022
15 changes: 14 additions & 1 deletion src/lava/lib/dl/netx/blocks/models.py
@@ -11,7 +11,8 @@
from lava.magma.core.sync.protocols.loihi_protocol import LoihiProtocol
from lava.magma.core.resources import CPU

from lava.lib.dl.netx.blocks.process import Input, ComplexInput, Dense, Conv,\
ComplexDense


@requires(CPU)
@@ -47,12 +48,24 @@ def __init__(self, proc: AbstractProcess) -> None:
super().__init__(proc)


@implements(proc=ComplexInput, protocol=LoihiProtocol)
class PyComplexInputModel(AbstractPyBlockModel):
def __init__(self, proc: AbstractProcess) -> None:
super().__init__(proc)


@implements(proc=Dense, protocol=LoihiProtocol)
class PyDenseModel(AbstractPyBlockModel):
def __init__(self, proc: AbstractProcess) -> None:
super().__init__(proc)


@implements(proc=ComplexDense, protocol=LoihiProtocol)
class PyComplexDenseModel(AbstractPyBlockModel):
def __init__(self, proc: AbstractProcess) -> None:
super().__init__(proc)


@implements(proc=Conv, protocol=LoihiProtocol)
class PyConvModel(AbstractPyBlockModel):
def __init__(self, proc: AbstractProcess) -> None:
101 changes: 100 additions & 1 deletion src/lava/lib/dl/netx/blocks/process.py
@@ -108,7 +108,6 @@ def export_hdf5(self, handle: Union[h5py.File, h5py.Group]) -> None:

class Dense(AbstractBlock):
"""Dense layer block.

Parameters
----------
shape : tuple or list
@@ -164,6 +163,106 @@ def export_hdf5(self, handle: Union[h5py.File, h5py.Group]) -> None:
raise NotImplementedError


class ComplexDense(AbstractBlock):
"""Dense Complex layer block.

Parameters
----------
shape : tuple or list
shape of the layer block in (x, y, z)/WHC format.
neuron_params : dict, optional
dictionary of neuron parameters. Defaults to None.
weight_real : np.ndarray
real part of the synaptic weight.
weight_imag : np.ndarray
imaginary part of the synaptic weight.
has_graded_input : bool
flag for graded spikes at input. Defaults to False.
num_weight_bits_real : int
number of bits for the real weight. Defaults to 8.
num_weight_bits_imag : int
number of bits for the imaginary weight. Defaults to 8.
weight_exponent_real : int
exponent of the real weight. Defaults to 0.
weight_exponent_imag : int
exponent of the imaginary weight. Defaults to 0.
input_message_bits : int, optional
number of message bits in input spike. Defaults to 0 meaning unary
spike.
"""

def __init__(self, **kwargs: Union[dict, tuple, list, int, bool]) -> None:
super().__init__(**kwargs)

num_weight_bits_real = kwargs.pop('num_weight_bits_real', 8)
num_weight_bits_imag = kwargs.pop('num_weight_bits_imag', 8)

weight_exponent_real = kwargs.pop('weight_exponent_real', 0)
weight_exponent_imag = kwargs.pop('weight_exponent_imag', 0)
weight_real = kwargs.pop('weight_real')
weight_imag = kwargs.pop('weight_imag')

self.neuron = self._neuron(None)
self.real_synapse = DenseSynapse(
weights=weight_real,
weight_exp=weight_exponent_real,
num_weight_bits=num_weight_bits_real,
num_message_bits=self.input_message_bits,
)
self.imag_synapse = DenseSynapse(
weights=weight_imag,
weight_exp=weight_exponent_imag,
num_weight_bits=num_weight_bits_imag,
num_message_bits=self.input_message_bits,
)

if self.shape != self.real_synapse.a_out.shape:
raise RuntimeError(
f'Expected synapse output shape to be {self.shape}, '
f'found {self.real_synapse.a_out.shape}.'
)

self.inp = InPort(shape=self.real_synapse.s_in.shape)
self.out = OutPort(shape=self.neuron.s_out.shape)
self.inp.connect(self.real_synapse.s_in)
self.inp.connect(self.imag_synapse.s_in)
self.real_synapse.a_out.connect(self.neuron.a_real_in)
self.imag_synapse.a_out.connect(self.neuron.a_imag_in)
self.neuron.s_out.connect(self.out)

self._clean()

def export_hdf5(self, handle: Union[h5py.File, h5py.Group]) -> None:
raise NotImplementedError


class ComplexInput(AbstractBlock):
"""Input layer block.

Parameters
----------
shape : tuple or list
shape of the layer block in (x, y, z)/WHC format.
neuron_params : dict, optional
dictionary of neuron parameters. Defaults to None.
"""

def __init__(self, **kwargs: Union[dict, tuple, list, int, bool]) -> None:
super().__init__(**kwargs)
self.neuron = self._neuron(None)

self.inp = InPort(shape=self.neuron.a_real_in.shape)
self.inp.connect(self.neuron.a_real_in)
self.inp.connect(self.neuron.a_imag_in)
self.out = OutPort(shape=self.neuron.s_out.shape)
self.neuron.s_out.connect(self.out)

self._clean()

def export_hdf5(self, handle: Union[h5py.File, h5py.Group]) -> None:
raise NotImplementedError


class Conv(AbstractBlock):
"""Conv layer block.

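To make the new block's wiring concrete, here is a minimal construction sketch. It mirrors the params dict assembled in netx/hdf5.py later in this diff; the weights and RF neuron values are made up, and the sign_mode handling is assumed to work as it does for the real-valued Dense block.

```python
import numpy as np
from lava.proc.rf.process import RF
from lava.lib.dl.netx.blocks.process import ComplexDense
from lava.lib.dl.netx.utils import optimize_weight_bits

# illustrative RF neuron parameters (same shape as get_neuron_params output)
neuron_params = {'neuron_proc': RF, 'vth': 64, 'period': 124.0,
                 'alpha': 0.014, 'state_exp': 6, 'decay_bits': 12}

# made-up real/imag weights, quantized the same way create_dense does
w_re, bits_re, exp_re, sign_re = optimize_weight_bits(
    np.random.randint(-128, 128, size=(10, 20)))
w_im, bits_im, exp_im, sign_im = optimize_weight_bits(
    np.random.randint(-128, 128, size=(10, 20)))

block = ComplexDense(shape=(10,), neuron_params=neuron_params,
                     weight_real=w_re, weight_imag=w_im,
                     num_weight_bits_real=bits_re,
                     num_weight_bits_imag=bits_im,
                     weight_exponent_real=exp_re,
                     weight_exponent_imag=exp_im,
                     sign_mode_real=sign_re, sign_mode_imag=sign_im,
                     input_message_bits=0)
# block.inp fans out to both synapses; the neuron sums a_real_in / a_imag_in
```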
110 changes: 86 additions & 24 deletions src/lava/lib/dl/netx/hdf5.py
@@ -7,16 +7,19 @@
import warnings
from lava.magma.core.decorator import implements
from lava.magma.core.sync.protocols.loihi_protocol import LoihiProtocol
from lava.proc.rf.process import RF
from lava.proc.rf_iz.process import RF_IZ
import numpy as np
import h5py

from lava.magma.core.process.process import AbstractProcess
from lava.magma.core.process.ports.ports import InPort, OutPort
from lava.proc.lif.process import LIF, LIFReset
from lava.proc.sdn.process import Sigma, Delta, SigmaDelta
from lava.lib.dl.slayer.neuron.rf import neuron_params as get_rf_params
from lava.lib.dl.netx.utils import NetDict
from lava.lib.dl.netx.utils import optimize_weight_bits
from lava.lib.dl.netx.blocks.process import Input, Dense, Conv, ComplexDense
from lava.lib.dl.netx.blocks.models import AbstractPyBlockModel


@@ -170,6 +173,20 @@ def get_neuron_params(neuron_config: h5py.Group,
'state_exp': 6,
'num_message_bits': num_message_bits}
return neuron_params
elif "RF" in neuron_type:
if num_message_bits is None:
num_message_bits = 0 # default value
neuron_process = RF if "PHASE" in neuron_type else RF_IZ
neuron_params = get_rf_params(neuron_config)
neuron_params = {
'neuron_proc': neuron_process,
'vth': neuron_config['vThMant'],
'period': neuron_params['period'],
'alpha': neuron_params['decay'],
'state_exp': 6,
'decay_bits': 12
}
return neuron_params
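Illustrative round-trip of this new RF branch, with made-up vThMant/sinDecay/cosDecay values; get_rf_params (slayer.neuron.rf.neuron_params, fixed later in this diff) is assumed to expose 'period' and 'decay' derived as shown:

```python
import numpy as np
from lava.proc.rf.process import RF

p_scale = 1 << 12
config = {'type': 'RF_PHASE', 'vThMant': 64, 'sinDecay': 205, 'cosDecay': 4034}

sin_decay = config['sinDecay'] / p_scale
cos_decay = config['cosDecay'] / p_scale
decay = 1 - np.sqrt(sin_decay ** 2 + cos_decay ** 2)
frequency = np.arctan2(sin_decay, cos_decay) / 2 / np.pi

neuron_params = {
    'neuron_proc': RF,           # 'PHASE' in the type selects RF over RF_IZ
    'vth': config['vThMant'],
    'period': 1 / frequency,     # assumed definition of 'period'
    'alpha': decay,
    'state_exp': 6,
    'decay_bits': 12,
}
```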

@staticmethod
def _table_str(type_str: str = '',
@@ -293,34 +310,66 @@ def create_dense(layer_config: h5py.Group,
table entry string for process.
"""
shape = (np.prod(layer_config['shape']),)

neuron_params = Network.get_neuron_params(layer_config['neuron'],
reset_interval=reset_interval,
reset_offset=reset_offset)
# check for nested (complex) weights
if isinstance(layer_config['weight'], NetDict):
weight_real = layer_config['weight/real']
weight_imag = layer_config['weight/imag']
if weight_real.ndim == 1:
weight_real = weight_real.reshape(shape[0], -1)
weight_imag = weight_imag.reshape(shape[0], -1)

opt_weights_real = optimize_weight_bits(weight_real)
opt_weights_imag = optimize_weight_bits(weight_imag)
weight_real, num_weight_bits_real, weight_exponent_real,\
sign_mode_real = opt_weights_real
weight_imag, num_weight_bits_imag, weight_exponent_imag,\
sign_mode_imag = opt_weights_imag

# arguments for complex dense block
params = {'shape': shape,
'neuron_params': neuron_params,
'weight_real': weight_real,
'weight_imag': weight_imag,
'num_weight_bits_real': num_weight_bits_real,
'num_weight_bits_imag': num_weight_bits_imag,
'weight_exponent_real': weight_exponent_real,
'weight_exponent_imag': weight_exponent_imag,
'sign_mode_real': sign_mode_real,
'sign_mode_imag': sign_mode_imag,
'input_message_bits': input_message_bits}

proc = ComplexDense(**params)

else:
weight = layer_config['weight']
if weight.ndim == 1:
weight = weight.reshape(shape[0], -1)

opt_weights = optimize_weight_bits(weight)
weight, num_weight_bits, weight_exponent, sign_mode = opt_weights

# arguments for dense block
params = {'shape': shape,
'neuron_params': neuron_params,
'weight': weight,
'num_weight_bits': num_weight_bits,
'weight_exponent': weight_exponent,
'sign_mode': sign_mode,
'input_message_bits': input_message_bits}

# optional arguments
if 'bias' in layer_config.keys():
params['bias'] = layer_config['bias']

proc = Dense(**params)
table_entry = Network._table_str(type_str='Dense', width=1, height=1,
channel=shape[0],
delay='delay' in layer_config.keys())

return proc, table_entry
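For context, a sketch of the HDF5 layout that takes the new branch: when 'weight' is stored as a group holding 'real' and 'imag' datasets (as the slayer export later in this diff writes it), layer_config['weight'] comes back as a NetDict. The file and group paths here are hypothetical.

```python
import h5py
import numpy as np

with h5py.File('network.net', 'w') as f:
    layer = f.create_group('layer/0')
    layer.create_dataset('shape', data=np.array([10]))
    # nested weight group -> isinstance(layer_config['weight'], NetDict)
    layer.create_dataset('weight/real',
                         data=np.random.randint(-64, 64, size=(10, 20)))
    layer.create_dataset('weight/imag',
                         data=np.random.randint(-64, 64, size=(10, 20)))
```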

@staticmethod
def create_conv(layer_config: h5py.Group,
@@ -437,7 +486,6 @@ def _create(self) -> List[AbstractProcess]:
reset_offset = self.reset_offset + 1 # time starts from 1 in hardware
for i in range(num_layers):
layer_type = layer_config[i]['type']

if layer_type == 'input':
table = None
if 'neuron' in layer_config[i].keys():
@@ -510,7 +558,21 @@ def _create(self) -> List[AbstractProcess]:
else:
if len(layers) > 1:
layers[-2].out.connect(layers[-1].inp)

elif layer_type == "dense_comp":
layer, table = self.create_complex_dense(
layer_config=layer_config[i],
input_message_bits=input_message_bits
)
layers.append(layer)
input_message_bits = layer.output_message_bits
if flatten_next:
layers[-2].out.transpose([2, 1, 0]).flatten().connect(
layers[-1].inp
)
flatten_next = False
else:
if len(layers) > 1:
layers[-2].out.connect(layers[-1].inp)
elif layer_type == 'average':
raise NotImplementedError(f'{layer_type} is not implemented.')

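End to end, the usual netx entry point should now accept networks containing these layers; a minimal sketch (the file name is hypothetical, and the file must carry the 'dense_comp' layer type and RF neuron config produced by the slayer export):

```python
from lava.lib.dl import netx

net = netx.hdf5.Network(net_config='network.net')
print(net)  # layer table assembled from the _table_str entries
```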
5 changes: 3 additions & 2 deletions src/lava/lib/dl/netx/utils.py
@@ -7,6 +7,7 @@
import h5py
import numpy as np
from enum import IntEnum, unique
import torch


@unique
@@ -45,9 +46,9 @@ def __init__(
self.array_keys = [
'shape', 'stride', 'padding', 'dilation', 'groups', 'delay',
'iDecay', 'refDelay', 'scaleRho', 'tauRho', 'theta', 'vDecay',
'vThMant', 'wgtExp', 'sinDecay', 'cosDecay', 'complex_synapse'
]
self.copy_keys = ['weight', 'bias', 'weight/real', 'weight/imag']

def keys(self) -> h5py._hl.base.KeysViewHDF5:
return self.f.keys()
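A sketch of the dispatch these keys enable, assuming NetDict is constructed from the network file path the way netx.hdf5.Network uses it internally:

```python
from lava.lib.dl.netx.utils import NetDict

cfg = NetDict('network.net')              # hypothetical file
layer = cfg['layer'][0]
if isinstance(layer['weight'], NetDict):  # nested group: complex synapse
    w_real = layer['weight/real']
    w_imag = layer['weight/imag']
else:
    w = layer['weight']
```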
5 changes: 4 additions & 1 deletion src/lava/lib/dl/slayer/block/base.py
@@ -546,17 +546,19 @@ def weight(s):
def delay(d):
return torch.floor(d.delay).flatten().cpu().data.numpy()

# dense descriptors
handle.create_dataset(
'type', (1, ), 'S10', ['dense'.encode('ascii', 'ignore')]
)

handle.create_dataset('shape', data=np.array(self.neuron.shape))
handle.create_dataset('inFeatures', data=self.synapse.in_channels)
handle.create_dataset('outFeatures', data=self.synapse.out_channels)

if self.synapse.weight_norm_enabled:
self.synapse.disable_weight_norm()

if hasattr(self.synapse, 'imag'): # complex synapse
handle.create_dataset("complex_synapse", data=np.array(True))
handle.create_dataset(
'weight/real',
data=weight(self.synapse.real)
@@ -566,6 +568,7 @@ def delay(d):
data=weight(self.synapse.imag)
)
else:
handle.create_dataset("complex_synapse", data=np.array(False))
handle.create_dataset('weight', data=weight(self.synapse))

# bias
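A quick way to inspect what the changed export writes (paths hypothetical): every dense layer now records a complex_synapse flag, and complex layers store the weight as a real/imag group.

```python
import h5py

with h5py.File('network.net', 'r') as f:
    layer = f['layer/0']
    if layer['complex_synapse'][()]:
        print(layer['weight/real'][()].shape, layer['weight/imag'][()].shape)
    else:
        print(layer['weight'][()].shape)
```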
4 changes: 2 additions & 2 deletions src/lava/lib/dl/slayer/neuron/rf.py
@@ -39,8 +39,8 @@ def neuron_params(device_params, scale=1 << 6, p_scale=1 << 12):
dictionary of neuron parameters that can be used to initialize neuron
class.
"""
sin_decay = device_params['sinDecay'] / p_scale
cos_decay = device_params['cosDecay'] / p_scale
decay = 1 - np.sqrt(sin_decay ** 2 + cos_decay ** 2)
frequency = np.arctan2(sin_decay, cos_decay) / 2 / np.pi
return {
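The dropped trailing commas matter: they made sin_decay and cos_decay one-element tuples, so the decay and frequency expressions below failed on tuple arithmetic. A worked check with illustrative device parameters:

```python
import numpy as np

p_scale = 1 << 12
sin_decay = 205 / p_scale    # ~0.050
cos_decay = 4034 / p_scale   # ~0.985
decay = 1 - np.sqrt(sin_decay ** 2 + cos_decay ** 2)      # ~0.014
frequency = np.arctan2(sin_decay, cos_decay) / 2 / np.pi  # ~0.008 cycles/step
```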
4 changes: 2 additions & 2 deletions src/lava/lib/dl/slayer/neuron/rf_iz.py
@@ -41,8 +41,8 @@ def neuron_params(device_params, scale=1 << 6, p_scale=1 << 12):
dictionary of neuron parameters that can be used to initialize neuron
class.
"""
sin_decay = device_params['sinDecay'] / p_scale
cos_decay = device_params['cosDecay'] / p_scale
decay = 1 - np.sqrt(sin_decay ** 2 + cos_decay ** 2)
frequency = np.arctan2(sin_decay, cos_decay) / 2 / np.pi
return {
4 changes: 4 additions & 0 deletions src/lava/lib/dl/slayer/synapse/complex.py
@@ -105,6 +105,10 @@ def __init__(
weight_scale, weight_norm, pre_hook_fx
)

self.in_channels = self.real.in_channels
self.out_channels = self.real.out_channels
self.weight_norm_enabled = self.real.weight_norm_enabled


class Conv(ComplexLayer):
"""Convolution complex-synapse layer.
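These forwarded attributes are what let the dense export in slayer/block/base.py (above) read channel counts and weight-norm state off a complex synapse exactly as it does off a real one. A hedged sketch; the Dense constructor arguments are assumed:

```python
from lava.lib.dl.slayer.synapse import complex as csyn

syn = csyn.Dense(in_neurons=200, out_neurons=10)  # assumed signature
assert syn.in_channels == syn.real.in_channels
assert syn.out_channels == syn.real.out_channels
assert syn.weight_norm_enabled == syn.real.weight_norm_enabled
```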
Binary file added tests/lava/lib/dl/netx/gts/complex_dense/in.npy (not shown)
Binary file added tests/lava/lib/dl/netx/gts/complex_dense/out.npy (not shown)
Additional binary test files added (names not shown)