Skip to content

Commit

Permalink
fix static code analysis
Browse files Browse the repository at this point in the history
Signed-off-by: szc321 <[email protected]>
  • Loading branch information
szc321 committed Jun 20, 2023
1 parent 559f9b0 commit 20ee9a0
Show file tree
Hide file tree
Showing 2 changed files with 1 addition and 160 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -258,7 +258,7 @@ def _req_callback(self, req):
not_empty = self.probe()
self._queue.put_nowait(0)
if self.observer and not not_empty:
self.observer()
self.observer() # pylint: disable=E1102
except EOFError:
pass

Expand Down
159 changes: 0 additions & 159 deletions src/lava/proc/dense/models.py
Original file line number Diff line number Diff line change
Expand Up @@ -346,162 +346,3 @@ def run_spk(self):
)

self.recv_traces(s_in)


class AbstractPyDelayDenseModel(PyLoihiProcessModel):
    """Abstract Conn Process with Dense synaptic connections which incorporates
    delays into the Conn Process.

    Subclasses provide concrete `weights`, `delays` and `a_buff` arrays
    (floating- or fixed-point) and call `update_act` / `calc_act` from
    their `run_spk` implementations.
    """
    # 2D synaptic weight matrix, shape
    # (n_flat_output_neurons, n_flat_input_neurons).
    weights: np.ndarray = None
    # Per-synapse integer delays, same shape as `weights`.
    delays: np.ndarray = None
    # Activation buffer, shape (n_flat_output_neurons, max_delay + 1);
    # column k holds the activation to be emitted k time steps from now.
    a_buff: np.ndarray = None

    def calc_act(self, s_in) -> np.ndarray:
        """
        Calculate the activation matrix based on s_in by performing
        delay_wgts * s_in.

        Parameters
        ----------
        s_in : np.ndarray
            Flat input spike vector for the current time step.

        Returns
        -------
        np.ndarray
            Activation matrix of shape (n_flat_output_neurons, max_delay + 1).
        """
        # First calculating the activations through delay_wgts * s_in
        # This matrix is then summed across each row to get the
        # activations to the output neurons for different delays.
        # This activation vector is reshaped to a matrix of the form
        # (n_flat_output_neurons * (max_delay + 1), n_flat_output_neurons)
        # which is then transposed to get the activation matrix.
        return np.reshape(
            np.sum(self.get_delay_wgts_mat(self.weights,
                                           self.delays) * s_in, axis=1),
            (np.max(self.delays) + 1, self.weights.shape[0])).T

    @staticmethod
    def get_delay_wgts_mat(weights, delays) -> np.ndarray:
        """
        Create a matrix where the synaptic weights are separated
        by their corresponding delays. The first matrix contains all the
        weights, where the delay is equal to zero. The second matrix
        contains all the weights, where the delay is equal to one and so on.
        These matrices are then stacked together vertically.

        Returns
        -------
        np.ndarray
            2D matrix of form
            (num_flat_output_neurons * max_delay + 1, num_flat_input_neurons)
            where delay_wgts[
                k * num_flat_output_neurons : (k + 1) * num_flat_output_neurons,
                :
            ]
            contains the weights for all connections with a delay equal to k.
            This allows for the updating of the activation buffer and updating
            weights.
        """
        return np.vstack([
            np.where(delays == k, weights, 0)
            for k in range(np.max(delays) + 1)
        ])

    def update_act(self, s_in):
        """
        Updates the activations for the connection.
        Clears first column of a_buff and rolls them to the last column.
        Finally, calculates the activations for the current time step and adds
        them to a_buff.
        This order of operations ensures that delays of 0 correspond to
        the next time step.
        """
        self.a_buff[:, 0] = 0
        # Roll along the delay axis explicitly. A flattened np.roll (no axis)
        # happens to give the same result here only because column 0 was just
        # zeroed (the zeros wrap into the preceding row's last slot);
        # axis=1 states the intent and does not rely on that coincidence.
        self.a_buff = np.roll(self.a_buff, -1, axis=1)
        self.a_buff += self.calc_act(s_in)


@implements(proc=DelayDense, protocol=LoihiProtocol)
@requires(CPU)
@tag("floating_pt")
class PyDelayDenseModelFloat(AbstractPyDelayDenseModel):
    """Floating-point ProcessModel for DelayDense synaptic connections.

    A short, simple implementation intended for quick algorithmic
    prototyping without the nuances of a fixed-point model. DelayDense
    incorporates per-synapse delays into the Conn Process.
    """
    s_in: PyInPort = LavaPyType(PyInPort.VEC_DENSE, bool, precision=1)
    a_out: PyOutPort = LavaPyType(PyOutPort.VEC_DENSE, float)
    a_buff: np.ndarray = LavaPyType(np.ndarray, float)
    # Synaptic weights: 2D matrix of shape (num_flat_output_neurons,
    # num_flat_input_neurons), C-order (row major).
    weights: np.ndarray = LavaPyType(np.ndarray, float)
    # Per-synapse delays: 2D matrix of shape (num_flat_output_neurons,
    # num_flat_input_neurons), C-order (row major).
    delays: np.ndarray = LavaPyType(np.ndarray, int)
    num_message_bits: np.ndarray = LavaPyType(np.ndarray, int, precision=5)

    def run_spk(self):
        # Emit the activation buffered from dendritic accumulation at
        # time step t-1 before receiving. Sending first prevents deadlock
        # in networks with recurrent connectivity structures.
        self.a_out.send(self.a_buff[:, 0])
        spikes = self.s_in.recv()
        if self.num_message_bits.item() <= 0:
            # No payload bits configured: treat input as binary spikes.
            spikes = spikes.astype(bool)
        self.update_act(spikes)


@implements(proc=DelayDense, protocol=LoihiProtocol)
@requires(CPU)
@tag("bit_accurate_loihi", "fixed_pt")
class PyDelayDenseModelBitAcc(AbstractPyDelayDenseModel):
    """Bit-accurate ProcessModel for DelayDense synaptic connections.

    Mimics Loihi's hardware implementation of Dense bit-by-bit while
    incorporating delays into the Conn Process. Loihi 2 provides at most
    6 bits for delays, so a spike may be delayed by 0 to 63 time steps.
    """

    s_in: PyInPort = LavaPyType(PyInPort.VEC_DENSE, bool, precision=1)
    a_out: PyOutPort = LavaPyType(PyOutPort.VEC_DENSE, np.int32, precision=16)
    a_buff: np.ndarray = LavaPyType(np.ndarray, np.int32, precision=16)
    # Synaptic weights: 2D matrix of shape (num_flat_output_neurons,
    # num_flat_input_neurons), C-order (row major).
    weights: np.ndarray = LavaPyType(np.ndarray, np.int32, precision=8)
    delays: np.ndarray = LavaPyType(np.ndarray, np.int32, precision=6)
    num_message_bits: np.ndarray = LavaPyType(np.ndarray, int, precision=5)

    def __init__(self, proc_params):
        super().__init__(proc_params)
        # Weights are scaled exactly once, on the first run_spk() call.
        self.weights_set = False

    def run_spk(self):
        self.weight_exp: int = self.proc_params.get("weight_exp", 0)

        # No learning in this Process: weights are static, so the
        # clip/truncate scaling only needs to happen on the first time step.
        if not self.weights_set:
            num_weight_bits: int = self.proc_params.get("num_weight_bits", 8)
            sign_mode: SignMode = (self.proc_params.get("sign_mode")
                                   or determine_sign_mode(self.weights))

            clipped = clip_weights(self.weights, sign_mode, num_bits=8)
            self.weights = truncate_weights(clipped,
                                            sign_mode,
                                            num_weight_bits)
            self.weights_set = True

        # Enforce the Loihi 2 六-bit delay constraint (0..63).
        if np.max(self.delays) > 63:
            raise ValueError("DelayDense Process 'delays' expects values "
                             f"between 0 and 63 for Loihi, got "
                             f"{self.delays}.")

        # Emit the activation buffered from dendritic accumulation at
        # time step t-1 before receiving, preventing deadlock in
        # recurrent connectivity structures.
        self.a_out.send(self.a_buff[:, 0])
        spikes = self.s_in.recv()
        if self.num_message_bits.item() <= 0:
            # No payload bits configured: treat input as binary spikes.
            spikes = spikes.astype(bool)

        accum = self.calc_act(spikes)
        self.a_buff[:, 0] = 0
        self.a_buff = np.roll(self.a_buff, -1)
        shift = self.weight_exp
        # Apply the weight exponent as a bit shift, matching hardware.
        self.a_buff += (np.left_shift(accum, shift) if shift > 0
                        else np.right_shift(accum, -shift))

0 comments on commit 20ee9a0

Please sign in to comment.