Skip to content

Commit

Permalink
Implementing other backends and tests
Browse files Browse the repository at this point in the history
  • Loading branch information
jatinchowdhury18 committed Nov 13, 2024
1 parent 395a9cc commit bf557dd
Show file tree
Hide file tree
Showing 14 changed files with 522 additions and 398 deletions.
4 changes: 2 additions & 2 deletions RTNeural/ModelT.h
Original file line number Diff line number Diff line change
Expand Up @@ -77,8 +77,8 @@ namespace modelt_detail
json_parser::debug_print("Loading a no-op layer!", debug);
}

template <typename T, int in_size, int out_size>
void loadLayer(DenseT<T, in_size, out_size>& dense, int& json_stream_idx, const nlohmann::json& l,
template <typename T, int in_size, int out_size, bool has_bias>
void loadLayer(DenseT<T, in_size, out_size, has_bias>& dense, int& json_stream_idx, const nlohmann::json& l,
const std::string& type, int layerDims, bool debug)
{
using namespace json_parser;
Expand Down
13 changes: 13 additions & 0 deletions RTNeural/config.h
Original file line number Diff line number Diff line change
Expand Up @@ -13,6 +13,19 @@
#define RTNEURAL_DEFAULT_ALIGNMENT 16
#endif

// Determine the effective C++ language version.
// MSVC reports a stale __cplusplus unless /Zc:__cplusplus is set, so prefer
// _MSVC_LANG (which always tracks the real /std: switch) when it exists.
#if defined(_MSVC_LANG)
#define RTNEURAL_CPLUSPLUS _MSVC_LANG
#elif defined(__cplusplus)
#define RTNEURAL_CPLUSPLUS __cplusplus
#endif

// Feature flag for C++17 and an `if constexpr` shim for older standards.
// RTNEURAL_HAS_CPP17 is always defined (1 or 0) so `#if RTNEURAL_HAS_CPP17`
// is well-formed and does not trigger -Wundef on pre-C++17 builds.
#if defined(RTNEURAL_CPLUSPLUS) && RTNEURAL_CPLUSPLUS >= 201703L
#define RTNEURAL_HAS_CPP17 1
#define RTNEURAL_IF_CONSTEXPR if constexpr
#else
#define RTNEURAL_HAS_CPP17 0
#define RTNEURAL_IF_CONSTEXPR if
#endif

/**
Facilitate testing real-time safety with RealtimeSanitizer (RADSan)
Expand Down
18 changes: 15 additions & 3 deletions RTNeural/dense/dense.h
Original file line number Diff line number Diff line change
Expand Up @@ -63,6 +63,8 @@ template <typename T>
class Dense final : public Layer<T>
{
public:
static constexpr bool dense_has_bias = true;

/** Constructs a dense layer for a given input and output size. */
Dense(int in_size, int out_size)
: Layer<T>(in_size, out_size)
Expand Down Expand Up @@ -157,14 +159,15 @@ class Dense final : public Layer<T>
* Static implementation of a fully-connected (dense) layer,
* with no activation.
*/
template <typename T, int in_sizet, int out_sizet>
template <typename T, int in_sizet, int out_sizet, bool has_bias = true>
class DenseT
{
static constexpr auto weights_size = in_sizet * out_sizet;

public:
static constexpr auto in_size = in_sizet;
static constexpr auto out_size = out_sizet;
static constexpr bool dense_has_bias = has_bias;

DenseT()
{
Expand All @@ -188,10 +191,19 @@ class DenseT
RTNEURAL_REALTIME void reset() { }

/** Performs forward propagation for this layer. */
RTNEURAL_REALTIME inline void forward(const T (&ins)[in_size]) noexcept
template <bool b = has_bias>
RTNEURAL_REALTIME inline typename std::enable_if<b>::type forward(const T (&ins)[in_size]) noexcept
{
for(int i = 0; i < out_size; ++i)
outs[i] = std::inner_product(ins, ins + in_size, &weights[i * in_size], bias[i]);
}

/** Performs forward propagation for this layer (no bias). */
template <bool b = has_bias>
RTNEURAL_REALTIME inline typename std::enable_if<! b>::type forward(const T (&ins)[in_size]) noexcept
{
for(int i = 0; i < out_size; ++i)
outs[i] = std::inner_product(ins, ins + in_size, &weights[i * in_size], (T)0) + bias[i];
outs[i] = std::inner_product(ins, ins + in_size, &weights[i * in_size], (T)0);
}

/**
Expand Down
20 changes: 16 additions & 4 deletions RTNeural/dense/dense_eigen.h
Original file line number Diff line number Diff line change
Expand Up @@ -2,8 +2,8 @@
#define DENSEEIGEN_H_INCLUDED

#include "../Layer.h"
#include "../config.h"
#include "../common.h"
#include "../config.h"
#include <Eigen/Dense>

namespace RTNEURAL_NAMESPACE
Expand All @@ -17,6 +17,8 @@ template <typename T>
class Dense : public Layer<T>
{
public:
static constexpr bool dense_has_bias = true;

/** Constructs a dense layer for a given input and output size. */
Dense(int in_size, int out_size)
: Layer<T>(in_size, out_size)
Expand Down Expand Up @@ -123,18 +125,23 @@ template <typename T, int in_sizet, int out_sizet, bool has_bias = true>
class DenseT
{
using out_vec_type = Eigen::Matrix<T, out_sizet, 1>;
#if RTNEURAL_HAS_CPP17
using in_vec_type = typename std::conditional<has_bias, Eigen::Matrix<T, in_sizet + 1, 1>, Empty>::type;
#else
using in_vec_type = Eigen::Matrix<T, in_sizet + 1, 1>;
#endif
using mat_type = Eigen::Matrix<T, out_sizet, in_sizet + (has_bias ? 1 : 0)>;

public:
static constexpr auto in_size = in_sizet;
static constexpr auto out_size = out_sizet;
static constexpr bool dense_has_bias = has_bias;

DenseT()
: outs(outs_internal)
{
weights = mat_type::Zero();
if constexpr (has_bias)
RTNEURAL_IF_CONSTEXPR(has_bias)
{
ins_internal = in_vec_type::Zero();
ins_internal(in_size, 0) = (T)1;
Expand Down Expand Up @@ -167,7 +174,7 @@ class DenseT

/** Performs forward propagation for this layer (no bias). */
template <bool b = has_bias>
RTNEURAL_REALTIME inline typename std::enable_if<! b>::type forward(const Eigen::Matrix<T, in_size, 1>& ins) noexcept
RTNEURAL_REALTIME inline typename std::enable_if<!b>::type forward(const Eigen::Matrix<T, in_size, 1>& ins) noexcept
{
outs.noalias() = weights * ins;
}
Expand Down Expand Up @@ -202,7 +209,12 @@ class DenseT
* Sets the layer bias from a given array of size
* bias[out_size]
*/
RTNEURAL_REALTIME void setBias(const T* b)
#if RTNEURAL_HAS_CPP17
template <bool b = has_bias>
RTNEURAL_REALTIME inline typename std::enable_if<b>::type setBias(const T* b)
#else
RTNEURAL_REALTIME inline void setBias(const T* b)
#endif
{
for(int i = 0; i < out_size; ++i)
weights(i, in_size) = b[i];
Expand Down
Loading

0 comments on commit bf557dd

Please sign in to comment.