Introduce RealtimeSanitizer (RADSan) real-time safety checking (#121)
* Introduce configurable RADSan checks

* Fix test failure output and add comments to the RADSan config

* Re-enable workflow restrictions

* Add RADSan status badge to README

* Add realtime attribute to Layer methods (see the annotation sketch below)

---------

Co-authored-by: jatinchowdhury18 <[email protected]>
davidtrevelyan and jatinchowdhury18 authored Dec 1, 2023
1 parent 04cb333 commit 0485da9
Showing 37 changed files with 536 additions and 437 deletions.
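
The heart of the change is the new `RTNEURAL_REALTIME` annotation applied throughout the diffs below. Its definition lives in the new `RTNeural/config.h`, which this page does not render; the following is a minimal sketch of the pattern, assuming RADSan's documented `[[clang::realtime]]` attribute (the exact spelling in `config.h` is an assumption):

```cpp
// Hypothetical sketch, not the verbatim contents of RTNeural/config.h.
// When the library is configured with -DRTNEURAL_ENABLE_RADSAN=ON, tagged
// methods are assumed to carry RADSan's realtime attribute, so the sanitizer
// can flag locks, allocations, and other unsafe calls reached from them.
#if RTNEURAL_RADSAN_ENABLED
#define RTNEURAL_REALTIME [[clang::realtime]]
#else
#define RTNEURAL_REALTIME // expands to nothing in ordinary builds
#endif
```

In ordinary builds the macro disappears, so the annotations cost nothing outside sanitized CI runs.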
34 changes: 34 additions & 0 deletions .github/workflows/radsan.yml
@@ -0,0 +1,34 @@
name: RADSan Real-Time Safety

on:
  push:
    branches:
      - main
      - develop
  pull_request:
    branches:
      - main
      - develop

jobs:
  build_and_test:
    name: Check real-time safety with RADSan
    runs-on: ubuntu-latest
    container: realtimesanitizer/radsan-clang:latest
    steps:
      - name: Install CMake and Git
        run: apt-get update && apt-get install -y cmake git

      - name: Checkout code
        uses: actions/checkout@v2
        with:
          submodules: true

      - name: Configure
        run: cmake -Bbuild -DCMAKE_BUILD_TYPE=Release -DBUILD_TESTS=ON -DRTNEURAL_ENABLE_RADSAN=ON

      - name: Build
        run: cmake --build build --config Release --parallel

      - name: Test
        run: cd build && ctest -C Release --output-on-failure
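
Because the job runs in the public `realtimesanitizer/radsan-clang` container, the same check can be reproduced locally. A sketch assuming Docker is available; the image tag and the three build commands are copied verbatim from the workflow above, while the mount path `/rtneural` is arbitrary:

```sh
docker run --rm -v "$(pwd):/rtneural" -w /rtneural \
  realtimesanitizer/radsan-clang:latest bash -c '
    cmake -Bbuild -DCMAKE_BUILD_TYPE=Release -DBUILD_TESTS=ON -DRTNEURAL_ENABLE_RADSAN=ON &&
    cmake --build build --config Release --parallel &&
    cd build && ctest -C Release --output-on-failure'
```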
1 change: 1 addition & 0 deletions CMakeLists.txt
@@ -1,6 +1,7 @@
cmake_minimum_required(VERSION 3.5)
project(RTNeural VERSION 1.0.0)
include(cmake/CXXStandard.cmake)
include(cmake/Sanitizers.cmake)

add_subdirectory(RTNeural)

1 change: 1 addition & 0 deletions README.md
@@ -9,6 +9,7 @@
[![Tests](https://github.com/jatinchowdhury18/RTNeural/workflows/Tests/badge.svg)](https://github.com/jatinchowdhury18/RTNeural/actions/workflows/tests.yml)
[![Bench](https://github.com/jatinchowdhury18/RTNeural/workflows/Bench/badge.svg)](https://github.com/jatinchowdhury18/RTNeural/actions/workflows/bench.yml)
[![Examples](https://github.com/jatinchowdhury18/RTNeural/actions/workflows/examples.yml/badge.svg)](https://github.com/jatinchowdhury18/RTNeural/actions/workflows/examples.yml)
[![RADSan](https://github.com/jatinchowdhury18/RTNeural/actions/workflows/radsan.yml/badge.svg)](https://github.com/jatinchowdhury18/RTNeural/actions/workflows/radsan.yml)
[![codecov](https://codecov.io/gh/jatinchowdhury18/RTNeural/branch/main/graph/badge.svg?token=QBEBVSCQTW)](https://codecov.io/gh/jatinchowdhury18/RTNeural)
[![arXiv](https://img.shields.io/badge/arXiv-2106.03037-b31b1b.svg)](https://arxiv.org/abs/2106.03037)
[![License](https://img.shields.io/badge/License-BSD-blue.svg)](https://opensource.org/licenses/BSD-3-Clause)
4 changes: 4 additions & 0 deletions RTNeural/CMakeLists.txt
@@ -51,3 +51,7 @@ target_compile_definitions(RTNeural
    PUBLIC
    RTNEURAL_NAMESPACE=${RTNEURAL_NAMESPACE}
)

if(RTNEURAL_ENABLE_RADSAN)
    rtneural_radsan_configure(RTNeural)
endif()
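
The `rtneural_radsan_configure` function is defined in the new `cmake/Sanitizers.cmake` included from the top-level CMakeLists.txt, which this page does not render. A hypothetical sketch of such a helper, assuming RADSan's `-fsanitize=realtime` driver flag; the real implementation may differ:

```cmake
# Hypothetical implementation sketch; the real helper ships in
# cmake/Sanitizers.cmake but is not visible in this truncated diff.
function(rtneural_radsan_configure target)
    target_compile_options(${target} PUBLIC -fsanitize=realtime)
    target_link_options(${target} PUBLIC -fsanitize=realtime)
endfunction()
```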
7 changes: 4 additions & 3 deletions RTNeural/Model.h
@@ -9,6 +9,7 @@
#include "batchnorm/batchnorm.tpp"
#include "batchnorm/batchnorm2d.h"
#include "batchnorm/batchnorm2d.tpp"
#include "config.h"
#include "conv1d/conv1d.h"
#include "conv1d/conv1d.tpp"
#include "conv2d/conv2d.h"
@@ -71,14 +72,14 @@ class Model
}

/** Resets the state of the network layers. */
void reset()
RTNEURAL_REALTIME void reset()
{
for(auto* l : layers)
l->reset();
}

/** Performs forward propagation for this model. */
inline T forward(const T* input)
RTNEURAL_REALTIME inline T forward(const T* input)
{
layers[0]->forward(input, outs[0].data());

@@ -91,7 +92,7 @@
}

/** Returns a pointer to the output of the final layer in the network. */
inline const T* getOutputs() const noexcept
RTNEURAL_REALTIME inline const T* getOutputs() const noexcept
{
return outs.back().data();
}
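
With `reset`, `forward`, and `getOutputs` all tagged, RADSan can check the whole inference path from a caller's real-time context. A hypothetical caller-side sketch (not part of this commit), assuming a model with a single input and RADSan's `[[clang::realtime]]` attribute:

```cpp
#include <RTNeural/RTNeural.h>

// Hypothetical audio render callback: marking it realtime lets RADSan verify
// that everything it reaches, including Model::forward, stays allocation-free
// and lock-free on the hot path.
[[clang::realtime]] void renderBlock(RTNeural::Model<float>& model,
                                     const float* in, float* out, int numSamples)
{
    for(int n = 0; n < numSamples; ++n)
        out[n] = model.forward(in + n); // tagged RTNEURAL_REALTIME in this commit
}
```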
12 changes: 6 additions & 6 deletions RTNeural/ModelT.h
@@ -351,20 +351,20 @@ class ModelT

/** Get a reference to the layer at index `Index`. */
template <int Index>
auto& get() noexcept
RTNEURAL_REALTIME auto& get() noexcept
{
return std::get<Index>(layers);
}

/** Get a reference to the layer at index `Index`. */
template <int Index>
const auto& get() const noexcept
RTNEURAL_REALTIME const auto& get() const noexcept
{
return std::get<Index>(layers);
}

/** Resets the state of the network layers. */
void reset()
RTNEURAL_REALTIME void reset()
{
modelt_detail::forEachInTuple([&](auto& layer, size_t)
{ layer.reset(); },
@@ -373,7 +373,7 @@

/** Performs forward propagation for this model. */
template <int N = in_size>
inline typename std::enable_if<(N > 1), T>::type
RTNEURAL_REALTIME inline typename std::enable_if<(N > 1), T>::type
forward(const T* input)
{
#if RTNEURAL_USE_XSIMD
@@ -400,7 +400,7 @@

/** Performs forward propagation for this model. */
template <int N = in_size>
inline typename std::enable_if<N == 1, T>::type
RTNEURAL_REALTIME inline typename std::enable_if<N == 1, T>::type
forward(const T* input)
{
#if RTNEURAL_USE_XSIMD
@@ -426,7 +426,7 @@
}

/** Returns a pointer to the output of the final layer in the network. */
inline const T* getOutputs() const noexcept
RTNEURAL_REALTIME inline const T* getOutputs() const noexcept
{
return outs;
}
15 changes: 1 addition & 14 deletions RTNeural/RTNeural.h
@@ -5,21 +5,8 @@
// C++ STL includes
#include <limits>

#ifndef RTNEURAL_NAMESPACE
#define RTNEURAL_NAMESPACE RTNeural
#endif

// Handle default RTNeural defines
#ifndef RTNEURAL_DEFAULT_ALIGNMENT
#if _MSC_VER
#pragma message("RTNEURAL_DEFAULT_ALIGNMENT was not defined! Using default alignment = 16.")
#else
#warning "RTNEURAL_DEFAULT_ALIGNMENT was not defined! Using default alignment = 16."
#endif
#define RTNEURAL_DEFAULT_ALIGNMENT 16
#endif

// RTNeural includes:
#include "config.h"
#include "Model.h"
#include "ModelT.h"
#include "model_loader.h"
41 changes: 21 additions & 20 deletions RTNeural/activation/activation.h
@@ -2,6 +2,7 @@
#define ACTIVATION_H_INCLUDED

#include "../Layer.h"
#include "../config.h"
#include <functional>

namespace RTNEURAL_NAMESPACE
@@ -24,7 +25,7 @@ class Activation : public Layer<T>
std::string getName() const noexcept override { return name; }

/** Implements the forward propagation step for this layer. */
inline void forward(const T* input, T* out) noexcept override
RTNEURAL_REALTIME inline void forward(const T* input, T* out) noexcept override
{
for(int i = 0; i < Layer<T>::out_size; ++i)
out[i] = func(input[i]);
@@ -71,7 +72,7 @@ class TanhActivation final : public Activation<T>
}

/** Performs forward propagation for tanh activation. */
inline void forward(const T* input, T* out) noexcept override
RTNEURAL_REALTIME inline void forward(const T* input, T* out) noexcept override
{
for(int i = 0; i < Layer<T>::out_size; ++i)
out[i] = MathsProvider::tanh(input[i]);
@@ -94,10 +95,10 @@
/** Returns true since this layer is an activation layer. */
constexpr bool isActivation() const noexcept { return true; }

void reset() { }
RTNEURAL_REALTIME void reset() { }

/** Performs forward propagation for tanh activation. */
inline void forward(const T (&ins)[size]) noexcept
RTNEURAL_REALTIME inline void forward(const T (&ins)[size]) noexcept
{
for(int i = 0; i < size; ++i)
outs[i] = MathsProvider::tanh(ins[i]);
@@ -142,10 +143,10 @@ class ReLuActivationT
/** Returns true since this layer is an activation layer. */
constexpr bool isActivation() const noexcept { return true; }

void reset() { }
RTNEURAL_REALTIME void reset() { }

/** Performs forward propagation for ReLU activation. */
inline void forward(const T (&ins)[size]) noexcept
RTNEURAL_REALTIME inline void forward(const T (&ins)[size]) noexcept
{
for(int i = 0; i < size; ++i)
outs[i] = std::max((T)0, ins[i]);
@@ -190,10 +191,10 @@ class SigmoidActivationT
/** Returns true since this layer is an activation layer. */
constexpr bool isActivation() const noexcept { return true; }

void reset() { }
RTNEURAL_REALTIME void reset() { }

/** Performs forward propagation for sigmoid activation. */
inline void forward(const T (&ins)[size]) noexcept
RTNEURAL_REALTIME inline void forward(const T (&ins)[size]) noexcept
{
for(int i = 0; i < size; ++i)
outs[i] = MathsProvider::sigmoid(ins[i]);
@@ -222,7 +223,7 @@ class SoftmaxActivation final : public Activation<T>
}

/** Performs forward propagation for softmax activation. */
inline void forward(const T* input, T* out) noexcept override
RTNEURAL_REALTIME inline void forward(const T* input, T* out) noexcept override
{
T exp_sum = 0;
for(int i = 0; i < Layer<T>::out_size; ++i)
@@ -255,10 +256,10 @@ class SoftmaxActivationT
/** Returns true since this layer is an activation layer. */
constexpr bool isActivation() const noexcept { return true; }

void reset() { }
RTNEURAL_REALTIME void reset() { }

/** Performs forward propagation for softmax activation. */
inline void forward(const T (&ins)[size]) noexcept
RTNEURAL_REALTIME inline void forward(const T (&ins)[size]) noexcept
{
T exp_sum = 0;
for(int i = 0; i < size; ++i)
@@ -297,7 +298,7 @@ class ELuActivation final : public Activation<T>
}

/** Sets a custom value for the layer's "alpha" parameter. */
void set_alpha(T newAlpha) { alpha = newAlpha; }
RTNEURAL_REALTIME void set_alpha(T newAlpha) { alpha = newAlpha; }

private:
T alpha = (T)1;
@@ -319,11 +320,11 @@ class ELuActivationT
/** Returns true since this layer is an activation layer. */
constexpr bool isActivation() const noexcept { return true; }

void reset() { }
RTNEURAL_REALTIME void reset() { }

/** Performs forward propagation for elu activation. */
template <int A_N = AlphaNumerator, int A_D = AlphaDenominator>
inline typename std::enable_if<A_N == 1 && A_D == 1, void>::type
RTNEURAL_REALTIME inline typename std::enable_if<A_N == 1 && A_D == 1, void>::type
forward(const T (&ins)[size]) noexcept
{
for(int i = 0; i < size; ++i)
@@ -332,7 +333,7 @@

/** Performs forward propagation for elu activation (with custom alpha parameter). */
template <int A_N = AlphaNumerator, int A_D = AlphaDenominator>
inline typename std::enable_if<A_N != 1 || A_D != 1, void>::type
RTNEURAL_REALTIME inline typename std::enable_if<A_N != 1 || A_D != 1, void>::type
forward(const T (&ins)[size]) noexcept
{
static constexpr T alpha = (T)AlphaNumerator / (T)AlphaDenominator;
@@ -355,13 +356,13 @@ class PReLUActivation final : public Activation<T>
}

/** Performs forward propagation for prelu activation. */
inline void forward(const T* input, T* out) noexcept override
RTNEURAL_REALTIME inline void forward(const T* input, T* out) noexcept override
{
for(auto i = 0; i < Layer<T>::in_size; ++i)
out[i] = input[i] >= (T)0 ? input[i] : (input[i] * alpha[i]);
}

void setAlphaVals(const std::vector<T>& alphaVals)
RTNEURAL_REALTIME void setAlphaVals(const std::vector<T>& alphaVals)
{
if(alphaVals.size() == 1)
{
@@ -399,16 +400,16 @@ class PReLUActivationT
/** Returns false since this layer has weights even though it is an activation layer. */
constexpr bool isActivation() const noexcept { return false; }

void reset() { }
RTNEURAL_REALTIME void reset() { }

/** Performs forward propagation for prelu activation. */
inline void forward(const T (&ins)[size]) noexcept
RTNEURAL_REALTIME inline void forward(const T (&ins)[size]) noexcept
{
for(auto i = 0; i < size; ++i)
outs[i] = ins[i] >= (T)0 ? ins[i] : (ins[i] * alpha[i]);
}

void setAlphaVals(const std::vector<T>& alphaVals)
RTNEURAL_REALTIME void setAlphaVals(const std::vector<T>& alphaVals)
{
if(alphaVals.size() == 1)
{
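
For contrast with the annotated activations above, here is a hypothetical counter-example (not from this commit) of the failure mode these checks catch: under RADSan, an allocation reached from a realtime-attributed function is reported at runtime, which the new workflow surfaces as a test failure.

```cpp
#include <vector>

// Hypothetical counter-example: push_back may allocate, so RADSan would
// report a real-time violation when this runs in a RADSan-enabled build.
[[clang::realtime]] float unsafeSample(std::vector<float>& history, float x)
{
    history.push_back(x); // heap allocation on the real-time path
    return x;
}
```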
(Diffs for the remaining 29 changed files are not shown.)
