Skip to content

Commit

Permalink
Merge branch 'dev-0.12'
Browse files Browse the repository at this point in the history
  • Loading branch information
michaelgruner committed Aug 5, 2022
2 parents 40fbf63 + 620339f commit ff3d399
Show file tree
Hide file tree
Showing 16 changed files with 440 additions and 10 deletions.
4 changes: 4 additions & 0 deletions examples/r2i/meson.build
Original file line number Diff line number Diff line change
Expand Up @@ -41,3 +41,7 @@ endif
# Build the TensorRT examples only when that backend was enabled at configure time
if cdata.get('HAVE_TENSORRT') == true
subdir('tensorrt')
endif

# Build the NNAPI examples only when that backend was enabled at configure time
if cdata.get('HAVE_NNAPI') == true
subdir('nnapi')
endif
199 changes: 199 additions & 0 deletions examples/r2i/nnapi/inception.cc
Original file line number Diff line number Diff line change
@@ -0,0 +1,199 @@
/* Copyright (C) 2018-2021 RidgeRun, LLC (http://www.ridgerun.com)
* All Rights Reserved.
*
* The contents of this software are proprietary and confidential to RidgeRun,
* LLC. No part of this program may be photocopied, reproduced or translated
* into another programming language without prior written consent of
* RidgeRun, LLC. The user is free to modify the source code after obtaining
* a software license from RidgeRun. All source code changes must be provided
* back to RidgeRun without any encumbrance.
*/

#include <getopt.h>
#include <r2i/r2i.h>
#include <cstdlib>
#include <iostream>
#include <memory>
#include <string>
#include <vector>

#define STB_IMAGE_IMPLEMENTATION
#include "./stb_image.h"

#define STB_IMAGE_RESIZE_IMPLEMENTATION
#include "./stb_image_resize.h"

/* Prints the label index with the highest score in the prediction.
 * The prediction result is interpreted as a flat array of float scores,
 * one per label. */
void PrintTopPrediction(std::shared_ptr<r2i::IPrediction> prediction) {
  r2i::RuntimeError error;
  const int num_labels = prediction->GetResultSize() / sizeof(float);

  int best_index = 0;
  double best_score = -1;
  for (int label = 0; label < num_labels; ++label) {
    const double score = prediction->At(label, error);
    if (score > best_score) {
      best_score = score;
      best_index = label;
    }
  }

  std::cout << "Highest probability is label " << best_index << " ("
            << best_score << ")" << std::endl;
}

/* Prints the required command line arguments to stderr.
 * Fix: the example previously showed a TensorFlow .pb graph
 * (graph_inceptionv2_tensorflow.pb) although this example loads a TFLite
 * model through the NNAPI backend, and it omitted the -I/-O flags that the
 * usage text itself lists as required. */
void PrintUsage() {
  std::cerr << "Required arguments: "
            << "-i [JPG input_image] "
            << "-m [Inception TfLite Model] "
            << "-s [Model Input Size] "
            << "-I [Input Node] "
            << "-O [Output Node] \n"
            << " Example: "
            << " ./inception -i cat.jpg -m inception_v2.tflite "
            << "-s 224 -I input -O output" << std::endl;
}

/* Scales the 3-channel RGB input to reqwidth x reqheight and normalizes
 * every channel value from [0, 255] into [-1, 1].
 * Returns a newly allocated float buffer of reqwidth*reqheight*3 values. */
std::unique_ptr<float[]> PreProcessImage(const unsigned char *input, int width,
                                         int height, int reqwidth,
                                         int reqheight) {
  const int num_channels = 3;
  const int out_elems = num_channels * reqwidth * reqheight;

  std::unique_ptr<unsigned char[]> resized(new unsigned char[out_elems]);
  stbir_resize_uint8(input, width, height, 0, resized.get(), reqwidth,
                     reqheight, 0, num_channels);

  /* RGB = (RGB - Mean)*StdDev */
  std::unique_ptr<float[]> normalized(new float[out_elems]);
  for (int idx = 0; idx < out_elems; ++idx) {
    normalized[idx] = (static_cast<float>(resized[idx]) - 127.5) / 127.5;
  }

  return normalized;
}

std::unique_ptr<float[]> LoadImage(const std::string &path, int reqwidth,
int reqheight) {
int channels = 3;
int width, height, cp;

unsigned char *img = stbi_load(path.c_str(), &width, &height, &cp, channels);
if (!img) {
std::cerr << "The picture " << path << " could not be loaded";
return nullptr;
}

auto ret = PreProcessImage(img, width, height, reqwidth, reqheight);
free(img);

return ret;
}

/* Parses the command line flags into the provided output references.
 * Recognized flags: -i image, -m model, -p prediction index, -s input size,
 * -I input node, -O output node. Returns false on any unknown option. */
bool ParseArgs(int &argc, char *argv[], std::string &image_path,
               std::string &model_path, int &index, int &size,
               std::string &in_node, std::string &out_node) {
  int opt = 0;

  while ((opt = getopt(argc, argv, "i:m:p:s:I:O:")) != -1) {
    if (opt == 'i') {
      image_path = optarg;
    } else if (opt == 'm') {
      model_path = optarg;
    } else if (opt == 'p') {
      index = std::stoi(optarg);
    } else if (opt == 's') {
      size = std::stoi(optarg);
    } else if (opt == 'I') {
      in_node = optarg;
    } else if (opt == 'O') {
      out_node = optarg;
    } else {
      return false;
    }
  }

  return true;
}

/* Classifies a JPG image with an Inception TFLite model through the NNAPI
 * backend and prints the most probable label.
 * Fixes: LoadImage's nullptr return is now checked before the buffer is
 * handed to Configure, and the errors from SetModel and Configure are no
 * longer silently ignored. */
int main(int argc, char *argv[]) {
  r2i::RuntimeError error;
  std::string model_path;
  std::string image_path;
  std::string in_node;
  std::string out_node;
  int Index = 0;
  int size = 0;

  if (false == ParseArgs(argc, argv, image_path, model_path, Index, size,
                         in_node, out_node)) {
    PrintUsage();
    exit(EXIT_FAILURE);
  }

  if (image_path.empty() || model_path.empty()) {
    PrintUsage();
    exit(EXIT_FAILURE);
  }

  auto factory =
      r2i::IFrameworkFactory::MakeFactory(r2i::FrameworkCode::NNAPI, error);

  if (nullptr == factory) {
    std::cerr << "TFLite NNAPI backend could not be built " << error
              << std::endl;
    exit(EXIT_FAILURE);
  }

  std::cout << "Loading Model: " << model_path << std::endl;
  auto loader = factory->MakeLoader(error);
  std::shared_ptr<r2i::IModel> model = loader->Load(model_path, error);
  if (error.IsError()) {
    std::cerr << "Loader error: " << error << std::endl;
    exit(EXIT_FAILURE);
  }

  std::cout << "Setting model to engine" << std::endl;
  std::shared_ptr<r2i::IEngine> engine = factory->MakeEngine(error);
  error = engine->SetModel(model);
  if (error.IsError()) {
    std::cerr << "Engine model error: " << error << std::endl;
    exit(EXIT_FAILURE);
  }

  std::cout << "Loading image: " << image_path << std::endl;
  std::unique_ptr<float[]> image_data = LoadImage(image_path, size, size);
  if (nullptr == image_data) {
    /* LoadImage already reported the decoding failure */
    exit(EXIT_FAILURE);
  }

  std::cout << "Configuring frame" << std::endl;
  std::shared_ptr<r2i::IFrame> frame = factory->MakeFrame(error);

  error = frame->Configure(image_data.get(), size, size,
                           r2i::ImageFormat::Id::RGB, r2i::DataType::Id::FLOAT);
  if (error.IsError()) {
    std::cerr << "Frame configuration error: " << error << std::endl;
    exit(EXIT_FAILURE);
  }

  std::cout << "Starting engine" << std::endl;
  error = engine->Start();
  if (error.IsError()) {
    std::cerr << "Engine start error: " << error << std::endl;
    exit(EXIT_FAILURE);
  }

  std::cout << "Predicting..." << std::endl;
  std::vector<std::shared_ptr<r2i::IPrediction>> predictions;
  error = engine->Predict(frame, predictions);
  if (error.IsError()) {
    std::cerr << "Engine prediction error: " << error << std::endl;
    exit(EXIT_FAILURE);
  }

  /* This model only has one output */
  PrintTopPrediction(predictions[0]);

  std::cout << "Stopping engine" << std::endl;
  error = engine->Stop();
  if (error.IsError()) {
    std::cerr << "Engine stop error: " << error << std::endl;
    exit(EXIT_FAILURE);
  }

  return EXIT_SUCCESS;
}
11 changes: 11 additions & 0 deletions examples/r2i/nnapi/meson.build
Original file line number Diff line number Diff line change
@@ -0,0 +1,11 @@
# Compile examples
app_examples = [
  'inception',
]

# Build each example as a standalone, non-installed executable compiled from
# the source file named after it (<app>.cc).
# Fix: the source-name template was garbled by the page scrape
# ('@[email protected]'); restore the meson format placeholder '@0@.cc'.
foreach app : app_examples
  executable(app, '@0@.cc'.format(app),
    include_directories: [configinc, common_inc_dir],
    dependencies : [r2inference_lib_dep],
    install: false)
endforeach
15 changes: 12 additions & 3 deletions meson.build
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
project('r2inference', ['cpp'], default_options : ['cpp_std=c++11'],
version : '0.11.0',
version : '0.12.0',
meson_version : '>= 0.50',)

# Set project information
Expand Down Expand Up @@ -41,6 +41,7 @@ cdata.set('HAVE_TENSORRT', false)
cdata.set('HAVE_ONNXRT', false)
cdata.set('HAVE_ONNXRT_ACL', false)
cdata.set('HAVE_ONNXRT_OPENVINO', false)
cdata.set('HAVE_NNAPI', false)

# Define library dependencies for Tensorflow support
if get_option('enable-tensorflow')
Expand All @@ -51,11 +52,13 @@ if get_option('enable-tensorflow')
endif

# Define library dependencies for Tensorflow Lite support
if get_option('enable-tflite') or get_option('enable-coral')
if get_option('enable-tflite') or get_option('enable-coral') or get_option('enable-nnapi')
dl = cpp.find_library('dl', required: true)
dl_dep = declare_dependency(dependencies: dl)
tensorflow_lite = cpp.find_library('tensorflow-lite', required: true)
tensorflow_lite_dep = declare_dependency(dependencies: tensorflow_lite)
thread_dep = dependency('threads')
lib_tflite_dep = [tensorflow_lite_dep, thread_dep, common_deps]
lib_tflite_dep = [tensorflow_lite_dep, thread_dep, dl_dep, common_deps]
cdata.set('HAVE_TFLITE', true)
endif

Expand All @@ -70,6 +73,12 @@ if get_option('enable-coral')
cdata.set('HAVE_CORAL', true)
endif

# Define library dependencies for NNAPI TensorFlow Lite delegate
if get_option('enable-nnapi')
lib_nnapi_dep = [tensorflow_lite_dep, thread_dep, dl_dep, rt_dep, common_deps]
cdata.set('HAVE_NNAPI', true)
endif

# Define library dependencies for TensorRT support
if get_option('enable-tensorrt')

Expand Down
2 changes: 2 additions & 0 deletions meson_options.txt
Original file line number Diff line number Diff line change
Expand Up @@ -20,3 +20,5 @@ option('enable-onnxrt-acl', type : 'boolean', value: false,
description : 'Enable ONNX Runtime backend with ACL execution provider support')
option('enable-onnxrt-openvino', type : 'boolean', value: false,
description : 'Enable ONNX Runtime backend with OpenVINO execution provider support')
option('enable-nnapi', type : 'boolean', value : false,
description : 'Enable NNAPI delegate for NPU inference execution support')
11 changes: 8 additions & 3 deletions r2i/frameworks.h
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,7 @@
* RidgeRun, LLC. The user is free to modify the source code after obtaining
* a software license from RidgeRun. All source code changes must be provided
* back to RidgeRun without any encumbrance.
*/
*/

#ifndef R2I_FRAMEWORKS_H
#define R2I_FRAMEWORKS_H
Expand Down Expand Up @@ -57,12 +57,17 @@ enum FrameworkCode {
*/
TENSORRT,

/**
* Android's NPU delegate
*/
NNAPI,

/**
* Number of supported frameworks, mostly for testing purposes.
*/
MAX_FRAMEWORK
};

} //namespace r2i
} // namespace r2i

#endif //R2I_FRAMEWORKS
#endif // R2I_FRAMEWORKS
13 changes: 13 additions & 0 deletions r2i/iframeworkfactory.cc
Original file line number Diff line number Diff line change
Expand Up @@ -16,6 +16,7 @@

#include "config.h"
#include "coral/frameworkfactory.h"
#include "nnapi/frameworkfactory.h"
#include "onnxrt/frameworkfactory.h"
#include "onnxrt_acl/frameworkfactory.h"
#include "onnxrt_openvino/frameworkfactory.h"
Expand Down Expand Up @@ -81,6 +82,14 @@ MakeTensorRTFactory (RuntimeError &error) {
}
#endif // HAVE_TENSORRT

#ifdef HAVE_NNAPI
/* Creates the framework factory for the NNAPI backend.
 * The error argument is unused here but kept so the helper matches the
 * MakeFactory signature shared by all backend factory helpers in this map. */
static std::unique_ptr<IFrameworkFactory>
MakeNNAPIFactory (RuntimeError &error) {
return std::unique_ptr<nnapi::FrameworkFactory> (new
nnapi::FrameworkFactory);
}
#endif // HAVE_NNAPI

typedef std::function<std::unique_ptr<IFrameworkFactory>(RuntimeError &)>
MakeFactory;
const std::unordered_map<int, MakeFactory> frameworks ({
Expand Down Expand Up @@ -113,6 +122,10 @@ const std::unordered_map<int, MakeFactory> frameworks ({
{FrameworkCode::TENSORRT, MakeTensorRTFactory},
#endif //HAVE_TENSORRT

#ifdef HAVE_NNAPI
{FrameworkCode::NNAPI, MakeNNAPIFactory},
#endif //HAVE_NNAPI

});

std::unique_ptr<IFrameworkFactory>
Expand Down
5 changes: 5 additions & 0 deletions r2i/meson.build
Original file line number Diff line number Diff line change
Expand Up @@ -37,6 +37,11 @@ if cdata.get('HAVE_ONNXRT_OPENVINO') == true
r2inference_internal_dep += [internal_onnxrt_openvino_dep]
endif

if cdata.get('HAVE_NNAPI') == true
subdir('nnapi')
r2inference_internal_dep += [internal_nnapi_dep]
endif

# Define source code
r2inference_sources = [
'classification.cc',
Expand Down
45 changes: 45 additions & 0 deletions r2i/nnapi/engine.cc
Original file line number Diff line number Diff line change
@@ -0,0 +1,45 @@
/* Copyright (C) 2021 RidgeRun, LLC (http://www.ridgerun.com)
* All Rights Reserved.
*
* The contents of this software are proprietary and confidential to RidgeRun,
* LLC. No part of this program may be photocopied, reproduced or translated
* into another programming language without prior written consent of
* RidgeRun, LLC. The user is free to modify the source code after obtaining
* a software license from RidgeRun. All source code changes must be provided
* back to RidgeRun without any encumbrance.
*/

#include "r2i/nnapi/engine.h"

#include <tensorflow/lite/delegates/nnapi/nnapi_delegate.h>
#include <tensorflow/lite/tools/delegates/delegate_provider.h>
#include <tensorflow/lite/tools/evaluation/utils.h>

namespace r2i {
namespace nnapi {

/* Builds on the generic TFLite engine and defaults number_of_threads to 1.
 * NOTE(review): number_of_threads is presumably consumed by the tflite::Engine
 * base when it builds the interpreter — confirm against r2i/tflite/engine.h. */
Engine::Engine() : tflite::Engine() { this->number_of_threads = 1; }

/* Creates a StatefulNnApiDelegate targeting the "vsi-npu" accelerator and
 * applies it to the interpreter graph.
 * Returns a DELEGATE_ERROR if the delegate could not be created or applied.
 * Fix: the TfLiteStatus returned by ModifyGraphWithDelegate was previously
 * ignored, so a failed delegate application was reported as success. */
RuntimeError Engine::ConfigureDelegate(::tflite::Interpreter *interpreter) {
  RuntimeError error;
  ::tflite::StatefulNnApiDelegate::Options options;
  options.allow_fp16 = true;
  options.allow_dynamic_dimensions = true;
  options.disallow_nnapi_cpu = false;
  options.accelerator_name = "vsi-npu";

  auto delegate = ::tflite::evaluation::CreateNNAPIDelegate(options);

  if (!delegate) {
    error.Set(RuntimeError::Code::DELEGATE_ERROR,
              "NNAPI delegate was not well created");
    return error;
  }

  if (kTfLiteOk != interpreter->ModifyGraphWithDelegate(std::move(delegate))) {
    error.Set(RuntimeError::Code::DELEGATE_ERROR,
              "NNAPI delegate could not be applied to the graph");
  }

  return error;
}

/* Stops the engine before teardown so inference resources are released.
 * NOTE(review): inside a destructor the Stop() call does not virtually
 * dispatch to overrides in further-derived classes — confirm that the
 * tflite::Engine base's Stop() performs the full cleanup needed here. */
Engine::~Engine() { this->Stop(); }

} // namespace nnapi
} // namespace r2i
Loading

0 comments on commit ff3d399

Please sign in to comment.