// ------------------------- OpenPose C++ API Tutorial - Example 7 - Synchronous Custom Input -------------------------
// If the user wants to learn to use the OpenPose library, we highly recommend starting with the
// examples in `examples/tutorial_api_cpp/`.
// This example summarizes all the functionality of the OpenPose library:
// 1. Read folder of images / video / webcam (`producer` module)
// 2. Extract and render body keypoint / heatmap / PAF of that image (`pose` module)
// 3. Extract and render face keypoint / heatmap / PAF of that image (`face` module)
// 4. Save the results on disk (`filestream` module)
// 5. Display the rendered pose (`gui` module)
// Everything in a multi-thread scenario (`thread` module)
// Points 2 to 5 are included in the `wrapper` module
// In addition to the previous OpenPose modules, we also need to use:
// 1. `core` module:
// For the Array<float> class that the `pose` module needs
// For the Datum struct that the `thread` module sends between the queues
// 2. `utilities` module: for the error & logging functions, i.e. op::error & op::log respectively
// This file is meant to be used only as a reference from which users can take specific examples.
// Command-line user interface
#define OPENPOSE_FLAGS_DISABLE_PRODUCER
#include <openpose/flags.hpp>
// OpenPose dependencies
#include <openpose/headers.hpp>
// Custom OpenPose flags
// Producer
DEFINE_string(image_dir, "examples/media/",
"Process a directory of images. Read all standard formats (jpg, png, bmp, etc.).");
// If the user needs their own variables, they can inherit the op::Datum struct and add them there.
// UserDatum can be directly used by the OpenPose wrapper because it inherits from op::Datum: just define
// WrapperT<std::vector<UserDatum>> instead of the default Wrapper (which works on std::vector<op::Datum>).
struct UserDatum : public op::Datum
{
bool boolThatUserNeedsForSomeReason;
UserDatum(const bool boolThatUserNeedsForSomeReason_ = false) :
boolThatUserNeedsForSomeReason{boolThatUserNeedsForSomeReason_}
{}
};
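// Note: boolThatUserNeedsForSomeReason is never read in this example; it only illustrates that arbitrary
// per-frame data can travel through the OpenPose queues alongside the standard op::Datum members.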
// The W-classes can be implemented either as templates or as simple classes, given that the user usually
// knows which kind of data will move between the queues. In this case we assume a std::shared_ptr of a
// std::vector of UserDatum.
// This worker will just read and return all the jpg files in a directory
class WUserInput : public op::WorkerProducer<std::shared_ptr<std::vector<UserDatum>>>
{
public:
WUserInput(const std::string& directoryPath) :
mImageFiles{op::getFilesOnDirectory(directoryPath, "jpg")},
// If we want "jpg" + "png" images
// mImageFiles{op::getFilesOnDirectory(directoryPath, std::vector<std::string>{"jpg", "png"})},
mCounter{0}
{
if (mImageFiles.empty())
op::error("No images found on: " + directoryPath, __LINE__, __FUNCTION__, __FILE__);
}
void initializationOnThread() {}
std::shared_ptr<std::vector<UserDatum>> workProducer()
{
try
{
// Stop the program once all the images have been read
if (mImageFiles.size() <= mCounter)
{
op::log("Last frame read and added to queue. Closing program after it is processed.",
op::Priority::High);
// This function stops this worker, which will eventually stop the whole thread system once all the
// frames have been processed
this->stop();
return nullptr;
}
else
{
// Create new datum
auto datumsPtr = std::make_shared<std::vector<UserDatum>>();
datumsPtr->emplace_back();
auto& datum = datumsPtr->at(0);
// Fill datum
datum.cvInputData = cv::imread(mImageFiles.at(mCounter++));
// If empty frame -> return nullptr
if (datum.cvInputData.empty())
{
op::log("Empty frame detected on path: " + mImageFiles.at(mCounter-1) + ". Closing program.",
op::Priority::High);
this->stop();
datumsPtr = nullptr;
}
return datumsPtr;
}
}
catch (const std::exception& e)
{
this->stop();
op::error(e.what(), __LINE__, __FUNCTION__, __FILE__);
return nullptr;
}
}
private:
const std::vector<std::string> mImageFiles;
unsigned long long mCounter;
};
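// The same WorkerProducer pattern also covers video or webcam input. The class below is a minimal sketch and is
// not used by this example: the class name and the cv::VideoCapture member are illustrative assumptions
// (cv::VideoCapture is assumed to be reachable through <openpose/headers.hpp>; otherwise include <opencv2/opencv.hpp>).
class WUserVideoInput : public op::WorkerProducer<std::shared_ptr<std::vector<UserDatum>>>
{
public:
    WUserVideoInput(const std::string& videoPath) :
        mVideoCapture{videoPath}
    {
        if (!mVideoCapture.isOpened())
            op::error("Could not open video: " + videoPath, __LINE__, __FUNCTION__, __FILE__);
    }
    void initializationOnThread() {}
    std::shared_ptr<std::vector<UserDatum>> workProducer()
    {
        try
        {
            // Read the next frame; an empty frame means the stream has finished
            auto datumsPtr = std::make_shared<std::vector<UserDatum>>();
            datumsPtr->emplace_back();
            mVideoCapture >> datumsPtr->at(0).cvInputData;
            if (datumsPtr->at(0).cvInputData.empty())
            {
                // Stop this worker, which will eventually stop the whole thread system
                this->stop();
                return nullptr;
            }
            return datumsPtr;
        }
        catch (const std::exception& e)
        {
            this->stop();
            op::error(e.what(), __LINE__, __FUNCTION__, __FILE__);
            return nullptr;
        }
    }
private:
    cv::VideoCapture mVideoCapture;
};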
int tutorialApiCpp7()
{
try
{
op::log("Starting OpenPose demo...", op::Priority::High);
const auto timerBegin = std::chrono::high_resolution_clock::now();
// logging_level
op::check(0 <= FLAGS_logging_level && FLAGS_logging_level <= 255, "Wrong logging_level value.",
__LINE__, __FUNCTION__, __FILE__);
op::ConfigureLog::setPriorityThreshold((op::Priority)FLAGS_logging_level);
op::Profiler::setDefaultX(FLAGS_profile_speed);
// // For debugging
// // Print all logging messages
// op::ConfigureLog::setPriorityThreshold(op::Priority::None);
// // Print out speed values faster
// op::Profiler::setDefaultX(100);
// Applying user defined configuration - GFlags to program variables
// outputSize
const auto outputSize = op::flagsToPoint(FLAGS_output_resolution, "-1x-1");
// netInputSize
const auto netInputSize = op::flagsToPoint(FLAGS_net_resolution, "-1x368");
// faceNetInputSize
const auto faceNetInputSize = op::flagsToPoint(FLAGS_face_net_resolution, "368x368 (multiples of 16)");
// handNetInputSize
const auto handNetInputSize = op::flagsToPoint(FLAGS_hand_net_resolution, "368x368 (multiples of 16)");
// // producerType
// const auto producerSharedPtr = op::flagsToProducer(FLAGS_image_dir, FLAGS_video, FLAGS_ip_camera, FLAGS_camera,
// FLAGS_flir_camera, FLAGS_camera_resolution, FLAGS_camera_fps,
// FLAGS_camera_parameter_folder, !FLAGS_frame_keep_distortion,
// (unsigned int) FLAGS_3d_views, FLAGS_flir_camera_index);
// poseModel
const auto poseModel = op::flagsToPoseModel(FLAGS_model_pose);
// JSON saving
if (!FLAGS_write_keypoint.empty())
op::log("Flag `write_keypoint` is deprecated and will eventually be removed."
" Please, use `write_json` instead.", op::Priority::Max);
// keypointScale
const auto keypointScale = op::flagsToScaleMode(FLAGS_keypoint_scale);
// heatmaps to add
const auto heatMapTypes = op::flagsToHeatMaps(FLAGS_heatmaps_add_parts, FLAGS_heatmaps_add_bkg,
FLAGS_heatmaps_add_PAFs);
const auto heatMapScale = op::flagsToHeatMapScaleMode(FLAGS_heatmaps_scale);
// >1 camera view?
// const auto multipleView = (FLAGS_3d || FLAGS_3d_views > 1 || FLAGS_flir_camera);
const auto multipleView = false;
// Enabling Google Logging
const bool enableGoogleLogging = true;
// OpenPose wrapper
op::log("Configuring OpenPose...", op::Priority::High);
op::WrapperT<std::vector<UserDatum>> opWrapperT;
// Initializing the user custom classes
// Frames producer (e.g. video, webcam, ...)
auto wUserInput = std::make_shared<WUserInput>(FLAGS_image_dir);
// Add the custom input worker
const auto workerInputOnNewThread = true; // whether the custom worker runs in its own thread
opWrapperT.setWorker(op::WorkerType::Input, wUserInput, workerInputOnNewThread);
// Pose configuration (use WrapperStructPose{} for default and recommended configuration)
const op::WrapperStructPose wrapperStructPose{
!FLAGS_body_disable, netInputSize, outputSize, keypointScale, FLAGS_num_gpu, FLAGS_num_gpu_start,
FLAGS_scale_number, (float)FLAGS_scale_gap, op::flagsToRenderMode(FLAGS_render_pose, multipleView),
poseModel, !FLAGS_disable_blending, (float)FLAGS_alpha_pose, (float)FLAGS_alpha_heatmap,
FLAGS_part_to_show, FLAGS_model_folder, heatMapTypes, heatMapScale, FLAGS_part_candidates,
(float)FLAGS_render_threshold, FLAGS_number_people_max, enableGoogleLogging};
opWrapperT.configure(wrapperStructPose);
// Face configuration (use op::WrapperStructFace{} to disable it)
const op::WrapperStructFace wrapperStructFace{
FLAGS_face, faceNetInputSize, op::flagsToRenderMode(FLAGS_face_render, multipleView, FLAGS_render_pose),
(float)FLAGS_face_alpha_pose, (float)FLAGS_face_alpha_heatmap, (float)FLAGS_face_render_threshold};
opWrapperT.configure(wrapperStructFace);
// Hand configuration (use op::WrapperStructHand{} to disable it)
const op::WrapperStructHand wrapperStructHand{
FLAGS_hand, handNetInputSize, FLAGS_hand_scale_number, (float)FLAGS_hand_scale_range, FLAGS_hand_tracking,
op::flagsToRenderMode(FLAGS_hand_render, multipleView, FLAGS_render_pose), (float)FLAGS_hand_alpha_pose,
(float)FLAGS_hand_alpha_heatmap, (float)FLAGS_hand_render_threshold};
opWrapperT.configure(wrapperStructHand);
// Extra functionality configuration (use op::WrapperStructExtra{} to disable it)
const op::WrapperStructExtra wrapperStructExtra{
FLAGS_3d, FLAGS_3d_min_views, FLAGS_identification, FLAGS_tracking, FLAGS_ik_threads};
opWrapperT.configure(wrapperStructExtra);
// Consumer (comment or use default argument to disable any output)
const op::WrapperStructOutput wrapperStructOutput{
op::flagsToDisplayMode(FLAGS_display, FLAGS_3d), !FLAGS_no_gui_verbose, FLAGS_fullscreen,
FLAGS_write_keypoint, op::stringToDataFormat(FLAGS_write_keypoint_format), FLAGS_write_json,
FLAGS_write_coco_json, FLAGS_write_coco_foot_json, FLAGS_write_images, FLAGS_write_images_format,
FLAGS_write_video, FLAGS_camera_fps, FLAGS_write_heatmaps, FLAGS_write_heatmaps_format,
FLAGS_write_video_adam, FLAGS_write_bvh, FLAGS_udp_host, FLAGS_udp_port};
opWrapperT.configure(wrapperStructOutput);
// Set to single-thread (for sequential processing and/or debugging and/or reducing latency)
if (FLAGS_disable_multi_thread)
opWrapperT.disableMultiThreading();
// Start, run, and stop processing - exec() blocks this thread until OpenPose wrapper has finished
op::log("Starting thread(s)...", op::Priority::High);
opWrapperT.exec();
// Measuring total time
const auto now = std::chrono::high_resolution_clock::now();
const auto totalTimeSec = (double)std::chrono::duration_cast<std::chrono::nanoseconds>(now-timerBegin).count()
* 1e-9;
const auto message = "OpenPose demo successfully finished. Total time: "
+ std::to_string(totalTimeSec) + " seconds.";
op::log(message, op::Priority::High);
// Return success
return 0;
}
catch (const std::exception& e)
{
op::error(e.what(), __LINE__, __FUNCTION__, __FILE__);
return -1;
}
}
int main(int argc, char *argv[])
{
// Parsing command line flags
gflags::ParseCommandLineFlags(&argc, &argv, true);
// Running tutorialApiCpp7
return tutorialApiCpp7();
}