25 #define OSVR_HAVE_BOOST
28 #include "../ConfigParams.h"
29 #include "../ConfigurationParser.h"
30 #include "../MakeHDKTrackingSystem.h"
31 #include "../TrackedBodyTarget.h"
44 #include <boost/algorithm/string/predicate.hpp>
45 #include <opencv2/core/core.hpp>
46 #include <opencv2/highgui/highgui.hpp>
47 #include <opencv2/imgproc/imgproc.hpp>
61 struct BodyIdOrdering {
62 bool operator()(BodyId
const &lhs, BodyId
const &rhs)
const {
63 return lhs.value() < rhs.value();
// NOTE(review): constructor fragment of TrackerOfflineProcessing — the
// signature line is not present in this capture, and the stray leading
// numbers are extraction artifacts.
// Camera parameters: distorted (as captured by the HDK camera) plus an
// undistorted variant derived from them.
71 : camParamsDistorted_(osvr::vbtracker::getHDKCameraParameters()),
72 camParams_(camParamsDistorted_.createUndistortedVariant()),
73 params_(initialParams), extractor_(initialParams.
extractParams) {
// Debug windows/messages are forced off for offline batch runs.
76 params_.
debug =
false;
// Clear the IMU device path: offline processing is video-only.
81 params_.
imu.path =
"";
// Build the HDK tracking system and cache the body/target under study.
84 system_ = makeHDKTrackingSystem(params_);
85 body_ = &(system_->getBody(bodyIdOfInterest));
86 target_ = body_->getTarget(targetIdOfInterest);
91 bool everHadPose()
const {
return everHadPose_; }
92 bool hasPose()
const {
return hasPose_; }
96 void outputCSV(std::ostream &os) { csv_.
output(os); }
// Unit used for per-frame timekeeping (microsecond granularity).
105 using FrameTimeUnit = std::chrono::microseconds;
// Runs blob extraction on one frame and packages the image data for
// the tracking system.
111 ImageOutputDataPtr imageProc(cv::Mat
const &frame);
// Nominal camera frame rate; frameTime_ is the per-frame duration,
// one second divided by fps_ (continuation line not captured here).
116 const int fps_ = 100;
117 const FrameTimeUnit frameTime_ =
118 std::chrono::duration_cast<FrameTimeUnit>(std::chrono::seconds(1)) /
// Whole-seconds part of the elapsed (frame-count-derived) time.
126 std::chrono::seconds getSeconds()
const;
// Sub-second remainder of the elapsed time, in FrameTimeUnit.
130 FrameTimeUnit getFractionalRemainder()
const;
// Owning pointer to the tracking system driving the offline run.
136 std::unique_ptr<TrackingSystem> system_;
// Blob measurements for the current frame, before and after lens
// undistortion.
140 LedMeasurementVec rawMeasurements_;
141 LedMeasurementVec undistortedMeasurements_;
// Number of frames processed so far.
144 std::size_t frame_ = 0;
// hasPose_: pose available this frame; everHadPose_ latches true once
// a pose has ever been obtained (see logRow()).
145 bool hasPose_ =
false;
146 bool everHadPose_ =
false;
// Fragment of processFrame(): progress logging plus the per-frame
// pipeline (image processing, then the tracker update).
// Print progress once every 100 frames.
150 if ((frame_ % 100) == 0) {
151 std::cout <<
"Processing frame " << frame_ << std::endl;
// Extract blobs from the frame, then hand the data to the tracker.
158 auto imageData = imageProc(frame);
161 auto indices = system_->updateBodiesFromVideoData(std::move(imageData));
// Fragment (presumably getDebugImage — TODO confirm): builds a 1x4
// horizontal composite of intermediate extraction images for a frame.
167 cv::Mat input = extractor_.getInputGrayImage();
168 const cv::Size sz = input.size();
// Rectangle covering slot i of the composite strip.
171 auto getRect = [&](std::size_t i) {
172 return cv::Rect(cv::Point2i(i * sz.width, 0), sz);
// Composite is four panes wide, one pane tall, 8-bit 3-channel.
175 const cv::Size largerSize{sz.width * 4, sz.height};
177 composite.create(largerSize, CV_8UC3);
// Copy a pane into slot i, converting grayscale to BGR when needed.
179 auto copyToComposite = [&](cv::Mat
const &mat, std::size_t i) {
181 if (mat.channels() == 1) {
182 cv::cvtColor(mat, colorSrc, cv::COLOR_GRAY2BGR);
186 colorSrc.copyTo(composite(getRect(i)));
// Panes: gray input, edge image, binarized edges, blob debug overlay.
189 copyToComposite(input, 0);
190 copyToComposite(extractor_.getEdgeDetectedImage(), 1);
191 copyToComposite(extractor_.getEdgeDetectedBinarizedImage(), 2);
192 copyToComposite(generateBlobDebugImage(lastFrame_, extractor_), 3);
// imageProc: convert one BGR frame to grayscale, run the blob
// extractor, undistort the LED measurements, and package everything
// for the tracking system. (Some lines, e.g. the creation of `ret`,
// are missing from this capture.)
198 TrackerOfflineProcessing::imageProc(cv::Mat
const &frame) {
// Keep a copy of the raw frame for later debug-image generation.
199 lastFrame_ = frame.clone();
// Stamp the output with the current (frame-derived) time value.
201 ret->tv = currentTime_;
204 cv::cvtColor(frame, gray, cv::COLOR_BGR2GRAY);
205 ret->frameGray = gray;
206 ret->camParams = camParams_;
// Extract LED blobs using the configured blob parameters; the final
// `false` flag's meaning is not visible here — TODO confirm.
207 rawMeasurements_ = extractor_(gray, params_.
blobParams,
false);
// Compensate for lens distortion before handing measurements off.
208 undistortedMeasurements_ =
209 undistortLeds(rawMeasurements_, camParamsDistorted_);
210 ret->ledMeasurements = undistortedMeasurements_;
// logRow: append one CSV row of per-frame tracker state and blob
// statistics. (Several interior lines — switch statement, some cells —
// are missing from this capture.)
214 void TrackerOfflineProcessing::logRow() {
216 auto row = csv_.row();
// Refresh pose availability; everHadPose_ latches true permanently.
225 hasPose_ = body_->hasPoseEstimate();
226 everHadPose_ |= hasPose_;
// Mark dropped frames: "0" when there is no pose, blank otherwise.
228 row <<
cell(
"TrackerDropped", hasPose_ ?
"" :
"0");
// Current pose: orientation quaternion and position vector.
231 Eigen::Quaterniond quat = body_->getState().getQuaternion();
232 Eigen::Vector3d xlate = body_->getState().position();
234 << cellGroup<QuatAsEulerTag>(quat);
236 row <<
cell(
"Measurements", rawMeasurements_.size());
// Tally this frame's blob rejections by reason.
238 std::size_t numArea = 0;
239 std::size_t numCenterPointValue = 0;
240 std::size_t numCircularity = 0;
241 std::size_t numConvexity = 0;
242 for (
auto &reject : extractor_.getRejectList()) {
243 RejectReason reason = std::get<1>(reject);
246 case RejectReason::Area:
249 case RejectReason::CenterPointValue:
250 numCenterPointValue++;
252 case RejectReason::Circularity:
255 case RejectReason::Convexity:
// Helper to pull a named status measurement from the target.
263 auto getStatus = [&](TargetStatusMeasurement meas) {
264 return target_->getInternalStatusMeasurement(meas);
// Emit rejection counts, LED usage, and error-variance statistics.
266 row <<
cell(
"Rejects.Area", numArea)
267 <<
cell(
"Rejects.CenterPointValue", numCenterPointValue)
268 <<
cell(
"Rejects.Circularity", numCircularity)
269 <<
cell(
"Rejects.Convexity", numConvexity)
270 <<
cell(
"Leds", getStatus(TargetStatusMeasurement::NumUsableLeds))
272 getStatus(TargetStatusMeasurement::NumUsedLeds));
274 row <<
cell(
"PosErrorVariance",
275 getStatus(TargetStatusMeasurement::MaxPosErrorVariance))
276 <<
cell(
"PosErrorVarianceBound",
278 TargetStatusMeasurement::PosErrorVarianceLimit));
// countDigits: number of decimal digits in the denominator of ratio
// Rat — used to zero-pad the fractional time component. (The loop and
// return statement are missing from this capture.)
284 template <
typename Rat>
inline std::size_t countDigits() {
285 auto denom = Rat::den;
286 auto ret = std::size_t{0};
// Fragment of carefullyFormatElapsedTime(): renders the elapsed time
// as "<seconds>.<zero-padded fraction>" so generated filenames sort in
// chronological order.
296 std::ostringstream os;
297 os << getSeconds().count() <<
".";
// Pad the fractional part to the full digit width implied by the
// fractional unit's period denominator.
298 using FractionalType = decltype(getFractionalRemainder());
299 using FractionalPeriod = FractionalType::period;
300 static const auto numDigits = countDigits<FractionalPeriod>();
301 os << std::setw(numDigits) << std::setfill(
'0')
302 << getFractionalRemainder().count();
// getSeconds / getFractionalRemainder: split the frame-count-derived
// elapsed time into whole seconds and a sub-second remainder. (Both
// bodies are only partially captured.)
306 inline std::chrono::seconds TrackerOfflineProcessing::getSeconds()
const {
310 inline TrackerOfflineProcessing::FrameTimeUnit
311 TrackerOfflineProcessing::getFractionalRemainder()
const {
313 auto ret = remainderFrames * frameTime_;
// Sanity check (assertion macro line not captured): the remainder must
// be strictly below one second.
315 ret < std::chrono::seconds(1),
316 "This is a remainder - it must be less than a second!");
// When true, save a debug PNG on frames where the tracker has lost a
// previously-acquired pose fix (set from the command line in main()).
320 static bool g_saveFramesLostFix =
false;
// processAVI: feed every frame of the named video through the offline
// tracker app. (The VideoCapture open call and return statements are
// missing from this capture.)
322 bool processAVI(std::string
const &fn, TrackerOfflineProcessing &app) {
323 cv::VideoCapture capture;
325 if (!capture.isOpened()) {
326 std::cerr <<
"Could not open video file " << fn << std::endl;
331 while (capture.read(frame)) {
332 app.processFrame(frame);
// Fix was lost after having been acquired: optionally dump a debug
// image named "<video>.<elapsed-time>.png".
333 if (g_saveFramesLostFix && !app.hasPose() && app.everHadPose()) {
335 std::ostringstream os;
336 os << fn <<
"." << app.carefullyFormatElapsedTime() <<
".png";
337 cv::Mat image = app.getDebugImage();
338 cv::imwrite(os.str(), image);
// Command-line switch that enables saving debug frames on lost fix.
347 static const auto DEBUG_FRAMES_SWITCH =
"--save-debug-frames";
// main: parse arguments (at most one .json config, one or more .avi
// videos, optional --save-debug-frames), then process each video and
// write a CSV of results next to it. (Many interior lines are missing
// from this capture.)
350 int main(
int argc,
char *argv[]) {
352 bool gotParams =
false;
354 std::vector<std::string> videoNames;
355 auto args = makeArgList(argc, argv);
// Consume any ".json" argument as the tracker config file.
358 auto numJson = handle_arg(args, [&](std::string
const &arg) {
359 if (!boost::iends_with(arg,
".json")) {
362 std::ifstream configFile(arg);
364 std::cerr <<
"Tried to load " << arg
365 <<
" as a config file but could not open it!"
367 throw std::invalid_argument(
368 "Could not open json config file passed");
372 if (!reader.parse(configFile, root)) {
373 std::cerr <<
"Could not parse " << arg <<
" as JSON! "
374 << reader.getFormattedErrorMessages() << std::endl;
375 throw std::runtime_error(
376 "Config file could not be parsed as JSON!");
378 params = osvr::vbtracker::parseConfigParams(root);
// Only a single config file is accepted.
382 std::cerr <<
"At most one .json config file passed to this app!"
// Collect all ".avi" arguments as input videos.
388 auto numVideoNames = handle_arg(args, [&](std::string
const &arg) {
389 auto ret = boost::iends_with(arg,
".avi");
391 videoNames.push_back(arg);
395 if (numVideoNames < 1) {
396 std::cerr <<
"Must pass at least one video filename to this app!"
// Optional switch: dump debug frames whenever the fix is lost.
401 osvr::vbtracker::g_saveFramesLostFix =
402 handle_has_iswitch(args, DEBUG_FRAMES_SWITCH);
403 if (osvr::vbtracker::g_saveFramesLostFix) {
404 std::cout <<
"Will save debug frames when tracker loses fix"
410 <<
"Unrecognized arguments left after parsing command line!"
414 }
catch (std::exception &e) {
415 std::cerr << e.what() << std::endl;
// Process each video in turn, writing "<video>.csv" beside it.
421 for (
auto &videoName : videoNames) {
422 std::cout <<
"Processing input video " << videoName << std::endl;
424 auto success = osvr::vbtracker::processAVI(videoName, app);
428 <<
" frames." << std::endl;
429 auto outname = videoName +
".csv";
430 std::cout <<
"Writing output data to: " << outname << std::endl;
431 std::ofstream of(outname);
433 std::cout <<
"Can't write to that file!" << std::endl;
438 std::cout <<
"File finished!\n\n" << std::endl;
440 std::cout <<
"File skipped!\n\n" << std::endl;
// On any error, wait for user acknowledgement before exiting.
445 if (returnValue != 0) {
446 std::cerr <<
"One or more errors! Press enter to exit after reviewing "
bool streamBeaconDebugInfo
bool silent
For optimization usage.
bool performingOptimization
Header containing some simple args-handling routines: nothing sophisticated, just enough to keep the ...
std::size_t getFrameCount() const
To get a time that matches the timestamp.
BlobParams blobParams
Parameters specific to the blob-detection step of the algorithm.
std::string carefullyFormatElapsedTime() const
void output(std::ostream &os) const
detail::CellGroup< T > cellGroup(const char *groupHeader, T const &data)
Helper function to create a cell group with a group header prefix.
The Util library: Functionality not necessarily coupled to any particular core library, serving more as a common base layer behind all systems.
void processFrame(cv::Mat const &frame)
IMUInputParams imu
IMU input-related parameters.
General configuration parameters.
Header providing a C++ wrapper around TimeValueC.h.
bool debug
Whether to show the debug windows and debug messages.
int main(int argc, char *argv[])
OSVR_TimeValue_Microseconds microseconds
Microseconds portion of the time value.
Standardized, portable parallel to struct timeval for representing both absolute times and time intervals.
detail::Cell< T > cell(const char *header, T const &data)
Helper free function to make a CSV cell.
void osvrTimeValueNormalize(OSVR_TimeValue *tv)
"Normalizes" a time value so that the absolute number of microseconds is less than 1,000,000.
EdgeHoleParams extractParams
Parameters specific to the edge hole based LED extraction algorithm.