OSVR Framework (Internal Development Docs)  0.6-1962-g59773924
VideoTrackerCalibrationUtility.cpp
// Copyright 2015 Sensics, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

// Internal Includes
#include "ConfigurationParser.h"
#include "ImageSourceFactories.h"
#include "VideoBasedTracker.h"
#include "HDKData.h"
#include "CameraParameters.h"
#include "SetupSensors.h"

#include "CVTwoStepProgressBar.h"

#include <osvr/Common/JSONEigen.h>

// Library/third-party includes
#include <opencv2/highgui/highgui.hpp>
#include <opencv2/imgproc/imgproc.hpp>
#include <json/reader.h>
#include <json/value.h>

// Standard includes
#include <memory>
#include <iostream>
#include <fstream> // for the std::ifstream/std::ofstream used below
#include <unordered_set>

using std::endl;
using namespace osvr::vbtracker;

namespace myDetail {
/// Small helper that prefixes every message written to the wrapped stream.
class StreamPrefixer {
  public:
    StreamPrefixer(const char *prefix, std::ostream &os)
        : m_prefix(prefix), m_os(&os) {}
    template <typename T> std::ostream &operator<<(T val) {
        return (*m_os) << m_prefix << val;
    }

  private:
    const char *m_prefix;
    std::ostream *m_os;
};

} // namespace myDetail

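/// Prefixed stdout/stderr wrappers: every status or error line printed by
/// this utility is tagged with "[OSVR Video Tracker Calibration]".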
static myDetail::StreamPrefixer
    out("[OSVR Video Tracker Calibration] ", std::cout);
static myDetail::StreamPrefixer
    err("[OSVR Video Tracker Calibration] ", std::cerr);

static const std::string windowNameAndInstructions(
    "OSVR Video Tracker Pre-Calibration | q or esc to quit without saving");

static int withAnError() {
    err << "\n";
    err << "Press enter to exit." << endl;
    std::cin.ignore();
    return -1;
}

static Json::Value findVideoTrackerParams(Json::Value const &drivers) {
    if (!drivers.isArray()) {
        return Json::Value(Json::nullValue);
    }
    for (auto const &entry : drivers) {
        if (entry["plugin"] == "com_osvr_VideoBasedHMDTracker" &&
            entry["driver"] == "VideoBasedHMDTracker") {
            return entry["params"];
        }
    }
    return Json::Value(Json::nullValue);
}

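// Interactive calibration application: owns the camera image source, the
// video-based tracker, and the OpenCV window used for operator feedback.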
class TrackerCalibrationApp {
  public:
    TrackerCalibrationApp(ImageSourcePtr &&src, ConfigParams const &params)
        : m_src(std::move(src)), m_params(params), m_vbtracker(params),
          m_distanceBetweenPanels(computeDistanceBetweenPanels(params)) {
        m_firstNotch = m_params.initialBeaconError * 0.9;
        m_secondNotch = m_params.initialBeaconError * 0.8;
        cv::namedWindow(windowNameAndInstructions);
    }

    osvr::vbtracker::VideoBasedTracker &vbtracker() { return m_vbtracker; }

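    /// Main interactive loop. Phase one waits (with Kalman mode disabled)
    /// until sensor 0 reports the HMD within about 0.3 m of the camera;
    /// phase two runs the tracker with auto-calibration enabled, colors each
    /// reprojected beacon by how far its variance has dropped, and saves the
    /// beacon positions to the configured calibration file when 's' is hit.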
    void run() {
        vbtracker().getFirstEstimator().permitKalmanMode(false);

        {
            out << "Bring the HMD into view and fairly close to the camera."
                << endl;
            double zDistance = 1000.;
            while (zDistance > 0.3 && !m_quit) {
                tryGrabAndProcessFrame([&zDistance](OSVR_ChannelCount sensor,
                                                    OSVR_Pose3 const &pose) {
                    if (sensor == 0) {
                        zDistance = osvrVec3GetZ(&pose.translation);
                    }
                });

                if (timeToUpdateDisplay()) {
                    m_frame.copyTo(m_display);
                    cv::putText(
                        m_display,
                        "Bring the HMD into view and close to the camera.",
                        cv::Point(30, 30), cv::FONT_HERSHEY_SIMPLEX, 0.4,
                        cv::Scalar(255, 0, 0));
                    updateDisplay();
                }
            }
        }
        if (m_quit) {
            return;
        }

        vbtracker().getFirstEstimator().permitKalmanMode(true);

        out << "OK - you'll now want to move the HMD slowly around the view of "
               "the camera, and rotate it so that all sides can be seen."
            << endl;
        out << "Press 's' to save your calibration when you're done." << endl;
        m_frame.copyTo(m_display);
        updateDisplay();

        // Only show the front-panel beacons - we do not want to encourage
        // attempts to calibrate the back panel, since it's not rigidly
        // attached to the front panel.
        static const int MAX_BEACONS_TO_SHOW = 34;

        while (!m_quit) {
            bool gotDebugData = false;
            tryGrabAndProcessFrame(
                [&](OSVR_ChannelCount sensor, OSVR_Pose3 const &pose) {
                    if (sensor == 0) {
                        gotDebugData = true;
                    }
                });

            if (timeToUpdateDisplay() && gotDebugData) {
                m_frame.copyTo(m_display);

                {
                    auto &debugData =
                        vbtracker().getFirstEstimator().getBeaconDebugData();
                    auto nBeacons = debugData.size();
                    auto nBeaconsActive = size_t{0};
                    auto nBeaconsIdentified = size_t{0};
                    for (decltype(nBeacons) i = 0; i < nBeacons; ++i) {
                        if (debugData[i].measurement.x != 0) {
                            nBeaconsIdentified++;
                        }
                        if (debugData[i].variance == 0) {
                            continue;
                        }
                        nBeaconsActive++;
#if 0
                        cv::line(m_display, debugData[i].measurement,
                                 debugData[i].measurement + debugData[i].residual,
                                 cv::Scalar(0, 255, 0), 2);
#else
                        cv::circle(
                            m_display, debugData[i].measurement,
                            static_cast<int>(cv::norm(debugData[i].residual)),
                            cv::Scalar(20, 20, 20), 2);
#endif
                    }

                    cv::putText(m_display,
                                std::to_string(nBeaconsActive) +
                                    " beacons active of " +
                                    std::to_string(nBeaconsIdentified) +
                                    " identified this frame",
                                cv::Point(30, 30), cv::FONT_HERSHEY_SIMPLEX,
                                0.45, cv::Scalar(255, 100, 100));

                    cv::putText(m_display,
                                "Green labels indicate beacons with "
                                "enough calibration data. S to save "
                                "and quit.",
                                cv::Point(30, 50), cv::FONT_HERSHEY_SIMPLEX,
                                0.45, cv::Scalar(255, 100, 100));
                }
                auto numGreen = std::size_t{0};
                auto numYellow = std::size_t{0};
                auto numUnimproved = std::size_t{0};

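                // Color code per beacon label: green once the beacon's
                // autocalibration variance is below 80% of the initial beacon
                // error on all three axes, yellow below 90%, and red otherwise
                // (see m_firstNotch/m_secondNotch set in the constructor).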
                {
                    std::vector<cv::Point2f> reprojections;
                    vbtracker().getFirstEstimator().ProjectBeaconsToImage(
                        reprojections);
                    const auto nBeacons = reprojections.size();
                    m_nBeacons = nBeacons;
                    const auto beaconsToDisplay =
                        std::min(static_cast<std::size_t>(MAX_BEACONS_TO_SHOW),
                                 nBeacons);
                    for (std::size_t i = 0; i < beaconsToDisplay; ++i) {
                        Eigen::Vector3d autocalibVariance =
                            vbtracker()
                                .getFirstEstimator()
                                .getBeaconAutocalibVariance(i);
                        cv::Scalar color{0, 0, 255};
                        if ((autocalibVariance.array() <
                             Eigen::Array3d::Constant(m_secondNotch))
                                .all()) {
                            color = cv::Scalar{0, 255, 0};
                            numGreen++;
                        } else if ((autocalibVariance.array() <
                                    Eigen::Array3d::Constant(m_firstNotch))
                                       .all()) {
                            color = cv::Scalar{0, 255, 255};
                            numYellow++;
                        } else {
                            numUnimproved++;
                        }

                        cv::putText(m_display, std::to_string(i + 1),
                                    reprojections[i] + cv::Point2f(1, 1),
                                    cv::FONT_HERSHEY_SIMPLEX, 0.45, color);
                    }
                }

                {
                    static const auto PROGRESS_HEIGHT = 5;
                    drawTwoStepProgressBar(
                        m_display,
                        cv::Point(0, m_display.rows - PROGRESS_HEIGHT),
                        cv::Size(m_display.cols, PROGRESS_HEIGHT), numGreen,
                        numYellow, numUnimproved);
                }

                auto key = updateDisplay();
                if ('s' == key || 'S' == key) {
                    m_save = true;
                    m_quit = true;
                }
            }
        }

        int beaconsToOutput =
            std::min(MAX_BEACONS_TO_SHOW, static_cast<int>(m_nBeacons));
        for (int i = 0; i < beaconsToOutput; ++i) {
            std::cout << "Beacon " << i + 1 << " autocalib variance ratio: "
                      << vbtracker()
                                 .getFirstEstimator()
                                 .getBeaconAutocalibVariance(i)
                                 .transpose() /
                             m_params.initialBeaconError
                      << "\n";
        }

        std::cout << endl;
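        // The saved calibration is a JSON array of per-beacon positions
        // ([x, y, z]): auto-calibrated values for the front-panel beacons,
        // followed by the nominal rear-panel locations when the configuration
        // includes the rear panel.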
        if (m_save) {
            Json::Value calib(Json::arrayValue);
            out << "Saving your calibration data..." << endl;
            auto &estimator = vbtracker().getFirstEstimator();
            auto numBeaconsFromAutocalib =
                std::min(m_nBeacons, getNumHDKFrontPanelBeacons());

            for (std::size_t i = 0; i < numBeaconsFromAutocalib; ++i) {
                calib.append(osvr::common::toJson(
                    estimator.getBeaconAutocalibPosition(i)));
            }
            if (m_nBeacons > numBeaconsFromAutocalib) {
                // This means they wanted rear panel beacons too. We will write
                // un-calibrated beacon locations for those, so they don't get
                // "less-neutral" starting positions.
                Point3Vector locations;
                addRearPanelBeaconLocations(m_distanceBetweenPanels, locations);
                for (auto const &beacon : locations) {
                    Json::Value val(Json::arrayValue);
                    val.append(beacon.x);
                    val.append(beacon.y);
                    val.append(beacon.z);
                    calib.append(val);
                }
            }
            std::cout << "\n"
                      << osvr::common::jsonToCompactString(calib) << "\n"
                      << endl;

            {
                std::ofstream outfile(m_params.calibrationFile);
                outfile << osvr::common::jsonToStyledString(calib);
                outfile.close();
            }
            closeWindow();
            out << "Done! Press enter to exit." << endl;
            std::cin.ignore();
        }
    }

    /// @return true if we processed a frame.
    template <typename F> bool tryGrabAndProcessFrame(F &&functor) {
        if (!m_src->grab()) {
            err << "Failed to grab!" << endl;
            return false;
        }
        m_timestamp = osvr::util::time::getNow();
        m_src->retrieve(m_frame, m_imageGray);

        m_vbtracker.processImage(m_frame, m_imageGray, m_timestamp,
                                 std::forward<F>(functor));
        m_frameStride = (m_frameStride + 1) % 11;
        return true;
    }

    /// Is it time to update the display window?
    bool timeToUpdateDisplay() const { return m_frameStride == 0; }

    /// Shows the display image, polls for a keystroke (quitting on q/esc),
    /// and returns whatever key was pressed.
    char updateDisplay() {
        cv::imshow(windowNameAndInstructions, m_display);
        auto key = static_cast<char>(cv::waitKey(1));
        if ('q' == key || 'Q' == key || 27 /* esc */ == key) {
            m_quit = true;
        }
        return key;
    }

    void closeWindow() { cv::destroyWindow(windowNameAndInstructions); }

  private:
    ImageSourcePtr m_src;
    ConfigParams m_params;
    const float m_distanceBetweenPanels;
    double m_firstNotch;
    double m_secondNotch;
    osvr::vbtracker::VideoBasedTracker m_vbtracker;
    osvr::util::time::TimeValue m_timestamp;
    cv::Mat m_frame;
    cv::Mat m_imageGray;
    // This is the one the steps of the app should mess with.
    cv::Mat m_display;
    std::size_t m_frameStride = 0;
    std::size_t m_nBeacons = 0;
    bool m_quit = false;
    bool m_save = false;
};

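// Typical invocation (illustrative executable name): pass the path of an OSVR
// server config file on the command line, otherwise the default config
// filename is used, e.g.
//     VideoTrackerCalibrationUtility osvr_server_config.json
// findVideoTrackerParams() above expects that config to contain a driver
// entry of roughly this shape (illustrative values; "calibrationFile" is
// where the results are written):
//   "drivers": [{
//       "plugin": "com_osvr_VideoBasedHMDTracker",
//       "driver": "VideoBasedHMDTracker",
//       "params": { "calibrationFile": "videotrackerCalibration.json" }
//   }]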
int main(int argc, char *argv[]) {
    ConfigParams params;

    {
        std::string configName(osvr::server::getDefaultConfigFilename());
        if (argc > 1) {
            configName = argv[1];
        } else {
            out << "Using default config file - pass a filename on the command "
                   "line to use a different one."
                << endl;
        }

        Json::Value root;
        {
            out << "Using config file '" << configName << "'" << endl;
            std::ifstream configFile(configName);
            if (!configFile.good()) {
                err << "Could not open config file!" << endl;
                err << "Searched in the current directory; file may be "
                       "misspelled, missing, or in a different directory."
                    << endl;
                return withAnError();
            }
            Json::Reader reader;
            if (!reader.parse(configFile, root)) {
                err << "Could not parse config file as JSON!" << endl;
                return withAnError();
            }
        }
        auto trackerParams = findVideoTrackerParams(root["drivers"]);
        if (trackerParams.isNull()) {
            out << "Warning: No video tracker params found?" << endl;
        }

        // Actually parse those params from JSON to the struct, just like the
        // plugin would.
        params = parseConfigParams(trackerParams);
    }

    if (params.calibrationFile.empty()) {
        err << "calibrationFile not specified in configuration file! no clue "
               "where to write to!"
            << endl;
        return withAnError();
    }

    params.debug = false;
    params.extraVerbose = false;         // don't need those messages
    params.streamBeaconDebugInfo = true; // want the data being recorded there.

    auto src = openHDKCameraDirectShow();
    if (!src || !src->ok()) {
        err << "Couldn't find or access the IR camera!" << endl;
        return withAnError();
    }

    // Set the number of threads for OpenCV to use.
    cv::setNumThreads(1);

    TrackerCalibrationApp trackerApp{std::move(src), params};

    {
        auto camParams = getHDKCameraParameters();
        if (params.includeRearPanel) {
            setupSensorsIncludeRearPanel(trackerApp.vbtracker(), params, false);
        } else {
            err << "WARNING: only calibrating the first sensor is currently "
                   "supported!"
                << endl;
            setupSensorsWithoutRearPanel(trackerApp.vbtracker(), params, false);
        }
    }

    trackerApp.run();

    return 0;
}