OSVR Framework (Internal Development Docs)  0.6-1962-g59773924
RoomCalibration.cpp

// Copyright 2016 Sensics, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

// Change this to a #define to dump the room-calibration filter data to
// "calib.csv" for offline analysis.
#undef OSVR_UVBI_DUMP_CALIB_LOG

// Internal Includes
#include "RoomCalibration.h"
#include "Assumptions.h"
#include "ForEachTracked.h"
#include "TrackedBodyIMU.h"

// Library/third-party includes
#include <boost/assert.hpp>
#include <osvr/Util/EigenExtras.h>
#include <osvr/Util/ExtractYaw.h>

#ifdef OSVR_UVBI_DUMP_CALIB_LOG
#include <osvr/Util/CSV.h>
#include <osvr/Util/CSVCellGroup.h>
#endif

// Standard includes
#include <iostream>
#include <stdexcept>

#ifdef OSVR_UVBI_DUMP_CALIB_LOG
#include <fstream>
#endif

namespace osvr {
namespace vbtracker {
    namespace filters = util::filters;

    /// Velocity thresholds on the filtered pose (roughly meters/sec and
    /// radians/sec): a video report only counts toward calibration when the
    /// device is moving slower than both of these.
    static const auto LINEAR_VELOCITY_CUTOFF = 0.75;
    static const auto ANGULAR_VELOCITY_CUTOFF = .75;
    /// Number of consecutive "steady" video reports required before
    /// calibration is considered finished.
    static const std::size_t REQUIRED_SAMPLES = 15;

    /// Distance from the camera (meters) beyond which the user is prompted to
    /// move closer during calibration.
    static const auto NEAR_MESSAGE_CUTOFF = 0.4;

    RoomCalibration::RoomCalibration(Eigen::Vector3d const &camPosition,
                                     bool cameraIsForward)
        : m_lastVideoData(util::time::getNow()),
          m_suppliedCamPosition(camPosition),
          m_cameraIsForward(cameraIsForward) {}

    bool RoomCalibration::wantVideoData(TrackingSystem const &sys,
                                        BodyTargetId const &target) const {
        // This was once all one conditional expression, but it was almost
        // impossible for humans to parse, so it is left broken out for the
        // compiler to parse and optimize.

        // Can't use video data until we have IMU data.
        if (!haveIMUData()) {
            return false;
        }
        if (!haveVideoData()) {
            // No video data yet: accept any target on the body the IMU
            // belongs to.
            return target.first == m_imuBody;
        }

        // Otherwise, keep using only the target we started with.
        return m_videoTarget == target;
    }

    void RoomCalibration::processVideoData(
        TrackingSystem const &sys, BodyTargetId const &target,
        util::time::TimeValue const &timestamp, Eigen::Vector3d const &xlate,
        Eigen::Quaterniond const &quat) {
        if (!wantVideoData(sys, target)) {
            // early out if this is the wrong target, or we don't have IMU
            // data yet.
            return;
        }
        if (!xlate.array().allFinite() || !quat.coeffs().array().allFinite()) {
            // non-finite camera pose
            return;
        }
        if (!haveVideoData()) {
            msg() << "Got first video report from target " << target
                  << std::endl;
        }
        bool firstData = !haveVideoData();
        m_videoTarget = target;
        auto dt = duration(timestamp, m_lastVideoData);
        m_lastVideoData = timestamp;
        if (dt <= 0) {
            dt = 1; // in case of weirdness, avoid divide by zero.
        }

        Eigen::Quaterniond prevRot = m_poseFilter.getOrientation();
        Eigen::Vector3d prevXlate = m_poseFilter.getPosition();

        // Pre-filter the camera data in case it's noisy (quite possible,
        // since it comes from RANSAC).
        m_poseFilter.filter(dt, xlate, quat);

        // The pose of the tracked device in camera space is cTd.
        // The IMU orientation is rTd (aka iTd): the tracked device in IMU
        // space, which is room space modulo yaw.
        // rTc is the camera in room space (what we want to find), so later we
        // can take a camera-reported cTb, compute rTc * cTb, and end up with
        // rTb, position included.
        Eigen::Quaterniond rTc =
            m_imuOrientation * m_poseFilter.getOrientation().inverse();
        Eigen::Vector3d rTcln = util::quat_ln(rTc);
        // Look at the velocity to see if the user was holding still enough.
        if (!firstData) {
            // NOTE: the derivative-helper types on the right-hand sides of
            // these aliases were lost when this listing was generated; see
            // the filter utilities in EigenFilters.h for their definitions.
            using XlateDeriv = /* type elided in this listing */;
            using QuatDeriv = /* type elided in this listing */;
            // We want the velocity of the filtered output, not the noisy
            // input velocity the one-euro filter itself would provide.
            m_linVel = XlateDeriv::computeMagnitude(
                XlateDeriv::compute(prevXlate, m_poseFilter.getPosition(), dt));
            m_angVel = QuatDeriv::computeMagnitude(
                QuatDeriv::compute(prevRot, m_poseFilter.getOrientation(), dt));
        }

#ifdef OSVR_UVBI_DUMP_CALIB_LOG
        {
            static util::CSV csv;
            static bool output = false;
            if (csv.numDataRows() < 400) {
                csv.row() << util::cell("dt", dt) << util::cellGroup(xlate)
                          << util::cellGroup(quat)
                          << util::cellGroup("filtered.",
                                             m_poseFilter.getPosition())
                          << util::cellGroup("filtered.",
                                             m_poseFilter.getOrientation())
                          << util::cellGroup("rTc_ln.", rTcln)
                          << util::cell("linVel", m_linVel)
                          << util::cell("angVel", m_angVel);
            } else if (!output) {
                output = true;
                std::ofstream os("calib.csv");
                csv.output(os);
                msg() << "Done writing calibration log data." << std::endl;
            }
        }
#endif // OSVR_UVBI_DUMP_CALIB_LOG

        // std::cout << "linear " << linearVel << " ang " << angVel << "\n";
        if (m_linVel < LINEAR_VELOCITY_CUTOFF &&
            m_angVel < ANGULAR_VELOCITY_CUTOFF) {
            // OK, velocity within bounds
            if (m_steadyVideoReports == 0) {
                msg() << "Hold still, performing room calibration";
            }
            msgStream() << "." << std::flush;
            m_rTc_ln_accum += rTcln;
            ++m_steadyVideoReports;
            if (finished()) {
                msgStream() << "\n" << std::endl;
            }
        } else {
            handleExcessVelocity(xlate.z());
        }
    }
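
The frame composition above is the heart of the calibration: the IMU supplies the device orientation in (yaw-ambiguous) room space (iTd), the video tracker supplies it in camera space (cTd), and composing iTd with the inverse of cTd yields the camera's orientation in room space (rTc). A standalone illustration with made-up rotations, written for this note rather than taken from the tracker:

    #include <Eigen/Geometry>
    #include <iostream>

    int main() {
        using namespace Eigen;
        double const kPi = 3.14159265358979323846;
        // Device orientation in (yaw-ambiguous) room/IMU space: iTd.
        Quaterniond iTd(AngleAxisd(kPi / 2, Vector3d::UnitY()));
        // Device orientation as reported by the camera, in camera space: cTd.
        Quaterniond cTd(AngleAxisd(kPi / 6, Vector3d::UnitY()));
        // Camera orientation in room space: rTc = iTd * (cTd)^-1.
        Quaterniond rTc = iTd * cTd.inverse();
        // Any later camera-space orientation moves into room space by
        // left-multiplying with rTc; applying it to cTd recovers iTd.
        Quaterniond roundTrip = rTc * cTd;
        std::cout << "rTc (x y z w): " << rTc.coeffs().transpose() << "\n"
                  << "rTc * cTd    : " << roundTrip.coeffs().transpose() << "\n"
                  << "iTd          : " << iTd.coeffs().transpose() << "\n";
        return 0;
    }

The real code does this per report on the one-euro-filtered pose, then accumulates util::quat_ln(rTc) so the samples can be averaged once enough steady reports arrive.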

    void RoomCalibration::handleExcessVelocity(double zTranslation) {
        // Reset the count if movement was too fast.
        if (m_steadyVideoReports > 0) {
            // End the line of progress dots.
            msgStream() << std::endl;

            msg() << "Restarting ";
            if (m_linVel >= LINEAR_VELOCITY_CUTOFF) {
                msgStream() << " - Linear velocity too high (" << m_linVel
                            << ")";
            }
            if (m_angVel >= ANGULAR_VELOCITY_CUTOFF) {
                msgStream() << " - Angular velocity too high (" << m_angVel
                            << ")";
            }
            msgStream() << "\n";
        }
        m_steadyVideoReports = 0;
        m_rTc_ln_accum = Eigen::Vector3d::Zero();

        switch (m_instructionState) {
        case InstructionState::Uninstructed:
            if (zTranslation > NEAR_MESSAGE_CUTOFF) {
                instructions()
                    << "NOTE: For best results, during tracker/server "
                       "startup, hold your head/HMD still closer than "
                    << NEAR_MESSAGE_CUTOFF
                    << " meters from the tracking camera for a few "
                       "seconds, then rotate slowly in all directions.";
                endInstructions();
                m_instructionState = InstructionState::ToldToMoveCloser;
            }
            break;
        case InstructionState::ToldToMoveCloser:
            if (zTranslation < (0.9 * NEAR_MESSAGE_CUTOFF)) {
                instructions() << "That distance looks good, rotate the device "
                                  "gently until you get a 'Hold still' "
                                  "message.";
                endInstructions();
                m_instructionState = InstructionState::ToldDistanceIsGood;
            }
            break;
        case InstructionState::ToldDistanceIsGood:
            // nothing to do now!
            break;
        }
    }
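
A small self-contained sketch of the hysteresis in the instruction state machine above; the enum and function here are written just for this note, not taken from the tracker. The "move closer" prompt fires beyond NEAR_MESSAGE_CUTOFF, but is only acknowledged once the user comes within 90% of that distance, so the two messages cannot flap right at the boundary:

    #include <cassert>

    enum class State { Uninstructed, ToldToMoveCloser, ToldDistanceIsGood };

    State update(State s, double zMeters, double cutoff = 0.4) {
        if (s == State::Uninstructed && zMeters > cutoff) {
            return State::ToldToMoveCloser; // too far: prompt to move closer
        }
        if (s == State::ToldToMoveCloser && zMeters < 0.9 * cutoff) {
            return State::ToldDistanceIsGood; // clearly close enough now
        }
        return s;
    }

    int main() {
        State s = State::Uninstructed;
        s = update(s, 0.50); // beyond the cutoff: user is told to move closer
        s = update(s, 0.38); // inside the cutoff but not the 90% band: no change
        assert(s == State::ToldToMoveCloser);
        s = update(s, 0.35); // inside the band: distance acknowledged as good
        assert(s == State::ToldDistanceIsGood);
        return 0;
    }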

    void RoomCalibration::processIMUData(TrackingSystem const &sys,
                                         BodyId const &body,
                                         util::time::TimeValue const &timestamp,
                                         Eigen::Quaterniond const &quat) {
        if (haveIMUData() && m_imuBody != body) {
// Already got data from a different IMU
#ifdef OSVR_UVBI_ASSUME_SINGLE_IMU
            throw std::logic_error(
                "RoomCalibration just received data from a second IMU, but "
                "the single IMU assumption define is still in place!");
#endif
            return;
        }

        if (!quat.coeffs().array().allFinite()) {
            // non-finite quat
            msg() << "Non-finite quat" << std::endl;
            return;
        }
        bool first = false;
        if (!haveIMUData()) {
            if (!sys.isValidBodyId(body)) {
                msg() << "Invalid body ID" << std::endl;
                return;
            }
            auto &trackedBody = sys.getBody(body);
            if (trackedBody.getNumTargets() == 0) {
                // Can't use an IMU on a body with no video-based targets for
                // calibration.
                return;
            }
            // OK, so this is the first IMU report, fine with me.
            msg() << "Got first IMU report from body " << body.value()
                  << std::endl;
            m_imuBody = body;
            first = true;
        }
        BOOST_ASSERT_MSG(m_imuBody == body, "BodyID for incoming data and "
                                            "known IMU must be identical at "
                                            "this point");

        if (first) {
            // for setup purposes, we'll constrain w to be positive.
            m_imuOrientation =
                quat.w() >= 0. ? quat : Eigen::Quaterniond(-quat.coeffs());
        } else {
            // Keep the quaternions continuous with the previous ones, so our
            // average of quat logs doesn't give us a bogus result.
            m_imuOrientation =
                util::flipQuatSignToMatch(m_imuOrientation, quat);
        }
    }
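
util::flipQuatSignToMatch is defined in OSVR's utility headers rather than here; the sketch below shows the standard sign-matching idea it presumably applies (an assumption for illustration, not a copy of the OSVR implementation). Since q and -q encode the same rotation, each new sample is kept on the same hemisphere as the previous one, which is what keeps the later log-space averaging well behaved:

    #include <Eigen/Geometry>
    #include <cassert>

    // Return `current`, sign-flipped if needed so it lies on the same
    // hemisphere of the unit-quaternion sphere as `prev`.
    Eigen::Quaterniond matchSign(Eigen::Quaterniond const &prev,
                                 Eigen::Quaterniond const &current) {
        if (prev.coeffs().dot(current.coeffs()) < 0.0) {
            // Negating all four coefficients leaves the rotation unchanged.
            return Eigen::Quaterniond(-current.coeffs());
        }
        return current;
    }

    int main() {
        Eigen::Quaterniond prev = Eigen::Quaterniond::Identity();
        // Same rotation as the identity, but with every coefficient negated.
        Eigen::Quaterniond flipped(-1.0, 0.0, 0.0, 0.0);
        Eigen::Quaterniond matched = matchSign(prev, flipped);
        assert(matched.w() > 0.0); // back on prev's hemisphere
        return 0;
    }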

    bool RoomCalibration::postCalibrationUpdate(TrackingSystem &sys) {
        if (!finished()) {
            return false;
        }
        msg() << "Room calibration process complete." << std::endl;

        // Average the accumulated rotation logs from the steady video
        // reports, then exponentiate to get the single calibration rotation:
        // the camera's orientation relative to the IMU/room frame.
        Eigen::Quaterniond iRc =
            util::quat_exp(m_rTc_ln_accum / m_steadyVideoReports);

        if (m_cameraIsForward) {
            // The camera view direction defines "forward": strip the yaw
            // component out of the calibration rotation and hand that yaw
            // offset to the IMU as its calibration yaw instead.
            auto yaw = util::extractYaw(iRc);
            iRc = Eigen::AngleAxisd(-yaw, Eigen::Vector3d::UnitY()) * iRc;
            m_imuYaw = -yaw * util::radians;
        } else {
            m_imuYaw = 0;
        }
        m_cameraPose = util::makeIsometry(m_suppliedCamPosition, iRc);
        msg() << "camera pose AKA rTc: translation: "
              << m_cameraPose.translation().transpose() << " rotation: ";
        Eigen::AngleAxisd rot(m_cameraPose.rotation());
        msgStream() << rot.angle() << " radians about "
                    << rot.axis().transpose() << std::endl;

        m_calibComplete = true;

        // Now go through the tracking system and pass along the information.
        sys.setCameraPose(getCameraPose());
        forEachIMU(sys, [&](TrackedBodyIMU &imu) {
            auto yaw = getCalibrationYaw(imu.getBody().getId());
            if (yaw) {
                imu.setCalibrationYaw(*yaw);
            }
        });
        return true;
    }
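
postCalibrationUpdate() reduces the accumulated per-sample logs to one rotation by averaging in log space and exponentiating. A standalone sketch of that averaging; quatLn/quatExp here are simplified stand-ins written for this note under the common half-angle convention, not the util::quat_ln / util::quat_exp implementations used above:

    #include <Eigen/Geometry>
    #include <iostream>
    #include <vector>

    // Quaternion log: half-angle-scaled rotation axis.
    static Eigen::Vector3d quatLn(Eigen::Quaterniond const &q) {
        Eigen::AngleAxisd aa(q);
        return 0.5 * aa.angle() * aa.axis();
    }

    // Inverse of quatLn.
    static Eigen::Quaterniond quatExp(Eigen::Vector3d const &v) {
        double const halfAngle = v.norm();
        if (halfAngle < 1e-12) {
            return Eigen::Quaterniond::Identity();
        }
        return Eigen::Quaterniond(
            Eigen::AngleAxisd(2.0 * halfAngle, v / halfAngle));
    }

    // Average nearby, sign-consistent rotation samples in log space.
    static Eigen::Quaterniond averageRotations(
        std::vector<Eigen::Quaterniond> const &samples) {
        Eigen::Vector3d accum = Eigen::Vector3d::Zero();
        for (auto const &q : samples) {
            accum += quatLn(q);
        }
        return quatExp(accum / static_cast<double>(samples.size()));
    }

    int main() {
        using namespace Eigen;
        std::vector<Quaterniond> samples{
            Quaterniond(AngleAxisd(0.10, Vector3d::UnitY())),
            Quaterniond(AngleAxisd(0.12, Vector3d::UnitY())),
            Quaterniond(AngleAxisd(0.14, Vector3d::UnitY()))};
        AngleAxisd mean(averageRotations(samples));
        std::cout << "mean angle about Y: " << mean.angle() << "\n"; // ~0.12
        return 0;
    }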

    boost::optional<util::Angle>
    RoomCalibration::getCalibrationYaw(BodyId const &body) const {
        BOOST_ASSERT_MSG(calibrationComplete(), "Not valid to call "
                                                "getCalibrationYaw() unless "
                                                "calibration is complete!");
        if (m_imuBody == body) {
            return m_imuYaw;
        }
        return boost::none;
    }

    Eigen::Isometry3d RoomCalibration::getCameraPose() const {
        BOOST_ASSERT_MSG(calibrationComplete(), "Not valid to call "
                                                "getCameraPose() unless "
                                                "calibration is complete!");
        return m_cameraPose;
    }

    bool RoomCalibration::finished() const {
        return m_steadyVideoReports >= REQUIRED_SAMPLES;
    }

    std::ostream &RoomCalibration::msgStream() const { return std::cout; }
    std::ostream &RoomCalibration::msg() const {
        return msgStream() << "[Unified Tracker: Room Calibration] ";
    }

    std::ostream &RoomCalibration::instructions() const {
        msgStream() << "\n\n";
        return msg();
    }
    void RoomCalibration::endInstructions() const {
        msgStream() << "\n\n" << std::endl;
    }
    bool isRoomCalibrationComplete(TrackingSystem const &sys) {
        // Check for camera pose
        if (!sys.haveCameraPose()) {
            return false;
        }

        // Check all IMUs.
        auto numIMUs = std::size_t{0};
        bool complete = true;
        forEachIMU(sys, [&](TrackedBodyIMU const &imu) {
            complete = complete && imu.calibrationYawKnown();
            ++numIMUs;
        });

#ifdef OSVR_UVBI_ASSUME_SINGLE_IMU
        if (numIMUs > 1) {
            throw std::logic_error("More than one IMU system wide, but the "
                                   "single IMU assumption define is still in "
                                   "place!");
        }
#endif
        return complete;
    }
} // namespace vbtracker
} // namespace osvr