OSVR Framework (Internal Development Docs)  0.6-1962-g59773924
VideoIMUFusion.cpp
// Copyright 2015 Sensics, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

// Internal Includes
#include "VideoIMUFusion.h"
#include "RunningData.h"
#include <osvr/Util/EigenFilters.h>
#include <osvr/Util/EigenInterop.h>
#include <osvr/Util/ExtractYaw.h>

// Library/third-party includes
#include <boost/assert.hpp>

// Standard includes
#include <iostream>

#ifdef OSVR_FPE
#include <FPExceptionEnabler.h>
#endif

namespace ei = osvr::util::eigen_interop;

namespace filters = osvr::util::filters;

static const auto LINEAR_VELOCITY_CUTOFF = 0.2;
static const auto ANGULAR_VELOCITY_CUTOFF = 1.e-4;

static const std::size_t REQUIRED_SAMPLES = 10;

static const auto NEAR_MESSAGE_CUTOFF = 0.3;
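// How the constants above are used: during camera-pose acquisition, each video
// report only counts toward REQUIRED_SAMPLES when the one-euro filters'
// derivative magnitudes for position and orientation both fall below
// LINEAR_VELOCITY_CUTOFF and ANGULAR_VELOCITY_CUTOFF; any faster motion resets
// the count. NEAR_MESSAGE_CUTOFF (meters along the camera's Z axis) only gates
// the console hint asking the user to move closer to the camera.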

VideoIMUFusion::VideoIMUFusion(VideoIMUFusionParams const &params)
    : m_params(params), m_roomCalib(Eigen::Isometry3d::Identity()) {
    enterCameraPoseAcquisitionState();
}

VideoIMUFusion::~VideoIMUFusion() = default;

Eigen::Matrix<double, 12, 12> const &
VideoIMUFusion::getErrorCovariance() const {
    BOOST_ASSERT_MSG(running(),
                     "Only valid if fusion is in the running state!");
    return m_runningData->getErrorCovariance();
}

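// Transition out of the acquisition state once a stable camera-in-room pose
// (rTc) is available. If the camera is configured as facing forward, only the
// yaw component of rTc is cancelled (a rotation about room Y), so "forward" as
// seen by the camera becomes forward in room space; that correction is stored
// as m_roomCalib and applied to every subsequent output pose.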
void VideoIMUFusion::enterRunningState(
    Eigen::Isometry3d const &rTc, const OSVR_TimeValue &timestamp,
    const OSVR_PoseReport &report, const OSVR_OrientationState &orientation) {
#ifdef OSVR_FPE
    FPExceptionEnabler fpe;
#endif
    m_rTc = rTc;
    std::cout << "\nVideo-IMU fusion: Camera pose acquired, entering normal "
                 "run mode!\n";
    std::cout << "Camera is located in the room at roughly "
              << m_rTc.translation().transpose() << std::endl;

    if (m_params.cameraIsForward) {
        auto yaw = osvr::util::extractYaw(Eigen::Quaterniond(m_rTc.rotation()));
        Eigen::AngleAxisd correction(-yaw, Eigen::Vector3d::UnitY());
        m_roomCalib = Eigen::Isometry3d(correction);
    }

    m_state = State::Running;
    m_runningData.reset(new VideoIMUFusion::RunningData(
        m_params, m_rTc, orientation, report.pose, timestamp));
    m_startupData.reset();
    ei::map(m_camera) = m_roomCalib * m_rTc;
}

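// IMU orientation reports: before the camera pose is known, the orientation is
// passed straight through with the configured eye height as the position; once
// running, the report updates the fusion filter and a new fused pose is
// published.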
void VideoIMUFusion::handleIMUData(const OSVR_TimeValue &timestamp,
                                   const OSVR_OrientationReport &report) {
    if (m_state != State::Running) {
        // Just report the orientation with height added on.
        m_lastPose.rotation = report.rotation;
        ei::map(m_lastPose).translation() =
            Eigen::Vector3d::UnitY() * m_params.eyeHeight;
        m_lastTime = timestamp;
        return;
    }
    m_runningData->handleIMUReport(timestamp, report);

    // send a pose report
    updateFusedOutput(timestamp);
}
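
// IMU angular-velocity reports: the velocity output always mirrors the raw
// angular velocity, expressed as an incremental rotation over a nominal
// DT = 1/50 s via the quaternion exponential map; linear velocity is reported
// as invalid. Once running, the measurement also updates the fusion filter.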
void VideoIMUFusion::handleIMUVelocity(const OSVR_TimeValue &timestamp,
                                       const Eigen::Vector3d &angVel) {
    static const auto DT = 1. / 50.;
    ei::map(m_lastVelocity.angularVelocity.incrementalRotation) =
        osvr::util::quat_exp_map((angVel * DT).eval()).exp();
    m_lastVelocity.angularVelocity.dt = DT;

    ei::map(m_lastVelocity.linearVelocity) = Eigen::Vector3d::Zero();
    m_lastVelocity.linearVelocityValid = false;
    m_lastVelTime = timestamp;

    if (m_state != State::Running) {
        return;
    }
    m_runningData->handleIMUVelocity(timestamp, angVel);
    // send a pose report
    updateFusedOutput(timestamp);
}

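// Compose the published pose: offset the filter's position by the configured
// eye height along Y, combine with the filter's orientation, then apply the
// room calibration (yaw correction) before storing the result as the latest
// fused report.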
void VideoIMUFusion::updateFusedOutput(const OSVR_TimeValue &timestamp) {
    Eigen::Isometry3d initialPose =
        Eigen::Translation3d(m_runningData->getPosition() +
                             Eigen::Vector3d::UnitY() * m_params.eyeHeight) *
        m_runningData->getOrientation();
    Eigen::Isometry3d transformed = m_roomCalib * initialPose;
    ei::map(m_lastPose).rotation() = Eigen::Quaterniond(transformed.rotation());
    ei::map(m_lastPose).translation() =
        Eigen::Vector3d(transformed.translation());
    m_lastTime = timestamp;
}

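// Video tracker reports in the running state feed the fusion filter directly.
// A second, debug-oriented output (m_reorientedVideo) carries the raw video
// pose mapped into calibrated room space for comparison with the fused pose.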
void VideoIMUFusion::handleVideoTrackerDataWhileRunning(
    const OSVR_TimeValue &timestamp, const OSVR_PoseReport &report) {
    assert(running() &&
           "handleVideoTrackerDataWhileRunning() called when not running!");
    // Pass this along to the filter
    m_runningData->handleVideoTrackerReport(timestamp, report);

    updateFusedOutput(timestamp);
    // For debugging, we will output a second sensor that is just the
    // video tracker data re-oriented.
    Eigen::Isometry3d videoPose =
        m_roomCalib * m_runningData->takeCameraPoseToRoom(report.pose);
    ei::map(m_reorientedVideo) = videoPose;
}

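// Startup-phase helper: each video report is combined with the concurrent IMU
// orientation to form a camera-in-room pose sample (rTc), which is smoothed by
// one-euro filters on position and orientation. Samples count only while the
// filtered motion stays below the velocity cutoffs; after REQUIRED_SAMPLES
// consecutive still samples, the filtered pose becomes the camera pose and
// fusion enters the running state.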
class VideoIMUFusion::StartupData {
  public:
    StartupData()
        : last(getNow()), positionFilter(filters::one_euro::Params{}),
          orientationFilter(filters::one_euro::Params{}) {}
    EIGEN_MAKE_ALIGNED_OPERATOR_NEW
    void handleReport(const OSVR_TimeValue &timestamp,
                      const OSVR_PoseReport &report,
                      const OSVR_OrientationState &orientation) {
        auto dt = duration(timestamp, last);
        if (dt <= 0) {
            dt = 1; // in case of weirdness, avoid divide by zero.
        }
        // Pose of tracked device (in camera space) is cTd
        // orientation is rTd or iTd: tracked device in IMU space (aka room
        // space, modulo yaw)
        // rTc is camera in room space (what we want to find), so we can take
        // camera-reported cTd, perform rTc * cTd, and end up with rTd with
        // position...
        Eigen::Isometry3d rTc = Eigen::Isometry3d(ei::map(orientation).quat()) *
                                ei::map(report.pose).transform().inverse();
        positionFilter.filter(dt, rTc.translation());
        orientationFilter.filter(dt, Eigen::Quaterniond(rTc.rotation()));
        auto linearVel = positionFilter.getDerivativeMagnitude();
        auto angVel = orientationFilter.getDerivativeMagnitude();
        // std::cout << "linear " << linearVel << " ang " << angVel << "\n";
        if (linearVel < LINEAR_VELOCITY_CUTOFF &&
            angVel < ANGULAR_VELOCITY_CUTOFF) {
            // OK, velocity within bounds
            if (reports == 0) {
                std::cout
                    << "Video-IMU fusion: Hold still, measuring camera pose";
            }
            std::cout << "." << std::flush;
            ++reports;
        } else {
            // reset the count if movement too fast.
            if (reports > 0) {
                std::cout << std::endl;
            }
            reports = 0;
            if (!toldToMoveCloser &&
                osvrVec3GetZ(&report.pose.translation) > NEAR_MESSAGE_CUTOFF) {
                std::cout
                    << "\n\nNOTE: For best results, during tracker/server "
                       "startup, hold your head/HMD still closer than "
                    << NEAR_MESSAGE_CUTOFF
                    << " meters from the tracking camera for a few "
                       "seconds, then rotate slowly in all directions.\n\n"
                    << std::endl;
                toldToMoveCloser = true;
            } else if (toldToMoveCloser && !toldDistanceIsGood &&
                       osvrVec3GetZ(&report.pose.translation) <
                           0.9 * NEAR_MESSAGE_CUTOFF) {
                std::cout
                    << "\nThat distance looks good, hold it right there.\n"
                    << std::endl;
                toldDistanceIsGood = true;
            }
        }
        last = timestamp;
        if (finished()) {
            std::cout << "\n" << std::endl;
        }
    }

    bool finished() const { return reports >= REQUIRED_SAMPLES; }

    Eigen::Isometry3d getRoomToCamera() const {
        Eigen::Isometry3d ret;
        ret.fromPositionOrientationScale(positionFilter.getState(),
                                         orientationFilter.getState(),
                                         Eigen::Vector3d::Constant(1));
        return ret;
    }

  private:
    std::size_t reports = 0;
    OSVR_TimeValue last;

    bool toldToMoveCloser = false;
    bool toldDistanceIsGood = false;

    filters::OneEuroFilter<Eigen::Vector3d> positionFilter;
    filters::OneEuroFilter<Eigen::Quaterniond> orientationFilter;
};

void VideoIMUFusion::enterCameraPoseAcquisitionState() {
    m_startupData.reset(new VideoIMUFusion::StartupData);
    m_state = State::AcquiringCameraPose;
}

void VideoIMUFusion::handleVideoTrackerDataDuringStartup(
    const OSVR_TimeValue &timestamp, const OSVR_PoseReport &report,
    const OSVR_OrientationState &orientation) {
    assert(!running() &&
           "handleVideoTrackerDataDuringStartup() called when running!");
    m_startupData->handleReport(timestamp, report, orientation);
    if (m_startupData->finished()) {
        enterRunningState(m_startupData->getRoomToCamera(), timestamp, report,
                          orientation);
    }
}
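
// A minimal usage sketch (illustrative only: the report sources, loop
// structure, and helper names such as nextIMUReport()/nextVideoReport() below
// are hypothetical and not part of this file or the OSVR API):
//
//     VideoIMUFusion fusion; // default VideoIMUFusionParams
//     for (;;) {
//         OSVR_TimeValue now;
//         OSVR_OrientationReport imu = nextIMUReport(&now);
//         fusion.handleIMUData(now, imu);
//
//         OSVR_PoseReport video;
//         if (nextVideoReport(&now, &video)) {
//             if (fusion.running()) {
//                 fusion.handleVideoTrackerDataWhileRunning(now, video);
//             } else {
//                 fusion.handleVideoTrackerDataDuringStartup(now, video,
//                                                            imu.rotation);
//             }
//         }
//         // The fused result lands in m_lastPose / m_lastTime and is exposed
//         // through accessor members declared in VideoIMUFusion.h.
//     }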