// Internal Includes
// ...

// Library/third-party includes
#include <opencv2/core/version.hpp>
#include <opencv2/highgui/highgui.hpp>
#include <opencv2/imgproc/imgproc.hpp>

// Standard includes
#include <fstream>
#include <iostream>
#include <sstream>
#include <string>
VideoBasedTracker::VideoBasedTracker(ConfigParams const &params)
    : m_params(params), m_blobExtractor(params.blobParams) {}
void VideoBasedTracker::addSensor(
    LedIdentifierPtr &&identifier, CameraParameters const &camParams,
    std::function<void(BeaconBasedPoseEstimator &)> const &beaconAdder,
    size_t requiredInliers, size_t permittedOutliers) {
    m_camParams = camParams;
    m_identifiers.emplace_back(std::move(identifier));
    m_estimators.emplace_back(new BeaconBasedPoseEstimator(
        camParams.createUndistortedVariant(), requiredInliers,
        permittedOutliers, m_params));
    m_led_groups.emplace_back();
    beaconAdder(*m_estimators.back());
}
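// Usage sketch (illustrative only, not part of this file): a caller that has
// built an LED identifier and camera parameters might register a sensor
// roughly like this.  The beacon data (`beaconLocations`, `beaconDirections`,
// `beaconVariance`, `fixedBeaconPredicate`) and the inlier/outlier counts are
// hypothetical placeholders.
//
//     VideoBasedTracker tracker(params);
//     tracker.addSensor(std::move(identifier), camParams,
//                       [&](BeaconBasedPoseEstimator &estimator) {
//                           estimator.SetBeacons(beaconLocations,
//                                                beaconDirections,
//                                                beaconVariance,
//                                                fixedBeaconPredicate);
//                       },
//                       4, 2);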
void VideoBasedTracker::addSensor(
    LedIdentifierPtr &&identifier, CameraParameters const &camParams,
    Point3Vector const &locations, Vec3Vector const &emissionDirection,
    std::vector<double> const &variance,
    BeaconIDPredicate const &autocalibrationFixedPredicate,
    size_t requiredInliers, size_t permittedOutliers,
    double beaconAutocalibErrorScale) {
    addSensor(std::move(identifier), camParams,
              [&](BeaconBasedPoseEstimator &estimator) {
                  estimator.SetBeacons(locations, emissionDirection, variance,
                                       autocalibrationFixedPredicate,
                                       beaconAutocalibErrorScale);
              },
              requiredInliers, permittedOutliers);
}
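// This overload is a convenience wrapper: it forwards to the callback-based
// addSensor() above with a lambda that installs the supplied beacon data on
// the newly created estimator.  A caller holding the beacon tables directly
// might use it as below (sketch; the data vectors, predicate, and numeric
// arguments are hypothetical placeholders):
//
//     tracker.addSensor(std::move(identifier), camParams, beaconLocations,
//                       beaconDirections, beaconVariance,
//                       fixedBeaconPredicate, 4, 2, 1.0);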
void VideoBasedTracker::dumpKeypointDebugData(
    std::vector<cv::KeyPoint> const &keypoints) {
    {
        std::cout << "Dumping blob detection debug data, capture frame "
                  << m_debugFrame << std::endl;
        cv::imwrite("debug_rawimage" + std::to_string(m_debugFrame) + ".png",
                    m_frame);
        cv::imwrite("debug_blobframe" + std::to_string(m_debugFrame) + ".png",
                    m_imageWithBlobs);
        cv::imwrite("debug_thresholded" + std::to_string(m_debugFrame) +
                        ".png",
                    m_thresholdImage);
    }

    {
        // Dump the blob-detector parameters and the thresholds it sweeps.
        auto filename = std::string{"debug_data" +
                                    std::to_string(m_debugFrame) + ".txt"};
        std::ofstream datafile{filename.c_str()};
        datafile << "MinThreshold: " << m_sbdParams.minThreshold << std::endl;
        datafile << "MaxThreshold: " << m_sbdParams.maxThreshold << std::endl;
        datafile << "ThresholdStep: " << m_sbdParams.thresholdStep
                 << std::endl;
        datafile << "Thresholds:" << std::endl;
        for (double thresh = m_sbdParams.minThreshold;
             thresh < m_sbdParams.maxThreshold;
             thresh += m_sbdParams.thresholdStep) {
            datafile << thresh << std::endl;
        }
    }

    {
        // Dump one CSV row per detected keypoint.
        using namespace osvr::util;
        CSV kpcsv;
        for (auto &keypoint : keypoints) {
            kpcsv.row() << cell("x", keypoint.pt.x)
                        << cell("y", keypoint.pt.y)
                        << cell("size", keypoint.size);
        }
        auto filename = std::string{"debug_blobdetect" +
                                    std::to_string(m_debugFrame) + ".csv"};
        std::ofstream csvfile{filename.c_str()};
        kpcsv.output(csvfile);
    }
    std::cout << "Data dump complete." << std::endl;
    m_debugFrame++;
}
bool VideoBasedTracker::processImage(cv::Mat frame, cv::Mat grayImage,
                                     OSVR_TimeValue const &tv,
                                     PoseHandler handler) {
    m_assertInvariants();
    bool done = false;
    m_frame = frame;
    m_imageGray = grayImage;
    auto foundLeds = m_blobExtractor.extractBlobs(grayImage);

    // Convert the detected blobs from distorted image coordinates to
    // undistorted ones, using the camera parameters.
    auto undistortedLeds = undistortLeds(foundLeds, m_camParams);
    // We allow multiple sets of LEDs, one per sensor, in the same image, so
    // run the matching and identification separately for each sensor.
    for (size_t sensor = 0; sensor < m_identifiers.size(); sensor++) {
        auto ledsMeasurements = undistortedLeds;

        // Associate each LED tracked in the previous frame with the nearest
        // blob in this frame, if it lies within a move threshold scaled by
        // the blob's diameter; otherwise stop tracking that LED.
        auto &myLeds = m_led_groups[sensor];
        auto led = begin(myLeds);
        auto e = end(myLeds);
        while (led != end(myLeds)) {
            auto threshold = m_params.blobMoveThreshold *
                             led->getMeasurement().diameter;
            auto nearest = led->nearest(ledsMeasurements, threshold);
            if (nearest == end(ledsMeasurements)) {
                // No blob close enough to this LED, so drop it.
                led = myLeds.erase(led);
            } else {
                // Update the LED with the matched measurement and remove
                // that blob from the pool of candidates.
                led->addMeasurement(*nearest, m_params.blobsKeepIdentity);
                ledsMeasurements.erase(nearest);
                ++led;
            }
        }
        // Any remaining unmatched blobs become newly tracked LEDs.
        for (auto &remainingLed : ledsMeasurements) {
            myLeds.emplace_back(m_identifiers[sensor].get(), remainingLed);
        }
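        // Note on the move threshold used above: the search radius scales
        // with the blob's apparent size, threshold = blobMoveThreshold *
        // diameter.  With made-up numbers, blobMoveThreshold = 4.0 and a
        // 10-pixel blob give a 40-pixel radius for the nearest-blob match.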
        // Compute the pose of the target relative to the camera, if this
        // sensor's estimator can get one from the identified LEDs.
        bool gotPose = false;
        if (m_estimators[sensor]) {
            OSVR_PoseState pose;
            if (m_estimators[sensor]->EstimatePoseFromLeds(
                    m_led_groups[sensor], tv, pose)) {
                handler(static_cast<unsigned>(sensor), pose);
                gotPose = true;
            }
        }
        if (m_params.debug) {
            static const auto RED = cv::Vec3b(0, 0, 255);
            static const auto YELLOW = cv::Vec3b(0, 255, 255);
            static const auto GREEN = cv::Vec3b(0, 255, 0);
            static int count = 0;
            // ...

            // Grab the debug images produced by the blob extractor.
            m_thresholdImage = m_blobExtractor.getDebugThresholdImage();
            m_imageWithBlobs = m_blobExtractor.getDebugBlobImage();

            // Build the status image on a copy of the camera frame: red
            // circles for unidentified LEDs, yellow for identified LEDs when
            // no pose was computed this frame.
            m_frame.copyTo(m_statusImage);
            for (auto const &led : m_led_groups[sensor]) {
                auto loc = led.getLocation();
                if (!led.identified()) {
                    drawLedCircleOnStatusImage(led, false, RED);
                } else if (!gotPose) {
                    drawLedCircleOnStatusImage(led, false, YELLOW);
                    drawRecognizedLedIdOnStatusImage(led);
                }
            }

            // Label each recognized blob with its one-based ID in the blob
            // image.
            for (auto &led : m_led_groups[sensor]) {
                auto label = std::to_string(led.getOneBasedID());
                cv::Point where = led.getLocation();
                cv::putText(m_imageWithBlobs, label, where,
                            cv::FONT_HERSHEY_SIMPLEX, 0.5, /* ... */);
            }

            if (gotPose) {
                // Reproject the model beacons into the image and label them
                // so they can be compared against the detected blobs.
                std::vector<cv::Point2f> imagePoints;
                m_estimators[sensor]->ProjectBeaconsToImage(imagePoints);
                const size_t n = imagePoints.size();
                for (size_t i = 0; i < n; ++i) {
                    auto label = std::to_string(i + 1);
                    auto where = imagePoints[i];
                    cv::putText(m_imageWithBlobs, label, where,
                                cv::FONT_HERSHEY_SIMPLEX, 0.5, /* ... */);
                }

                // Identified LEDs get a circle on the status image: green if
                // they were used in the last pose fit, yellow otherwise, plus
                // an ID label at the beacon's reprojected location.
                for (auto const &led : m_led_groups[sensor]) {
                    if (led.identified()) {
                        auto color = led.wasUsedLastFrame() ? GREEN : YELLOW;
                        drawLedCircleOnStatusImage(led, true, color);
                        auto id = static_cast<size_t>(led.getID());
                        auto reprojection = imagePoints[id];
                        drawRecognizedLedIdOnStatusImage(led);
                        cv::putText(m_statusImage,
                                    std::to_string(led.getOneBasedID()),
                                    reprojection, cv::FONT_HERSHEY_SIMPLEX,
                                    /* ... */);
                    }
                }
            }
            if (!m_debugHelpDisplayed) {
                std::cout
                    << "\nVideo-based tracking debug windows help:\n";
                std::cout
                    << " - press 's' to show the detected blobs and "
                       "the status of recognized beacons (default)\n"
                    << " - press 'b' to show the labeled blobs and "
                       "the reprojected beacons\n"
                    << " - press 'i' to show the raw input image\n"
                    << " - press 't' to show the blob-detecting "
                       "thresholded image\n"
                    << " - press 'p' to dump the current "
                       "auto-calibrated beacon positions to a CSV "
                       "file\n"
                    << " - press 'q' to quit the debug windows "
                       "(tracker will continue operation)\n";
                m_debugHelpDisplayed = true;
            }
            // Show this sensor's debug window and handle the debug keys.
            std::ostringstream windowName;
            windowName << "Sensor" << sensor;
            cv::imshow(windowName.str(), *m_shownImage);
            int key = cv::waitKey(1) & 0xff;
            switch (key) {
            case 's': // detected blobs and beacon status (the default)
                m_shownImage = &m_statusImage;
                break;
            case 'b': // labeled blobs and reprojected beacons
                m_shownImage = &m_imageWithBlobs;
                break;
            case 'i': // raw input image
                m_shownImage = &m_frame;
                break;
            case 't': // blob-detecting thresholded image
                m_shownImage = &m_thresholdImage;
                break;
            // ... (a further key, elided in this listing, dumps the
            // per-frame blob detection data)
                dumpKeypointDebugData(foundKeyPoints);
                break;
            case 'p': {
                // Dump the current auto-calibrated beacon positions to CSV.
                std::ofstream beaconfile("beacons.csv");
                for (auto const &estimator : m_estimators) {
                    beaconfile << "----" << std::endl;
                    estimator->dumpBeaconLocationsToStream(beaconfile);
                }
                break;
            }
            case 'q': // close the debug windows; tracking continues
                m_params.debug = false;
                cv::destroyAllWindows();
                break;
            }
        }
    }
    // ...
    m_assertInvariants();
    return done;
}
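// Usage sketch (illustrative only): a capture loop in the hosting plugin
// might feed frames to a configured VideoBasedTracker `tracker` roughly as
// below.  `camera` and `report()` are hypothetical placeholders; only the
// processImage() signature and the handler's (sensor, pose) arguments come
// from the code above.
//
//     cv::Mat frame, gray;
//     while (camera.read(frame)) {
//         OSVR_TimeValue timestamp;
//         osvrTimeValueGetNow(&timestamp);
//         cv::cvtColor(frame, gray, cv::COLOR_BGR2GRAY);
//         tracker.processImage(
//             frame, gray, timestamp,
//             [&](unsigned sensor, OSVR_PoseState const &pose) {
//                 report(sensor, pose); // e.g., send the pose on to OSVR
//             });
//     }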
void VideoBasedTracker::drawLedCircleOnStatusImage(Led const &led,
                                                   bool filled,
                                                   cv::Vec3b color) {
    // ...
}
void VideoBasedTracker::drawRecognizedLedIdOnStatusImage(Led const &led) {
    auto label = std::to_string(led.getOneBasedID());
    cv::putText(m_statusImage, label, led.getLocation(),
                cv::FONT_HERSHEY_SIMPLEX, 0.25, cv::Scalar(127, 127, 127));
}