From db59542cbb2fc3c2ac1b40928d514113bced8b0b Mon Sep 17 00:00:00 2001 From: Stanislaw Halik Date: Sat, 6 Sep 2014 03:50:52 -0700 Subject: rename case --- ftnoir_tracker_pt/trans_calib.h | 39 +++++++++++++++++++++++++++++++++++++++ 1 file changed, 39 insertions(+) create mode 100644 ftnoir_tracker_pt/trans_calib.h (limited to 'ftnoir_tracker_pt/trans_calib.h') diff --git a/ftnoir_tracker_pt/trans_calib.h b/ftnoir_tracker_pt/trans_calib.h new file mode 100644 index 00000000..f2521690 --- /dev/null +++ b/ftnoir_tracker_pt/trans_calib.h @@ -0,0 +1,39 @@ +/* Copyright (c) 2012 Patrick Ruoff + * + * Permission to use, copy, modify, and/or distribute this software for any + * purpose with or without fee is hereby granted, provided that the above + * copyright notice and this permission notice appear in all copies. + */ + +#ifndef TRANSCALIB_H +#define TRANSCALIB_H + +#include + +//----------------------------------------------------------------------------- +// Calibrates the translation from head to model = t_MH +// by recursive least squares / +// kalman filter in information form with identity noise covariance +// measurement equation when head position = t_CH is fixed: +// (R_CM_k , Id)*(-t_MH, t_CH) = t_CM_k + +class TranslationCalibrator +{ +public: + TranslationCalibrator(); + + // reset the calibration process + void reset(); + + // update the current estimate + void update(const cv::Matx33f& R_CM_k, const cv::Vec3f& t_CM_k); + + // get the current estimate for t_MH + cv::Vec3f get_estimate(); + +protected: + cv::Matx66f P; // normalized precision matrix = inverse covariance + cv::Vec6f y; // P*(-t_MH, t_CH) +}; + +#endif //TRANSCALIB_H \ No newline at end of file -- cgit v1.2.3 From e2b296165236cdb0952c429d91d357b92d9ee18e Mon Sep 17 00:00:00 2001 From: Stanislaw Halik Date: Mon, 22 Sep 2014 19:02:33 +0200 Subject: dos2unix only --- ftnoir_tracker_pt/FTNoIR_PT_Controls.ui | 3090 ++++++++++++------------ ftnoir_tracker_pt/Resources/Logo_IR.png | Bin 10386 -> 10385 bytes ftnoir_tracker_pt/Resources/cap_front.png | Bin 1164 -> 1163 bytes ftnoir_tracker_pt/Resources/cap_side.png | Bin 1733 -> 1732 bytes ftnoir_tracker_pt/Resources/clip_front.png | Bin 571 -> 570 bytes ftnoir_tracker_pt/Resources/clip_side.png | Bin 2677 -> 2676 bytes ftnoir_tracker_pt/camera.cpp | 692 +++--- ftnoir_tracker_pt/camera.h | 288 +-- ftnoir_tracker_pt/doc/logo.png | Bin 10386 -> 10385 bytes ftnoir_tracker_pt/doc/settings1.png | Bin 25013 -> 25012 bytes ftnoir_tracker_pt/doc/settings2.png | Bin 26841 -> 26840 bytes ftnoir_tracker_pt/doc/settings3.png | Bin 29547 -> 29546 bytes ftnoir_tracker_pt/doc/style.css | 262 +- ftnoir_tracker_pt/frame_observer.cpp | 36 +- ftnoir_tracker_pt/frame_observer.h | 150 +- ftnoir_tracker_pt/ftnoir_tracker_pt.cpp | 528 ++-- ftnoir_tracker_pt/ftnoir_tracker_pt.h | 186 +- ftnoir_tracker_pt/ftnoir_tracker_pt_dialog.cpp | 628 ++--- ftnoir_tracker_pt/ftnoir_tracker_pt_dialog.h | 140 +- ftnoir_tracker_pt/ftnoir_tracker_pt_dll.cpp | 84 +- ftnoir_tracker_pt/ftnoir_tracker_pt_dll.h | 52 +- ftnoir_tracker_pt/ftnoir_tracker_pt_settings.h | 162 +- ftnoir_tracker_pt/point_extractor.cpp | 326 +-- ftnoir_tracker_pt/point_extractor.h | 70 +- ftnoir_tracker_pt/point_tracker.cpp | 750 +++--- ftnoir_tracker_pt/point_tracker.h | 256 +- ftnoir_tracker_pt/pt_video_widget.cpp | 128 +- ftnoir_tracker_pt/pt_video_widget.h | 140 +- ftnoir_tracker_pt/trans_calib.cpp | 86 +- ftnoir_tracker_pt/trans_calib.h | 76 +- 30 files changed, 4065 insertions(+), 4065 deletions(-) (limited to 
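The trans_calib.h header added above describes a recursive least-squares estimator in information form: each sample yields the linear measurement H_k * x = t_CM_k with H_k = [R_CM_k | I3] and x = (-t_MH, t_CH), so the filter accumulates P += H_k^T * H_k and y += H_k^T * t_CM_k, and the estimate is x_hat = P^(-1) * y, whose negated first three components give t_MH. The matching trans_calib.cpp appears in the diffstat but its body is not shown here; the fragment below is only a sketch of that update using the OpenCV types from the header, with illustrative free-function names, not the file's verbatim contents.

    #include <opencv2/core/core.hpp>

    // One information-form RLS step for the measurement
    // [R_CM_k | I3] * (-t_MH, t_CH) = t_CM_k with unit noise covariance.
    void calib_update_sketch(cv::Matx66f& P, cv::Vec6f& y,
                             const cv::Matx33f& R_CM_k, const cv::Vec3f& t_CM_k)
    {
        cv::Matx<float, 6, 3> Ht = cv::Matx<float, 6, 3>::zeros();  // H^T
        for (int i = 0; i < 3; ++i)
            for (int j = 0; j < 3; ++j)
                Ht(i, j) = R_CM_k(j, i);   // upper block: R_CM_k^T
        for (int i = 0; i < 3; ++i)
            Ht(3 + i, i) = 1.f;            // lower block: identity
        P += Ht * Ht.t();                  // accumulate precision (inverse covariance)
        y += Ht * t_CM_k;                  // accumulate information vector
    }

    // Estimate of t_MH: minus the first three components of P^(-1) * y.
    cv::Vec3f calib_estimate_sketch(const cv::Matx66f& P, const cv::Vec6f& y)
    {
        cv::Vec6f x = P.inv(cv::DECOMP_CHOLESKY) * y;
        return cv::Vec3f(-x[0], -x[1], -x[2]);
    }
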
'ftnoir_tracker_pt/trans_calib.h') diff --git a/ftnoir_tracker_pt/FTNoIR_PT_Controls.ui b/ftnoir_tracker_pt/FTNoIR_PT_Controls.ui index 44dfc060..a2d5c47c 100644 --- a/ftnoir_tracker_pt/FTNoIR_PT_Controls.ui +++ b/ftnoir_tracker_pt/FTNoIR_PT_Controls.ui @@ -1,1545 +1,1545 @@ - - - UICPTClientControls - - - Qt::NonModal - - - - 0 - 0 - 459 - 621 - - - - - 0 - 0 - - - - PointTracker Settings - - - - :/Resources/Logo_IR.png:/Resources/Logo_IR.png - - - Qt::LeftToRight - - - false - - - - QLayout::SetFixedSize - - - - - - 0 - 0 - - - - - 0 - 0 - - - - - - - 0 - - - - General - - - - - - Tracker Thread - - - - - - Auto-reset time - - - reset_spin - - - - - - - - 0 - 0 - - - - Time until automatic reset of tracker's internal state when no valid tracking result is found - - - ms - - - 9999 - - - - - - - Dynamic Pose Resolution - - - - - - - - - - - - - - - - - false - - - Reset the tracker's internal state - - - Reset - - - - - - - - - - Qt::Vertical - - - - 20 - 20 - - - - - - - - - Camera - - - - - - The camera device used as input - - - Camera Settings - - - - - - - - - 55 - 0 - - - - Device - - - camdevice_combo - - - - - - - - 0 - 0 - - - - Camera device used as input - - - - - - - - - - - - - - 55 - 0 - - - - Resolution - - - - - - - FPS - - - fps_spin - - - - - - - - 0 - 0 - - - - Desired capture framerate - - - 999 - - - - - - - x - - - - - - - - 0 - 0 - - - - Desired capture width - - - 2000 - - - 10 - - - - - - - Desired capture height - - - 2000 - - - 10 - - - - - - - F/W - - - f_dspin - - - - - - - The camera's focal length devided by its sensor width - - - 2 - - - 0.100000000000000 - - - - - - - Qt::Horizontal - - - - 40 - 20 - - - - - - - - Qt::Horizontal - - - - 0 - 20 - - - - - - - - - - - - - - - - - - Camera Orientation - - - - - - - - - - Pitch - - - campitch_spin - - - - - - - Qt::DefaultContextMenu - - - The angle the camera is facing upwards - - - -99 - - - - - - - Yaw - - - camyaw_spin - - - - - - - Qt::DefaultContextMenu - - - The angle the camera is facing leftwards - - - -99 - - - - - - - deg (positve = leftwards) - - - - - - - - 0 - 0 - - - - - 0 - 0 - - - - Rotation of the camera image - - - - - - - deg (positive = upwards) - - - - - - - deg - - - - - - - Roll - - - camroll_combo - - - - - - - - - Qt::Horizontal - - - - 0 - 20 - - - - - - - - - - - - - Qt::Vertical - - - - 20 - 0 - - - - - - - - Point Extraction - - - - - - - - Threshold - - - threshold_slider - - - - - - - Intensity threshold for point extraction - - - 255 - - - 127 - - - Qt::Horizontal - - - - - - - - - - - Hysteresis - - - threshold_secondary_slider - - - - - - - Per pixel hysteresis width (leave left if there is little difference between dot and non-dot, move right for increased stability against pixel noise) - - - 255 - - - 1 - - - 100 - - - Qt::Horizontal - - - - - - - - - - - Min Diameter - - - mindiam_spin - - - - - - - Minimum point diameter - - - - - - - px - - - - - - - Qt::Horizontal - - - - 40 - 20 - - - - - - - - Max Diameter - - - maxdiam_spin - - - - - - - Maximum point diameter - - - - - - - px - - - - - - - - - - - - - Model - - - - - - QTabWidget::Rounded - - - 2 - - - false - - - false - - - false - - - - Clip - - - - - - Model Dimensions (mm) - - - - - - - 0 - 0 - - - - - 150 - 160 - - - - - - 30 - 30 - 71 - 111 - - - - - - - :/Resources/clip_side.png - - - - - - 100 - 50 - 46 - 22 - - - - 999 - - - - - - 60 - 10 - 46 - 22 - - - - 999 - - - - - - 100 - 90 - 46 - 22 - - - - 999 - - - - - - 10 - 10 - 46 - 13 - - - - Side - - - - - - 40 - 140 - 46 - 22 - - - - 999 - - - - - - 70 - 70 
- 16 - 16 - - - - R - - - - - - - - - 0 - 0 - - - - - 100 - 140 - - - - - - 10 - 10 - 46 - 13 - - - - Front - - - - - - 40 - 30 - 21 - 111 - - - - - - - :/Resources/clip_front.png - - - - - - 60 - 70 - 16 - 16 - - - - R - - - - - - - - - - - - Cap - - - - - - Model Dimensions (mm) - - - - - - - 140 - 130 - - - - - - 20 - 50 - 111 - 81 - - - - - - - :/Resources/cap_side.png - - - - - - 30 - 80 - 46 - 22 - - - - 999 - - - - - - 130 - 50 - 16 - 16 - - - - R - - - - - - 10 - 10 - 46 - 13 - - - - Side - - - - - - 50 - 40 - 46 - 22 - - - - 999 - - - - - - - - - 0 - 0 - - - - - 100 - 130 - - - - - - 10 - 10 - 46 - 13 - - - - Front - - - - - - 30 - 50 - 16 - 16 - - - - R - - - - - - 10 - 50 - 81 - 81 - - - - - - - :/Resources/cap_front.png - - - - - - 50 - 30 - 46 - 22 - - - - 999 - - - - - - - - - - - - Custom - - - - - - Model Dimensions (mm) - - - - - - <html><head/><body><p>Location of the two remaining model points<br/>with respect to the reference point in default pose</p></body></html> - - - - - - - Qt::Vertical - - - - 20 - 0 - - - - - - - - - - Qt::Horizontal - - - - 10 - 20 - - - - - - - - - - -999 - - - 999 - - - - - - - y: - - - - - - - -999 - - - 999 - - - - - - - z: - - - - - - - M1: - - - - - - - -999 - - - 999 - - - - - - - x: - - - - - - - - - Qt::Horizontal - - - - 20 - 20 - - - - - - - - - - -999 - - - 999 - - - - - - - x: - - - - - - - z: - - - - - - - -999 - - - 999 - - - - - - - y: - - - - - - - M2: - - - - - - - - - - -999 - - - 999 - - - - - - - - - Qt::Horizontal - - - - 10 - 20 - - - - - - - - - - Qt::Vertical - - - - 20 - 0 - - - - - - - - - - - - - - - Model Position (mm) - - - - - - <html><head/><body><p>Translation from head center to model reference point<br/> in default pose</p></body></html> - - - - - - - - - Qt::Horizontal - - - - 10 - 20 - - - - - - - - - - - - - -999 - - - 999 - - - - - - - x: - - - - - - - y: - - - - - - - z: - - - - - - - -999 - - - 999 - - - - - - - -999 - - - 999 - - - - - - - - - Qt::Horizontal - - - - 20 - 20 - - - - - - - - false - - - Calibrate - - - true - - - - - - - Qt::Horizontal - - - - 10 - 20 - - - - - - - - - - - - - - About - - - - - 30 - 30 - 161 - 111 - - - - <html><head/><body><p><span style=" font-weight:600;">FTNoIR PointTracker Plugin<br/>Version 1.1</span></p><p><span style=" font-weight:600;">by Patrick Ruoff</span></p><p><a href="http://ftnoirpt.sourceforge.net/"><span style=" font-weight:600; text-decoration: underline; color:#0000ff;">Manual (external)</span></a></p></body></html> - - - true - - - - - - 200 - 30 - 141 - 141 - - - - - - - :/Resources/Logo_IR.png - - - - - - - - - Status - - - - - - QFormLayout::AllNonFixedFieldsGrow - - - - - Camera Info: - - - - - - - - 0 - 0 - - - - - 120 - 0 - - - - - - - - - - - Extracted Points: - - - - - - - - 50 - 0 - - - - - - - - - - - - - - - - - - Save - - - - - - - Qt::Horizontal - - - - 40 - 20 - - - - - - - - - - - Ok - - - - - - - - - - Cancel - - - - - - - - - tabWidget - reset_spin - camdevice_combo - res_x_spin - res_y_spin - fps_spin - f_dspin - camroll_combo - campitch_spin - camyaw_spin - threshold_slider - mindiam_spin - maxdiam_spin - model_tabs - clip_tlength_spin - clip_theight_spin - clip_bheight_spin - clip_blength_spin - cap_length_spin - cap_height_spin - cap_width_spin - m1x_spin - m1y_spin - m1z_spin - m2x_spin - m2y_spin - m2z_spin - tx_spin - ty_spin - tz_spin - tcalib_button - ok_button - cancel_button - - - - - - - dynpose_check - toggled(bool) - reset_spin - setEnabled(bool) - - - 172 - 110 - - - 351 - 112 - - - - - - startEngineClicked() - 
stopEngineClicked() - cameraSettingsClicked() - - + + + UICPTClientControls + + + Qt::NonModal + + + + 0 + 0 + 459 + 621 + + + + + 0 + 0 + + + + PointTracker Settings + + + + :/Resources/Logo_IR.png:/Resources/Logo_IR.png + + + Qt::LeftToRight + + + false + + + + QLayout::SetFixedSize + + + + + + 0 + 0 + + + + + 0 + 0 + + + + + + + 0 + + + + General + + + + + + Tracker Thread + + + + + + Auto-reset time + + + reset_spin + + + + + + + + 0 + 0 + + + + Time until automatic reset of tracker's internal state when no valid tracking result is found + + + ms + + + 9999 + + + + + + + Dynamic Pose Resolution + + + + + + + + + + + + + + + + + false + + + Reset the tracker's internal state + + + Reset + + + + + + + + + + Qt::Vertical + + + + 20 + 20 + + + + + + + + + Camera + + + + + + The camera device used as input + + + Camera Settings + + + + + + + + + 55 + 0 + + + + Device + + + camdevice_combo + + + + + + + + 0 + 0 + + + + Camera device used as input + + + + + + + + + + + + + + 55 + 0 + + + + Resolution + + + + + + + FPS + + + fps_spin + + + + + + + + 0 + 0 + + + + Desired capture framerate + + + 999 + + + + + + + x + + + + + + + + 0 + 0 + + + + Desired capture width + + + 2000 + + + 10 + + + + + + + Desired capture height + + + 2000 + + + 10 + + + + + + + F/W + + + f_dspin + + + + + + + The camera's focal length devided by its sensor width + + + 2 + + + 0.100000000000000 + + + + + + + Qt::Horizontal + + + + 40 + 20 + + + + + + + + Qt::Horizontal + + + + 0 + 20 + + + + + + + + + + + + + + + + + + Camera Orientation + + + + + + + + + + Pitch + + + campitch_spin + + + + + + + Qt::DefaultContextMenu + + + The angle the camera is facing upwards + + + -99 + + + + + + + Yaw + + + camyaw_spin + + + + + + + Qt::DefaultContextMenu + + + The angle the camera is facing leftwards + + + -99 + + + + + + + deg (positve = leftwards) + + + + + + + + 0 + 0 + + + + + 0 + 0 + + + + Rotation of the camera image + + + + + + + deg (positive = upwards) + + + + + + + deg + + + + + + + Roll + + + camroll_combo + + + + + + + + + Qt::Horizontal + + + + 0 + 20 + + + + + + + + + + + + + Qt::Vertical + + + + 20 + 0 + + + + + + + + Point Extraction + + + + + + + + Threshold + + + threshold_slider + + + + + + + Intensity threshold for point extraction + + + 255 + + + 127 + + + Qt::Horizontal + + + + + + + + + + + Hysteresis + + + threshold_secondary_slider + + + + + + + Per pixel hysteresis width (leave left if there is little difference between dot and non-dot, move right for increased stability against pixel noise) + + + 255 + + + 1 + + + 100 + + + Qt::Horizontal + + + + + + + + + + + Min Diameter + + + mindiam_spin + + + + + + + Minimum point diameter + + + + + + + px + + + + + + + Qt::Horizontal + + + + 40 + 20 + + + + + + + + Max Diameter + + + maxdiam_spin + + + + + + + Maximum point diameter + + + + + + + px + + + + + + + + + + + + + Model + + + + + + QTabWidget::Rounded + + + 2 + + + false + + + false + + + false + + + + Clip + + + + + + Model Dimensions (mm) + + + + + + + 0 + 0 + + + + + 150 + 160 + + + + + + 30 + 30 + 71 + 111 + + + + + + + :/Resources/clip_side.png + + + + + + 100 + 50 + 46 + 22 + + + + 999 + + + + + + 60 + 10 + 46 + 22 + + + + 999 + + + + + + 100 + 90 + 46 + 22 + + + + 999 + + + + + + 10 + 10 + 46 + 13 + + + + Side + + + + + + 40 + 140 + 46 + 22 + + + + 999 + + + + + + 70 + 70 + 16 + 16 + + + + R + + + + + + + + + 0 + 0 + + + + + 100 + 140 + + + + + + 10 + 10 + 46 + 13 + + + + Front + + + + + + 40 + 30 + 21 + 111 + + + + + + + :/Resources/clip_front.png + + + + + + 60 + 70 + 16 + 16 + + + + R + + + 
+ + + + + + + + + Cap + + + + + + Model Dimensions (mm) + + + + + + + 140 + 130 + + + + + + 20 + 50 + 111 + 81 + + + + + + + :/Resources/cap_side.png + + + + + + 30 + 80 + 46 + 22 + + + + 999 + + + + + + 130 + 50 + 16 + 16 + + + + R + + + + + + 10 + 10 + 46 + 13 + + + + Side + + + + + + 50 + 40 + 46 + 22 + + + + 999 + + + + + + + + + 0 + 0 + + + + + 100 + 130 + + + + + + 10 + 10 + 46 + 13 + + + + Front + + + + + + 30 + 50 + 16 + 16 + + + + R + + + + + + 10 + 50 + 81 + 81 + + + + + + + :/Resources/cap_front.png + + + + + + 50 + 30 + 46 + 22 + + + + 999 + + + + + + + + + + + + Custom + + + + + + Model Dimensions (mm) + + + + + + <html><head/><body><p>Location of the two remaining model points<br/>with respect to the reference point in default pose</p></body></html> + + + + + + + Qt::Vertical + + + + 20 + 0 + + + + + + + + + + Qt::Horizontal + + + + 10 + 20 + + + + + + + + + + -999 + + + 999 + + + + + + + y: + + + + + + + -999 + + + 999 + + + + + + + z: + + + + + + + M1: + + + + + + + -999 + + + 999 + + + + + + + x: + + + + + + + + + Qt::Horizontal + + + + 20 + 20 + + + + + + + + + + -999 + + + 999 + + + + + + + x: + + + + + + + z: + + + + + + + -999 + + + 999 + + + + + + + y: + + + + + + + M2: + + + + + + + + + + -999 + + + 999 + + + + + + + + + Qt::Horizontal + + + + 10 + 20 + + + + + + + + + + Qt::Vertical + + + + 20 + 0 + + + + + + + + + + + + + + + Model Position (mm) + + + + + + <html><head/><body><p>Translation from head center to model reference point<br/> in default pose</p></body></html> + + + + + + + + + Qt::Horizontal + + + + 10 + 20 + + + + + + + + + + + + + -999 + + + 999 + + + + + + + x: + + + + + + + y: + + + + + + + z: + + + + + + + -999 + + + 999 + + + + + + + -999 + + + 999 + + + + + + + + + Qt::Horizontal + + + + 20 + 20 + + + + + + + + false + + + Calibrate + + + true + + + + + + + Qt::Horizontal + + + + 10 + 20 + + + + + + + + + + + + + + About + + + + + 30 + 30 + 161 + 111 + + + + <html><head/><body><p><span style=" font-weight:600;">FTNoIR PointTracker Plugin<br/>Version 1.1</span></p><p><span style=" font-weight:600;">by Patrick Ruoff</span></p><p><a href="http://ftnoirpt.sourceforge.net/"><span style=" font-weight:600; text-decoration: underline; color:#0000ff;">Manual (external)</span></a></p></body></html> + + + true + + + + + + 200 + 30 + 141 + 141 + + + + + + + :/Resources/Logo_IR.png + + + + + + + + + Status + + + + + + QFormLayout::AllNonFixedFieldsGrow + + + + + Camera Info: + + + + + + + + 0 + 0 + + + + + 120 + 0 + + + + + + + + + + + Extracted Points: + + + + + + + + 50 + 0 + + + + + + + + + + + + + + + + + + Save + + + + + + + Qt::Horizontal + + + + 40 + 20 + + + + + + + + + + + Ok + + + + + + + + + + Cancel + + + + + + + + + tabWidget + reset_spin + camdevice_combo + res_x_spin + res_y_spin + fps_spin + f_dspin + camroll_combo + campitch_spin + camyaw_spin + threshold_slider + mindiam_spin + maxdiam_spin + model_tabs + clip_tlength_spin + clip_theight_spin + clip_bheight_spin + clip_blength_spin + cap_length_spin + cap_height_spin + cap_width_spin + m1x_spin + m1y_spin + m1z_spin + m2x_spin + m2y_spin + m2z_spin + tx_spin + ty_spin + tz_spin + tcalib_button + ok_button + cancel_button + + + + + + + dynpose_check + toggled(bool) + reset_spin + setEnabled(bool) + + + 172 + 110 + + + 351 + 112 + + + + + + startEngineClicked() + stopEngineClicked() + cameraSettingsClicked() + + diff --git a/ftnoir_tracker_pt/Resources/Logo_IR.png b/ftnoir_tracker_pt/Resources/Logo_IR.png index 95032a25..85590691 100644 Binary files a/ftnoir_tracker_pt/Resources/Logo_IR.png and 
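A note on the "F/W" spin box in the camera group of the dialog above: it holds the focal length divided by the sensor width, i.e. a focal length expressed in units of the image width. Under the usual pinhole model this means f_pixels = (F/W) * res_x; for example, F/W = 1.0 at a 640x480 capture resolution corresponds to a focal length of roughly 640 px. This reading is an inference from the tooltip and from the width-normalized point coordinates used in ftnoir_tracker_pt.cpp later in this patch, not a statement made by the patch itself.
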
b/ftnoir_tracker_pt/Resources/Logo_IR.png differ diff --git a/ftnoir_tracker_pt/Resources/cap_front.png b/ftnoir_tracker_pt/Resources/cap_front.png index 14207a67..cbee28c9 100644 Binary files a/ftnoir_tracker_pt/Resources/cap_front.png and b/ftnoir_tracker_pt/Resources/cap_front.png differ diff --git a/ftnoir_tracker_pt/Resources/cap_side.png b/ftnoir_tracker_pt/Resources/cap_side.png index 5ad4ee65..27c28341 100644 Binary files a/ftnoir_tracker_pt/Resources/cap_side.png and b/ftnoir_tracker_pt/Resources/cap_side.png differ diff --git a/ftnoir_tracker_pt/Resources/clip_front.png b/ftnoir_tracker_pt/Resources/clip_front.png index 04880138..63fd70eb 100644 Binary files a/ftnoir_tracker_pt/Resources/clip_front.png and b/ftnoir_tracker_pt/Resources/clip_front.png differ diff --git a/ftnoir_tracker_pt/Resources/clip_side.png b/ftnoir_tracker_pt/Resources/clip_side.png index 72667ac7..1c295506 100644 Binary files a/ftnoir_tracker_pt/Resources/clip_side.png and b/ftnoir_tracker_pt/Resources/clip_side.png differ diff --git a/ftnoir_tracker_pt/camera.cpp b/ftnoir_tracker_pt/camera.cpp index 33e0ef2a..686e1b9b 100644 --- a/ftnoir_tracker_pt/camera.cpp +++ b/ftnoir_tracker_pt/camera.cpp @@ -1,346 +1,346 @@ -/* Copyright (c) 2012 Patrick Ruoff - * - * Permission to use, copy, modify, and/or distribute this software for any - * purpose with or without fee is hereby granted, provided that the above - * copyright notice and this permission notice appear in all copies. - */ - - #if defined(OPENTRACK_API) && defined(_WIN32) -#include -#include -#endif - -#include "camera.h" -#include -#include - -using namespace cv; - -#if defined(OPENTRACK_API) && (defined(__unix) || defined(__linux) || defined(__APPLE__)) -#include -#endif - -#ifdef OPENTRACK_API -void get_camera_device_names(std::vector& device_names) { -# if defined(_WIN32) - // Create the System Device Enumerator. - HRESULT hr; - ICreateDevEnum *pSysDevEnum = NULL; - hr = CoCreateInstance(CLSID_SystemDeviceEnum, NULL, CLSCTX_INPROC_SERVER, IID_ICreateDevEnum, (void **)&pSysDevEnum); - if (FAILED(hr)) - { - return; - } - // Obtain a class enumerator for the video compressor category. - IEnumMoniker *pEnumCat = NULL; - hr = pSysDevEnum->CreateClassEnumerator(CLSID_VideoInputDeviceCategory, &pEnumCat, 0); - - if (hr == S_OK) { - // Enumerate the monikers. - IMoniker *pMoniker = NULL; - ULONG cFetched; - while (pEnumCat->Next(1, &pMoniker, &cFetched) == S_OK) { - IPropertyBag *pPropBag; - hr = pMoniker->BindToStorage(0, 0, IID_IPropertyBag, (void **)&pPropBag); - if (SUCCEEDED(hr)) { - // To retrieve the filter's friendly name, do the following: - VARIANT varName; - VariantInit(&varName); - hr = pPropBag->Read(L"FriendlyName", &varName, 0); - if (SUCCEEDED(hr)) - { - auto wstr = std::wstring(varName.bstrVal); - auto str = std::string(wstr.begin(), wstr.end()); - device_names.push_back(str); - } - VariantClear(&varName); - - ////// To create an instance of the filter, do the following: - ////IBaseFilter *pFilter; - ////hr = pMoniker->BindToObject(NULL, NULL, IID_IBaseFilter, - //// (void**)&pFilter); - // Now add the filter to the graph. - //Remember to release pFilter later. 
- pPropBag->Release(); - } - pMoniker->Release(); - } - pEnumCat->Release(); - } - pSysDevEnum->Release(); -# else - for (int i = 0; i < 16; i++) { - char buf[128]; - sprintf(buf, "/dev/video%d", i); - if (access(buf, R_OK | W_OK) == 0) { - device_names.push_back(std::string(buf)); - } - } -# endif -} -#else -// ---------------------------------------------------------------------------- -void get_camera_device_names(std::vector& device_names) -{ - videoInput VI; - VI.listDevices(); - std::string device_name; - for(int index = 0; ; ++index) { - device_name = VI.getDeviceName(index); - if (device_name.empty()) break; - device_names.push_back(device_name); - } -} -#endif - -// ---------------------------------------------------------------------------- -void Camera::set_device_index(int index) -{ - if (desired_index != index) - { - desired_index = index; - _set_device_index(); - - // reset fps - dt_valid = 0; - dt_mean = 0; - active_index = index; - } -} - -void Camera::set_f(float f) -{ - if (cam_desired.f != f) - { - cam_desired.f = f; - _set_f(); - } -} -void Camera::set_fps(int fps) -{ - if (cam_desired.fps != fps) - { - cam_desired.fps = fps; - _set_fps(); - } -} - -void Camera::set_res(int x_res, int y_res) -{ - if (cam_desired.res_x != x_res || cam_desired.res_y != y_res) - { - cam_desired.res_x = x_res; - cam_desired.res_y = y_res; - _set_res(); - _set_fps(); - } -} - -bool Camera::get_frame(float dt, cv::Mat* frame) -{ - bool new_frame = _get_frame(frame); - // measure fps of valid frames - const float dt_smoothing_const = 0.9; - dt_valid += dt; - if (new_frame) - { - dt_mean = dt_smoothing_const * dt_mean + (1.0 - dt_smoothing_const) * dt_valid; - cam_info.fps = 1.0 / dt_mean; - dt_valid = 0; - } - return new_frame; -} - -// ---------------------------------------------------------------------------- -#ifdef OPENTRACK_API -void CVCamera::start() -{ - cap = new VideoCapture(desired_index); - // extract camera info - if (cap->isOpened()) - { - active = true; - active_index = desired_index; - cam_info.res_x = cap->get(CV_CAP_PROP_FRAME_WIDTH); - cam_info.res_y = cap->get(CV_CAP_PROP_FRAME_HEIGHT); - } else { - delete cap; - cap = nullptr; - } -} - -void CVCamera::stop() -{ - if (cap) - { - cap->release(); - delete cap; - } - active = false; -} - -bool CVCamera::_get_frame(Mat* frame) -{ - if (cap && cap->isOpened()) - { - Mat img; - for (int i = 0; i < 100 && !cap->read(img); i++) - ;; - - if (img.empty()) - return false; - - *frame = img; - return true; - } - return false; -} - -void CVCamera::_set_index() -{ - if (active) restart(); -} - -void CVCamera::_set_f() -{ - cam_info.f = cam_desired.f; -} - -void CVCamera::_set_fps() -{ - if (cap) cap->set(CV_CAP_PROP_FPS, cam_desired.fps); -} - -void CVCamera::_set_res() -{ - if (cap) - { - cap->set(CV_CAP_PROP_FRAME_WIDTH, cam_desired.res_x); - cap->set(CV_CAP_PROP_FRAME_HEIGHT, cam_desired.res_y); - cam_info.res_x = cap->get(CV_CAP_PROP_FRAME_WIDTH); - cam_info.res_y = cap->get(CV_CAP_PROP_FRAME_HEIGHT); - } -} -void CVCamera::_set_device_index() -{ - if (cap) - { - cap->release(); - delete cap; - } - cap = new VideoCapture(desired_index); -} - -#else -// ---------------------------------------------------------------------------- -VICamera::VICamera() : frame_buffer(NULL) -{ - VI.listDevices(); -} - -void VICamera::start() -{ - if (desired_index >= 0) - { - if (cam_desired.res_x == 0 || cam_desired.res_y == 0) - VI.setupDevice(desired_index); - else - VI.setupDevice(desired_index, cam_desired.res_x, cam_desired.res_y); - - active = 
true; - active_index = desired_index; - - cam_info.res_x = VI.getWidth(active_index); - cam_info.res_y = VI.getHeight(active_index); - new_frame = cv::Mat(cam_info.res_y, cam_info.res_x, CV_8UC3); - // If matrix is not continuous we have to copy manually via frame_buffer - if (!new_frame.isContinuous()) { - unsigned int size = VI.getSize(active_index); - frame_buffer = new unsigned char[size]; - } - } -} - -void VICamera::stop() -{ - if (active) - { - VI.stopDevice(active_index); - } - if (frame_buffer) - { - delete[] frame_buffer; - frame_buffer = NULL; - } - active = false; -} - -bool VICamera::_get_frame(Mat* frame) -{ - if (active && VI.isFrameNew(active_index)) - { - if (new_frame.isContinuous()) - { - VI.getPixels(active_index, new_frame.data, false, true); - } - else - { - // If matrix is not continuous we have to copy manually via frame_buffer - VI.getPixels(active_index, frame_buffer, false, true); - new_frame = cv::Mat(cam_info.res_y, cam_info.res_x, CV_8UC3, frame_buffer).clone(); - } - *frame = new_frame; - return true; - } - return false; -} - -void VICamera::_set_device_index() -{ - if (active) restart(); -} - -void VICamera::_set_f() -{ - cam_info.f = cam_desired.f; -} - -void VICamera::_set_fps() -{ - bool was_active = active; - if (active) stop(); - VI.setIdealFramerate(desired_index, cam_desired.fps); - if (was_active) start(); -} - -void VICamera::_set_res() -{ - if (active) restart(); -} -#endif - -// ---------------------------------------------------------------------------- -Mat FrameRotation::rotate_frame(Mat frame) -{ - switch (rotation) - { - case CLOCKWISE: - { - Mat dst; - transpose(frame, dst); - flip(dst, dst, 1); - return dst; - } - - case COUNTER_CLOCKWISE: - { - Mat dst; - transpose(frame, dst); - flip(dst, dst, 0); - return dst; - } - - default: - return frame; - } -} +/* Copyright (c) 2012 Patrick Ruoff + * + * Permission to use, copy, modify, and/or distribute this software for any + * purpose with or without fee is hereby granted, provided that the above + * copyright notice and this permission notice appear in all copies. + */ + + #if defined(OPENTRACK_API) && defined(_WIN32) +#include +#include +#endif + +#include "camera.h" +#include +#include + +using namespace cv; + +#if defined(OPENTRACK_API) && (defined(__unix) || defined(__linux) || defined(__APPLE__)) +#include +#endif + +#ifdef OPENTRACK_API +void get_camera_device_names(std::vector& device_names) { +# if defined(_WIN32) + // Create the System Device Enumerator. + HRESULT hr; + ICreateDevEnum *pSysDevEnum = NULL; + hr = CoCreateInstance(CLSID_SystemDeviceEnum, NULL, CLSCTX_INPROC_SERVER, IID_ICreateDevEnum, (void **)&pSysDevEnum); + if (FAILED(hr)) + { + return; + } + // Obtain a class enumerator for the video compressor category. + IEnumMoniker *pEnumCat = NULL; + hr = pSysDevEnum->CreateClassEnumerator(CLSID_VideoInputDeviceCategory, &pEnumCat, 0); + + if (hr == S_OK) { + // Enumerate the monikers. 
+ IMoniker *pMoniker = NULL; + ULONG cFetched; + while (pEnumCat->Next(1, &pMoniker, &cFetched) == S_OK) { + IPropertyBag *pPropBag; + hr = pMoniker->BindToStorage(0, 0, IID_IPropertyBag, (void **)&pPropBag); + if (SUCCEEDED(hr)) { + // To retrieve the filter's friendly name, do the following: + VARIANT varName; + VariantInit(&varName); + hr = pPropBag->Read(L"FriendlyName", &varName, 0); + if (SUCCEEDED(hr)) + { + auto wstr = std::wstring(varName.bstrVal); + auto str = std::string(wstr.begin(), wstr.end()); + device_names.push_back(str); + } + VariantClear(&varName); + + ////// To create an instance of the filter, do the following: + ////IBaseFilter *pFilter; + ////hr = pMoniker->BindToObject(NULL, NULL, IID_IBaseFilter, + //// (void**)&pFilter); + // Now add the filter to the graph. + //Remember to release pFilter later. + pPropBag->Release(); + } + pMoniker->Release(); + } + pEnumCat->Release(); + } + pSysDevEnum->Release(); +# else + for (int i = 0; i < 16; i++) { + char buf[128]; + sprintf(buf, "/dev/video%d", i); + if (access(buf, R_OK | W_OK) == 0) { + device_names.push_back(std::string(buf)); + } + } +# endif +} +#else +// ---------------------------------------------------------------------------- +void get_camera_device_names(std::vector& device_names) +{ + videoInput VI; + VI.listDevices(); + std::string device_name; + for(int index = 0; ; ++index) { + device_name = VI.getDeviceName(index); + if (device_name.empty()) break; + device_names.push_back(device_name); + } +} +#endif + +// ---------------------------------------------------------------------------- +void Camera::set_device_index(int index) +{ + if (desired_index != index) + { + desired_index = index; + _set_device_index(); + + // reset fps + dt_valid = 0; + dt_mean = 0; + active_index = index; + } +} + +void Camera::set_f(float f) +{ + if (cam_desired.f != f) + { + cam_desired.f = f; + _set_f(); + } +} +void Camera::set_fps(int fps) +{ + if (cam_desired.fps != fps) + { + cam_desired.fps = fps; + _set_fps(); + } +} + +void Camera::set_res(int x_res, int y_res) +{ + if (cam_desired.res_x != x_res || cam_desired.res_y != y_res) + { + cam_desired.res_x = x_res; + cam_desired.res_y = y_res; + _set_res(); + _set_fps(); + } +} + +bool Camera::get_frame(float dt, cv::Mat* frame) +{ + bool new_frame = _get_frame(frame); + // measure fps of valid frames + const float dt_smoothing_const = 0.9; + dt_valid += dt; + if (new_frame) + { + dt_mean = dt_smoothing_const * dt_mean + (1.0 - dt_smoothing_const) * dt_valid; + cam_info.fps = 1.0 / dt_mean; + dt_valid = 0; + } + return new_frame; +} + +// ---------------------------------------------------------------------------- +#ifdef OPENTRACK_API +void CVCamera::start() +{ + cap = new VideoCapture(desired_index); + // extract camera info + if (cap->isOpened()) + { + active = true; + active_index = desired_index; + cam_info.res_x = cap->get(CV_CAP_PROP_FRAME_WIDTH); + cam_info.res_y = cap->get(CV_CAP_PROP_FRAME_HEIGHT); + } else { + delete cap; + cap = nullptr; + } +} + +void CVCamera::stop() +{ + if (cap) + { + cap->release(); + delete cap; + } + active = false; +} + +bool CVCamera::_get_frame(Mat* frame) +{ + if (cap && cap->isOpened()) + { + Mat img; + for (int i = 0; i < 100 && !cap->read(img); i++) + ;; + + if (img.empty()) + return false; + + *frame = img; + return true; + } + return false; +} + +void CVCamera::_set_index() +{ + if (active) restart(); +} + +void CVCamera::_set_f() +{ + cam_info.f = cam_desired.f; +} + +void CVCamera::_set_fps() +{ + if (cap) 
cap->set(CV_CAP_PROP_FPS, cam_desired.fps); +} + +void CVCamera::_set_res() +{ + if (cap) + { + cap->set(CV_CAP_PROP_FRAME_WIDTH, cam_desired.res_x); + cap->set(CV_CAP_PROP_FRAME_HEIGHT, cam_desired.res_y); + cam_info.res_x = cap->get(CV_CAP_PROP_FRAME_WIDTH); + cam_info.res_y = cap->get(CV_CAP_PROP_FRAME_HEIGHT); + } +} +void CVCamera::_set_device_index() +{ + if (cap) + { + cap->release(); + delete cap; + } + cap = new VideoCapture(desired_index); +} + +#else +// ---------------------------------------------------------------------------- +VICamera::VICamera() : frame_buffer(NULL) +{ + VI.listDevices(); +} + +void VICamera::start() +{ + if (desired_index >= 0) + { + if (cam_desired.res_x == 0 || cam_desired.res_y == 0) + VI.setupDevice(desired_index); + else + VI.setupDevice(desired_index, cam_desired.res_x, cam_desired.res_y); + + active = true; + active_index = desired_index; + + cam_info.res_x = VI.getWidth(active_index); + cam_info.res_y = VI.getHeight(active_index); + new_frame = cv::Mat(cam_info.res_y, cam_info.res_x, CV_8UC3); + // If matrix is not continuous we have to copy manually via frame_buffer + if (!new_frame.isContinuous()) { + unsigned int size = VI.getSize(active_index); + frame_buffer = new unsigned char[size]; + } + } +} + +void VICamera::stop() +{ + if (active) + { + VI.stopDevice(active_index); + } + if (frame_buffer) + { + delete[] frame_buffer; + frame_buffer = NULL; + } + active = false; +} + +bool VICamera::_get_frame(Mat* frame) +{ + if (active && VI.isFrameNew(active_index)) + { + if (new_frame.isContinuous()) + { + VI.getPixels(active_index, new_frame.data, false, true); + } + else + { + // If matrix is not continuous we have to copy manually via frame_buffer + VI.getPixels(active_index, frame_buffer, false, true); + new_frame = cv::Mat(cam_info.res_y, cam_info.res_x, CV_8UC3, frame_buffer).clone(); + } + *frame = new_frame; + return true; + } + return false; +} + +void VICamera::_set_device_index() +{ + if (active) restart(); +} + +void VICamera::_set_f() +{ + cam_info.f = cam_desired.f; +} + +void VICamera::_set_fps() +{ + bool was_active = active; + if (active) stop(); + VI.setIdealFramerate(desired_index, cam_desired.fps); + if (was_active) start(); +} + +void VICamera::_set_res() +{ + if (active) restart(); +} +#endif + +// ---------------------------------------------------------------------------- +Mat FrameRotation::rotate_frame(Mat frame) +{ + switch (rotation) + { + case CLOCKWISE: + { + Mat dst; + transpose(frame, dst); + flip(dst, dst, 1); + return dst; + } + + case COUNTER_CLOCKWISE: + { + Mat dst; + transpose(frame, dst); + flip(dst, dst, 0); + return dst; + } + + default: + return frame; + } +} diff --git a/ftnoir_tracker_pt/camera.h b/ftnoir_tracker_pt/camera.h index a9f60841..733cc61f 100644 --- a/ftnoir_tracker_pt/camera.h +++ b/ftnoir_tracker_pt/camera.h @@ -1,145 +1,145 @@ -/* Copyright (c) 2012 Patrick Ruoff - * - * Permission to use, copy, modify, and/or distribute this software for any - * purpose with or without fee is hereby granted, provided that the above - * copyright notice and this permission notice appear in all copies. - */ - -#ifndef CAMERA_H -#define CAMERA_H - -#include -#ifndef OPENTRACK_API -# include -#else +/* Copyright (c) 2012 Patrick Ruoff + * + * Permission to use, copy, modify, and/or distribute this software for any + * purpose with or without fee is hereby granted, provided that the above + * copyright notice and this permission notice appear in all copies. 
+ */ + +#ifndef CAMERA_H +#define CAMERA_H + +#include +#ifndef OPENTRACK_API +# include +#else # include -# include -# include -#endif -#include - -// ---------------------------------------------------------------------------- -void get_camera_device_names(std::vector& device_names); - - -// ---------------------------------------------------------------------------- -struct CamInfo -{ - CamInfo() : res_x(0), res_y(0), fps(0), f(1) {} - - int res_x; - int res_y; - int fps; - float f; // (focal length) / (sensor width) -}; - -// ---------------------------------------------------------------------------- -// Base class for cameras, calculates the frame rate -class Camera -{ -public: - Camera() : dt_valid(0), dt_mean(0), desired_index(0), active_index(-1), active(false) {} - virtual ~Camera() {} - - // start/stop capturing - virtual void start() = 0; - virtual void stop() = 0; - void restart() { stop(); start(); } - - // calls corresponding template methods and reinitializes frame rate calculation - void set_device_index(int index); - void set_f(float f); - void set_fps(int fps); - void set_res(int x_res, int y_res); - - // gets a frame from the camera, dt: time since last call in seconds - bool get_frame(float dt, cv::Mat* frame); - - // WARNING: returned references are valid as long as object - const CamInfo& get_info() const { return cam_info; } - const CamInfo& get_desired() const { return cam_desired; } - -protected: - // get a frame from the camera - virtual bool _get_frame(cv::Mat* frame) = 0; - - // update the camera using cam_desired, write res and f to cam_info if successful - virtual void _set_device_index() = 0; - virtual void _set_f() = 0; - virtual void _set_fps() = 0; - virtual void _set_res() = 0; - - float dt_valid; - float dt_mean; - int desired_index; - int active_index; - bool active; - CamInfo cam_info; - CamInfo cam_desired; -}; - - -// ---------------------------------------------------------------------------- -// camera based on OpenCV's videoCapture -#ifdef OPENTRACK_API -class CVCamera : public Camera -{ -public: - CVCamera() : cap(NULL) {} - ~CVCamera() { stop(); } - - virtual void start(); - virtual void stop(); - -protected: - virtual bool _get_frame(cv::Mat* frame); - virtual void _set_index(); - virtual void _set_f(); - virtual void _set_fps(); - virtual void _set_res(); - virtual void _set_device_index(); - - cv::VideoCapture* cap; -}; -#else -// ---------------------------------------------------------------------------- -// Camera based on the videoInput library -class VICamera : public Camera -{ -public: - VICamera(); - ~VICamera() { stop(); } - - virtual void start(); - virtual void stop(); - -protected: - virtual bool _get_frame(cv::Mat* frame); - virtual void _set_device_index(); - virtual void _set_f(); - virtual void _set_fps(); - virtual void _set_res(); - - videoInput VI; - cv::Mat new_frame; - unsigned char* frame_buffer; -}; -#endif - -enum RotationType -{ - CLOCKWISE = 0, - ZERO = 1, - COUNTER_CLOCKWISE = 2 -}; - -// ---------------------------------------------------------------------------- -class FrameRotation -{ -public: - RotationType rotation; - - cv::Mat rotate_frame(cv::Mat frame); -}; - -#endif //CAMERA_H +# include +# include +#endif +#include + +// ---------------------------------------------------------------------------- +void get_camera_device_names(std::vector& device_names); + + +// ---------------------------------------------------------------------------- +struct CamInfo +{ + CamInfo() : res_x(0), res_y(0), fps(0), f(1) {} + 
+ int res_x; + int res_y; + int fps; + float f; // (focal length) / (sensor width) +}; + +// ---------------------------------------------------------------------------- +// Base class for cameras, calculates the frame rate +class Camera +{ +public: + Camera() : dt_valid(0), dt_mean(0), desired_index(0), active_index(-1), active(false) {} + virtual ~Camera() {} + + // start/stop capturing + virtual void start() = 0; + virtual void stop() = 0; + void restart() { stop(); start(); } + + // calls corresponding template methods and reinitializes frame rate calculation + void set_device_index(int index); + void set_f(float f); + void set_fps(int fps); + void set_res(int x_res, int y_res); + + // gets a frame from the camera, dt: time since last call in seconds + bool get_frame(float dt, cv::Mat* frame); + + // WARNING: returned references are valid as long as object + const CamInfo& get_info() const { return cam_info; } + const CamInfo& get_desired() const { return cam_desired; } + +protected: + // get a frame from the camera + virtual bool _get_frame(cv::Mat* frame) = 0; + + // update the camera using cam_desired, write res and f to cam_info if successful + virtual void _set_device_index() = 0; + virtual void _set_f() = 0; + virtual void _set_fps() = 0; + virtual void _set_res() = 0; + + float dt_valid; + float dt_mean; + int desired_index; + int active_index; + bool active; + CamInfo cam_info; + CamInfo cam_desired; +}; + + +// ---------------------------------------------------------------------------- +// camera based on OpenCV's videoCapture +#ifdef OPENTRACK_API +class CVCamera : public Camera +{ +public: + CVCamera() : cap(NULL) {} + ~CVCamera() { stop(); } + + virtual void start(); + virtual void stop(); + +protected: + virtual bool _get_frame(cv::Mat* frame); + virtual void _set_index(); + virtual void _set_f(); + virtual void _set_fps(); + virtual void _set_res(); + virtual void _set_device_index(); + + cv::VideoCapture* cap; +}; +#else +// ---------------------------------------------------------------------------- +// Camera based on the videoInput library +class VICamera : public Camera +{ +public: + VICamera(); + ~VICamera() { stop(); } + + virtual void start(); + virtual void stop(); + +protected: + virtual bool _get_frame(cv::Mat* frame); + virtual void _set_device_index(); + virtual void _set_f(); + virtual void _set_fps(); + virtual void _set_res(); + + videoInput VI; + cv::Mat new_frame; + unsigned char* frame_buffer; +}; +#endif + +enum RotationType +{ + CLOCKWISE = 0, + ZERO = 1, + COUNTER_CLOCKWISE = 2 +}; + +// ---------------------------------------------------------------------------- +class FrameRotation +{ +public: + RotationType rotation; + + cv::Mat rotate_frame(cv::Mat frame); +}; + +#endif //CAMERA_H diff --git a/ftnoir_tracker_pt/doc/logo.png b/ftnoir_tracker_pt/doc/logo.png index 95032a25..85590691 100644 Binary files a/ftnoir_tracker_pt/doc/logo.png and b/ftnoir_tracker_pt/doc/logo.png differ diff --git a/ftnoir_tracker_pt/doc/settings1.png b/ftnoir_tracker_pt/doc/settings1.png index 35b84c5c..0725f5f4 100644 Binary files a/ftnoir_tracker_pt/doc/settings1.png and b/ftnoir_tracker_pt/doc/settings1.png differ diff --git a/ftnoir_tracker_pt/doc/settings2.png b/ftnoir_tracker_pt/doc/settings2.png index c6cfd1f3..382ed13a 100644 Binary files a/ftnoir_tracker_pt/doc/settings2.png and b/ftnoir_tracker_pt/doc/settings2.png differ diff --git a/ftnoir_tracker_pt/doc/settings3.png b/ftnoir_tracker_pt/doc/settings3.png index 5922403d..821453d1 100644 Binary files 
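camera.h above defines the abstract Camera interface shared by CVCamera and VICamera: configuration through set_device_index / set_res / set_fps / set_f, lifecycle through start / stop / restart, and polling through get_frame(dt, &frame), which also maintains the smoothed frame rate reported by get_info().fps. The fragment below is a hypothetical driver loop showing the intended call order; the device index, resolution, fixed dt, and loop bound are placeholder values and are not part of the patch.

    #include "camera.h"
    #include <opencv2/core/core.hpp>

    void capture_loop_sketch()
    {
        CVCamera cam;              // OpenCV-backed implementation (OPENTRACK_API build)
        cam.set_device_index(0);   // placeholder: first capture device
        cam.set_res(640, 480);     // desired resolution
        cam.set_fps(30);           // desired frame rate
        cam.set_f(1.0f);           // focal length / sensor width
        cam.start();

        cv::Mat frame;
        const float dt = 1.f / 30; // normally the measured time since the previous call
        for (int i = 0; i < 300; ++i)
        {
            if (cam.get_frame(dt, &frame) && !frame.empty())
            {
                // hand the frame to the point extractor / pose tracker here
            }
            // cam.get_info().fps now reflects the smoothed measured frame rate
        }
        cam.stop();
    }
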
a/ftnoir_tracker_pt/doc/settings3.png and b/ftnoir_tracker_pt/doc/settings3.png differ diff --git a/ftnoir_tracker_pt/doc/style.css b/ftnoir_tracker_pt/doc/style.css index a8d3e333..0c3d29a6 100644 --- a/ftnoir_tracker_pt/doc/style.css +++ b/ftnoir_tracker_pt/doc/style.css @@ -1,131 +1,131 @@ -body { - width: 1000px; - font-size: 13px; - color: #000000; - padding: 0; - margin: 0 auto; - background: #444444; - font-family: verdana,arial; -} - -table { - border-width: 3px; - border-color: #0000FF; - border-style: ridge; - margin-top: 5px; - background-color: #E0E0FF; -} - -table.blind { - border: none; - background-color: #E6E6E6; -} - -fieldset.blind { - border: none; -} - -h1 { font-size: 160%; } -h2 { font-size: 140%; } -h3 { font-size: 115%; } - -.indent { - margin-left: 25px; -} - -p -{ - margin-left: 10px; -} - -li -{ - margin: 10px; -} - - -dl -{ - /*width: 80%;*/ - border-bottom: 1px solid #999; -} - -dt -{ - padding-top: 5px; - font-weight: bold; - border-top: 1px solid #999; -} - -dd -{ - padding: 5px; -} - - -hr { - color: #688938; -} - -a:link, a:visited { - color: #0000BF; -} -a:hover { - color: #0000FF; -} - -a.nav { - position: relative; - top: -30px; - display: block; - visibility: hidden; -} - -#navbar { - width: 1000px; - height: 30px; - background-color:#1a1a1b; - position: fixed; - margin: 0 auto; - padding: 0; -} - -#navbar ul -{ - list-style-type: none; - margin: 0 auto; - padding: 0; - overflow: hidden; -} - -#navbar li -{ - margin: 0 auto; - padding: 5px; - float:left; -} - -#navbar a:link,a:visited -{ - display:block; - width:150px; - font-weight:bold; - color:#e85d02; - text-align:center; - /*padding:4px;*/ - text-decoration:none; - /*text-transform:uppercase;*/ -} - -#navbar a:hover,a:active -{ - color:#ffffff; -} - -#content { - background-color:#ffffff; - padding: 15px; - padding-top: 40px; - padding-right: 40px; - margin: 0 auto; -} +body { + width: 1000px; + font-size: 13px; + color: #000000; + padding: 0; + margin: 0 auto; + background: #444444; + font-family: verdana,arial; +} + +table { + border-width: 3px; + border-color: #0000FF; + border-style: ridge; + margin-top: 5px; + background-color: #E0E0FF; +} + +table.blind { + border: none; + background-color: #E6E6E6; +} + +fieldset.blind { + border: none; +} + +h1 { font-size: 160%; } +h2 { font-size: 140%; } +h3 { font-size: 115%; } + +.indent { + margin-left: 25px; +} + +p +{ + margin-left: 10px; +} + +li +{ + margin: 10px; +} + + +dl +{ + /*width: 80%;*/ + border-bottom: 1px solid #999; +} + +dt +{ + padding-top: 5px; + font-weight: bold; + border-top: 1px solid #999; +} + +dd +{ + padding: 5px; +} + + +hr { + color: #688938; +} + +a:link, a:visited { + color: #0000BF; +} +a:hover { + color: #0000FF; +} + +a.nav { + position: relative; + top: -30px; + display: block; + visibility: hidden; +} + +#navbar { + width: 1000px; + height: 30px; + background-color:#1a1a1b; + position: fixed; + margin: 0 auto; + padding: 0; +} + +#navbar ul +{ + list-style-type: none; + margin: 0 auto; + padding: 0; + overflow: hidden; +} + +#navbar li +{ + margin: 0 auto; + padding: 5px; + float:left; +} + +#navbar a:link,a:visited +{ + display:block; + width:150px; + font-weight:bold; + color:#e85d02; + text-align:center; + /*padding:4px;*/ + text-decoration:none; + /*text-transform:uppercase;*/ +} + +#navbar a:hover,a:active +{ + color:#ffffff; +} + +#content { + background-color:#ffffff; + padding: 15px; + padding-top: 40px; + padding-right: 40px; + margin: 0 auto; +} diff --git a/ftnoir_tracker_pt/frame_observer.cpp 
b/ftnoir_tracker_pt/frame_observer.cpp index 281f3d57..76dee351 100644 --- a/ftnoir_tracker_pt/frame_observer.cpp +++ b/ftnoir_tracker_pt/frame_observer.cpp @@ -1,18 +1,18 @@ -/* Copyright (c) 2013 Patrick Ruoff - * - * Permission to use, copy, modify, and/or distribute this software for any - * purpose with or without fee is hereby granted, provided that the above - * copyright notice and this permission notice appear in all copies. - */ - -#include "frame_observer.h" - -//----------------------------------------------------------------------------- -FrameProvider::~FrameProvider() -{ - QMutexLocker lock(&observer_mutex); - for (std::set::iterator iter=frame_observers.begin(); iter!=frame_observers.end(); ++iter) - { - (*iter)->on_frame_provider_destroy(); - } -} +/* Copyright (c) 2013 Patrick Ruoff + * + * Permission to use, copy, modify, and/or distribute this software for any + * purpose with or without fee is hereby granted, provided that the above + * copyright notice and this permission notice appear in all copies. + */ + +#include "frame_observer.h" + +//----------------------------------------------------------------------------- +FrameProvider::~FrameProvider() +{ + QMutexLocker lock(&observer_mutex); + for (std::set::iterator iter=frame_observers.begin(); iter!=frame_observers.end(); ++iter) + { + (*iter)->on_frame_provider_destroy(); + } +} diff --git a/ftnoir_tracker_pt/frame_observer.h b/ftnoir_tracker_pt/frame_observer.h index c3c20259..ca8ffb46 100644 --- a/ftnoir_tracker_pt/frame_observer.h +++ b/ftnoir_tracker_pt/frame_observer.h @@ -1,76 +1,76 @@ -/* Copyright (c) 2013 Patrick Ruoff - * - * Permission to use, copy, modify, and/or distribute this software for any - * purpose with or without fee is hereby granted, provided that the above - * copyright notice and this permission notice appear in all copies. - */ - -#ifndef FRAME_OBSERVER_H -#define FRAME_OBSERVER_H - -#include -#include -#ifndef OPENTRACK_API -# include -#else +/* Copyright (c) 2013 Patrick Ruoff + * + * Permission to use, copy, modify, and/or distribute this software for any + * purpose with or without fee is hereby granted, provided that the above + * copyright notice and this permission notice appear in all copies. 
+ */ + +#ifndef FRAME_OBSERVER_H +#define FRAME_OBSERVER_H + +#include +#include +#ifndef OPENTRACK_API +# include +#else # include -#endif -#include - -//----------------------------------------------------------------------------- -// Forward declarations -class FrameObserver; - -//----------------------------------------------------------------------------- -// Provides means to copy frame and point information if it has observers -// Instantiate a FrameObserver to get the information -class FrameProvider -{ - friend class FrameObserver; -public: - ~FrameProvider(); - -protected: - virtual bool get_frame_and_points(cv::Mat& frame, std::shared_ptr< std::vector >& points) = 0; - - bool has_observers() const { QMutexLocker lock(&observer_mutex); return !frame_observers.empty(); } - -private: - mutable QMutex observer_mutex; - void add_observer(FrameObserver* obs) { QMutexLocker lock(&observer_mutex); frame_observers.insert(obs); } - void remove_observer(FrameObserver* obs) { QMutexLocker lock(&observer_mutex); frame_observers.erase(obs); } - std::set frame_observers; -}; - -//----------------------------------------------------------------------------- -// Used to get frame and point information from MutexedFrameProvider -// Destroy instance if not interested anymore since a living -// FrameObserver instance causes MutexedFrameProvider to provide the information, -// potentially reducing its performance -class FrameObserver -{ -public: - FrameObserver(FrameProvider* provider) : provider(provider) { - provider->add_observer(this); - } - - ~FrameObserver() { - if (provider) provider->remove_observer(this); - } - - bool get_frame_and_points(cv::Mat& frame, std::shared_ptr< std::vector >& points) { - return provider ? provider->get_frame_and_points(frame, points) : false; - } - - void on_frame_provider_destroy() { - provider = NULL; - } - -protected: - FrameProvider* provider; - -private: - FrameObserver(const FrameObserver&); -}; - -#endif //FRAME_OBSERVER_H +#endif +#include + +//----------------------------------------------------------------------------- +// Forward declarations +class FrameObserver; + +//----------------------------------------------------------------------------- +// Provides means to copy frame and point information if it has observers +// Instantiate a FrameObserver to get the information +class FrameProvider +{ + friend class FrameObserver; +public: + ~FrameProvider(); + +protected: + virtual bool get_frame_and_points(cv::Mat& frame, std::shared_ptr< std::vector >& points) = 0; + + bool has_observers() const { QMutexLocker lock(&observer_mutex); return !frame_observers.empty(); } + +private: + mutable QMutex observer_mutex; + void add_observer(FrameObserver* obs) { QMutexLocker lock(&observer_mutex); frame_observers.insert(obs); } + void remove_observer(FrameObserver* obs) { QMutexLocker lock(&observer_mutex); frame_observers.erase(obs); } + std::set frame_observers; +}; + +//----------------------------------------------------------------------------- +// Used to get frame and point information from MutexedFrameProvider +// Destroy instance if not interested anymore since a living +// FrameObserver instance causes MutexedFrameProvider to provide the information, +// potentially reducing its performance +class FrameObserver +{ +public: + FrameObserver(FrameProvider* provider) : provider(provider) { + provider->add_observer(this); + } + + ~FrameObserver() { + if (provider) provider->remove_observer(this); + } + + bool get_frame_and_points(cv::Mat& frame, 
std::shared_ptr< std::vector >& points) { + return provider ? provider->get_frame_and_points(frame, points) : false; + } + + void on_frame_provider_destroy() { + provider = NULL; + } + +protected: + FrameProvider* provider; + +private: + FrameObserver(const FrameObserver&); +}; + +#endif //FRAME_OBSERVER_H diff --git a/ftnoir_tracker_pt/ftnoir_tracker_pt.cpp b/ftnoir_tracker_pt/ftnoir_tracker_pt.cpp index 219c8990..3fa6910d 100644 --- a/ftnoir_tracker_pt/ftnoir_tracker_pt.cpp +++ b/ftnoir_tracker_pt/ftnoir_tracker_pt.cpp @@ -1,264 +1,264 @@ -/* Copyright (c) 2012 Patrick Ruoff - * - * Permission to use, copy, modify, and/or distribute this software for any - * purpose with or without fee is hereby granted, provided that the above - * copyright notice and this permission notice appear in all copies. - */ - -#include "ftnoir_tracker_pt.h" -#include -#include -#include -#include -#include - -using namespace std; -using namespace cv; - -//#define PT_PERF_LOG //log performance - -const float rad2deg = 180.0/3.14159265; -const float deg2rad = 1.0/rad2deg; - -//----------------------------------------------------------------------------- -Tracker::Tracker() - : mutex(QMutex::Recursive), - commands(0), - video_widget(NULL), - video_frame(NULL), - tracking_valid(false), - new_settings(nullptr) - -{ - qDebug()<<"Tracker::Tracker"; -} - -Tracker::~Tracker() -{ - qDebug()<<"Tracker::~Tracker"; - // terminate tracker thread - set_command(ABORT); - wait(); - s.video_widget = false; - delete video_widget; - video_widget = NULL; - if (video_frame->layout()) delete video_frame->layout(); -} - -void Tracker::set_command(Command command) -{ - //QMutexLocker lock(&mutex); - commands |= command; -} - -void Tracker::reset_command(Command command) -{ - //QMutexLocker lock(&mutex); - commands &= ~command; -} - -void Tracker::run() -{ - qDebug()<<"Tracker:: Thread started"; - -#ifdef PT_PERF_LOG - QFile log_file(QCoreApplication::applicationDirPath() + "/PointTrackerPerformance.txt"); - if (!log_file.open(QIODevice::WriteOnly | QIODevice::Text)) return; - QTextStream log_stream(&log_file); -#endif - - time.start(); - double dt; - bool new_frame; - forever - { - if (commands & ABORT) break; - if (commands & PAUSE) continue; - commands = 0; - apply_inner(); - dt = time.start() / 1000000000.; - - new_frame = camera.get_frame(dt, &frame); - - if (new_frame && !frame.empty()) - { - QMutexLocker lock(&mutex); - - frame = frame_rotation.rotate_frame(frame); - const std::vector& points = point_extractor.extract_points(frame, dt, true); - for (auto p : points) - { - auto p2 = cv::Point(p[0] * frame.cols + frame.cols/2, -p[1] * frame.cols + frame.rows/2); - cv::Scalar color(0, 255, 0); - cv::line(frame, - cv::Point(p2.x - 20, p2.y), - cv::Point(p2.x + 20, p2.y), - color, - 4); - cv::line(frame, - cv::Point(p2.x, p2.y - 20), - cv::Point(p2.x, p2.y + 20), - color, - 4); - } - tracking_valid = point_tracker.track(points, camera.get_info().f, dt); - video_widget->update_image(frame); - } -#ifdef PT_PERF_LOG - log_stream<<"dt: "<(static_cast(s.cam_roll)); - point_extractor.threshold_val = s.threshold; - point_extractor.threshold_secondary_val = s.threshold_secondary; - point_extractor.min_size = s.min_point_size; - point_extractor.max_size = s.max_point_size; - { - cv::Vec3f M01(s.m01_x, s.m01_y, s.m01_z); - cv::Vec3f M02(s.m02_x, s.m02_y, s.m02_z); - point_tracker.point_model = std::shared_ptr(new PointModel(M01, M02)); - } - point_tracker.dynamic_pose_resolution = s.dyn_pose_res; - point_tracker.dt_reset = s.reset_time / 
1000.0; - t_MH = cv::Vec3f(s.t_MH_x, s.t_MH_y, s.t_MH_z); - R_GC = Matx33f( cos(deg2rad*s.cam_yaw), 0, sin(deg2rad*s.cam_yaw), - 0, 1, 0, - -sin(deg2rad*s.cam_yaw), 0, cos(deg2rad*s.cam_yaw)); - R_GC = R_GC * Matx33f( 1, 0, 0, - 0, cos(deg2rad*s.cam_pitch), sin(deg2rad*s.cam_pitch), - 0, -sin(deg2rad*s.cam_pitch), cos(deg2rad*s.cam_pitch)); - - FrameTrafo X_MH(Matx33f::eye(), t_MH); - X_GH_0 = R_GC * X_MH; - - qDebug()<<"Tracker::apply ends"; -} - -void Tracker::reset() -{ - QMutexLocker lock(&mutex); - point_tracker.reset(); -} - -void Tracker::center() -{ - point_tracker.reset(); // we also do a reset here since there is no reset shortkey yet - QMutexLocker lock(&mutex); - FrameTrafo X_CM_0 = point_tracker.get_pose(); - FrameTrafo X_MH(Matx33f::eye(), t_MH); - X_GH_0 = R_GC * X_CM_0 * X_MH; -} - -bool Tracker::get_frame_and_points(cv::Mat& frame_copy, std::shared_ptr< std::vector >& points) -{ - QMutexLocker lock(&mutex); - if (frame.empty()) return false; - - // copy the frame and points from the tracker thread - frame_copy = frame.clone(); - points = std::shared_ptr< vector >(new vector(point_extractor.get_points())); - return true; -} - -void Tracker::refreshVideo() -{ - if (video_widget) video_widget->update_frame_and_points(); -} - -void Tracker::StartTracker(QFrame *parent_window) -{ - this->video_frame = parent_window; - video_frame->setAttribute(Qt::WA_NativeWindow); - video_frame->show(); - video_widget = new PTVideoWidget(video_frame, this); - QHBoxLayout* video_layout = new QHBoxLayout(parent_window); - video_layout->setContentsMargins(0, 0, 0, 0); - video_layout->addWidget(video_widget); - video_frame->setLayout(video_layout); - video_widget->resize(video_frame->width(), video_frame->height()); - camera.start(); - apply(s); - start(); - reset_command(PAUSE); -} - -#ifndef OPENTRACK_API -void Tracker::StopTracker(bool exit) -{ - set_command(PAUSE); -} -#endif - -#ifdef OPENTRACK_API -#define THeadPoseData double -#endif - -void Tracker::GetHeadPoseData(THeadPoseData *data) -{ - { - QMutexLocker lock(&mutex); - - if (!tracking_valid) return; - - FrameTrafo X_CM = point_tracker.get_pose(); - FrameTrafo X_MH(Matx33f::eye(), t_MH); - FrameTrafo X_GH = R_GC * X_CM * X_MH; - Matx33f R = X_GH.R * X_GH_0.R.t(); - Vec3f t = X_GH.t - X_GH_0.t; - - // get translation(s) - data[TX] = t[0] / 10.0; // convert to cm - data[TY] = t[1] / 10.0; - data[TZ] = t[2] / 10.0; - - // translate rotation matrix from opengl (G) to roll-pitch-yaw (E) frame - // -z -> x, y -> z, x -> -y - Matx33f R_EG( 0, 0,-1, - -1, 0, 0, - 0, 1, 0); - R = R_EG * R * R_EG.t(); - - // extract rotation angles - float alpha, beta, gamma; - beta = atan2( -R(2,0), sqrt(R(2,1)*R(2,1) + R(2,2)*R(2,2)) ); - alpha = atan2( R(1,0), R(0,0)); - gamma = atan2( R(2,1), R(2,2)); - - data[Yaw] = rad2deg * alpha; - data[Pitch] = - rad2deg * beta; // FTNoIR expects a minus here - data[Roll] = rad2deg * gamma; - } -} - -//----------------------------------------------------------------------------- -#ifdef OPENTRACK_API -extern "C" OPENTRACK_EXPORT ITracker* CALLING_CONVENTION GetConstructor() -#else -#pragma comment(linker, "/export:GetTracker=_GetTracker@0") -OPENTRACK_EXPORT ITrackerPtr __stdcall GetTracker() -#endif -{ - return new Tracker; -} +/* Copyright (c) 2012 Patrick Ruoff + * + * Permission to use, copy, modify, and/or distribute this software for any + * purpose with or without fee is hereby granted, provided that the above + * copyright notice and this permission notice appear in all copies. 
+ */ + +#include "ftnoir_tracker_pt.h" +#include +#include +#include +#include +#include + +using namespace std; +using namespace cv; + +//#define PT_PERF_LOG //log performance + +const float rad2deg = 180.0/3.14159265; +const float deg2rad = 1.0/rad2deg; + +//----------------------------------------------------------------------------- +Tracker::Tracker() + : mutex(QMutex::Recursive), + commands(0), + video_widget(NULL), + video_frame(NULL), + tracking_valid(false), + new_settings(nullptr) + +{ + qDebug()<<"Tracker::Tracker"; +} + +Tracker::~Tracker() +{ + qDebug()<<"Tracker::~Tracker"; + // terminate tracker thread + set_command(ABORT); + wait(); + s.video_widget = false; + delete video_widget; + video_widget = NULL; + if (video_frame->layout()) delete video_frame->layout(); +} + +void Tracker::set_command(Command command) +{ + //QMutexLocker lock(&mutex); + commands |= command; +} + +void Tracker::reset_command(Command command) +{ + //QMutexLocker lock(&mutex); + commands &= ~command; +} + +void Tracker::run() +{ + qDebug()<<"Tracker:: Thread started"; + +#ifdef PT_PERF_LOG + QFile log_file(QCoreApplication::applicationDirPath() + "/PointTrackerPerformance.txt"); + if (!log_file.open(QIODevice::WriteOnly | QIODevice::Text)) return; + QTextStream log_stream(&log_file); +#endif + + time.start(); + double dt; + bool new_frame; + forever + { + if (commands & ABORT) break; + if (commands & PAUSE) continue; + commands = 0; + apply_inner(); + dt = time.start() / 1000000000.; + + new_frame = camera.get_frame(dt, &frame); + + if (new_frame && !frame.empty()) + { + QMutexLocker lock(&mutex); + + frame = frame_rotation.rotate_frame(frame); + const std::vector& points = point_extractor.extract_points(frame, dt, true); + for (auto p : points) + { + auto p2 = cv::Point(p[0] * frame.cols + frame.cols/2, -p[1] * frame.cols + frame.rows/2); + cv::Scalar color(0, 255, 0); + cv::line(frame, + cv::Point(p2.x - 20, p2.y), + cv::Point(p2.x + 20, p2.y), + color, + 4); + cv::line(frame, + cv::Point(p2.x, p2.y - 20), + cv::Point(p2.x, p2.y + 20), + color, + 4); + } + tracking_valid = point_tracker.track(points, camera.get_info().f, dt); + video_widget->update_image(frame); + } +#ifdef PT_PERF_LOG + log_stream<<"dt: "<(static_cast(s.cam_roll)); + point_extractor.threshold_val = s.threshold; + point_extractor.threshold_secondary_val = s.threshold_secondary; + point_extractor.min_size = s.min_point_size; + point_extractor.max_size = s.max_point_size; + { + cv::Vec3f M01(s.m01_x, s.m01_y, s.m01_z); + cv::Vec3f M02(s.m02_x, s.m02_y, s.m02_z); + point_tracker.point_model = std::shared_ptr(new PointModel(M01, M02)); + } + point_tracker.dynamic_pose_resolution = s.dyn_pose_res; + point_tracker.dt_reset = s.reset_time / 1000.0; + t_MH = cv::Vec3f(s.t_MH_x, s.t_MH_y, s.t_MH_z); + R_GC = Matx33f( cos(deg2rad*s.cam_yaw), 0, sin(deg2rad*s.cam_yaw), + 0, 1, 0, + -sin(deg2rad*s.cam_yaw), 0, cos(deg2rad*s.cam_yaw)); + R_GC = R_GC * Matx33f( 1, 0, 0, + 0, cos(deg2rad*s.cam_pitch), sin(deg2rad*s.cam_pitch), + 0, -sin(deg2rad*s.cam_pitch), cos(deg2rad*s.cam_pitch)); + + FrameTrafo X_MH(Matx33f::eye(), t_MH); + X_GH_0 = R_GC * X_MH; + + qDebug()<<"Tracker::apply ends"; +} + +void Tracker::reset() +{ + QMutexLocker lock(&mutex); + point_tracker.reset(); +} + +void Tracker::center() +{ + point_tracker.reset(); // we also do a reset here since there is no reset shortkey yet + QMutexLocker lock(&mutex); + FrameTrafo X_CM_0 = point_tracker.get_pose(); + FrameTrafo X_MH(Matx33f::eye(), t_MH); + X_GH_0 = R_GC * X_CM_0 * X_MH; +} + +bool 
Tracker::get_frame_and_points(cv::Mat& frame_copy, std::shared_ptr< std::vector >& points) +{ + QMutexLocker lock(&mutex); + if (frame.empty()) return false; + + // copy the frame and points from the tracker thread + frame_copy = frame.clone(); + points = std::shared_ptr< vector >(new vector(point_extractor.get_points())); + return true; +} + +void Tracker::refreshVideo() +{ + if (video_widget) video_widget->update_frame_and_points(); +} + +void Tracker::StartTracker(QFrame *parent_window) +{ + this->video_frame = parent_window; + video_frame->setAttribute(Qt::WA_NativeWindow); + video_frame->show(); + video_widget = new PTVideoWidget(video_frame, this); + QHBoxLayout* video_layout = new QHBoxLayout(parent_window); + video_layout->setContentsMargins(0, 0, 0, 0); + video_layout->addWidget(video_widget); + video_frame->setLayout(video_layout); + video_widget->resize(video_frame->width(), video_frame->height()); + camera.start(); + apply(s); + start(); + reset_command(PAUSE); +} + +#ifndef OPENTRACK_API +void Tracker::StopTracker(bool exit) +{ + set_command(PAUSE); +} +#endif + +#ifdef OPENTRACK_API +#define THeadPoseData double +#endif + +void Tracker::GetHeadPoseData(THeadPoseData *data) +{ + { + QMutexLocker lock(&mutex); + + if (!tracking_valid) return; + + FrameTrafo X_CM = point_tracker.get_pose(); + FrameTrafo X_MH(Matx33f::eye(), t_MH); + FrameTrafo X_GH = R_GC * X_CM * X_MH; + Matx33f R = X_GH.R * X_GH_0.R.t(); + Vec3f t = X_GH.t - X_GH_0.t; + + // get translation(s) + data[TX] = t[0] / 10.0; // convert to cm + data[TY] = t[1] / 10.0; + data[TZ] = t[2] / 10.0; + + // translate rotation matrix from opengl (G) to roll-pitch-yaw (E) frame + // -z -> x, y -> z, x -> -y + Matx33f R_EG( 0, 0,-1, + -1, 0, 0, + 0, 1, 0); + R = R_EG * R * R_EG.t(); + + // extract rotation angles + float alpha, beta, gamma; + beta = atan2( -R(2,0), sqrt(R(2,1)*R(2,1) + R(2,2)*R(2,2)) ); + alpha = atan2( R(1,0), R(0,0)); + gamma = atan2( R(2,1), R(2,2)); + + data[Yaw] = rad2deg * alpha; + data[Pitch] = - rad2deg * beta; // FTNoIR expects a minus here + data[Roll] = rad2deg * gamma; + } +} + +//----------------------------------------------------------------------------- +#ifdef OPENTRACK_API +extern "C" OPENTRACK_EXPORT ITracker* CALLING_CONVENTION GetConstructor() +#else +#pragma comment(linker, "/export:GetTracker=_GetTracker@0") +OPENTRACK_EXPORT ITrackerPtr __stdcall GetTracker() +#endif +{ + return new Tracker; +} diff --git a/ftnoir_tracker_pt/ftnoir_tracker_pt.h b/ftnoir_tracker_pt/ftnoir_tracker_pt.h index 63b8353e..3d9a83fd 100644 --- a/ftnoir_tracker_pt/ftnoir_tracker_pt.h +++ b/ftnoir_tracker_pt/ftnoir_tracker_pt.h @@ -1,94 +1,94 @@ -/* Copyright (c) 2012 Patrick Ruoff - * - * Permission to use, copy, modify, and/or distribute this software for any - * purpose with or without fee is hereby granted, provided that the above - * copyright notice and this permission notice appear in all copies. 
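[Editor's note] Tracker::GetHeadPoseData() above first remaps the rotation into the roll-pitch-yaw frame with R_EG and then recovers the three angles with atan2. Isolated into a self-contained helper; a sketch only, the function name and return convention are illustrative, the formulas and the sign of pitch are the ones used above.

    #include <cmath>
    #include <opencv2/core/core.hpp>

    // Extract (yaw, pitch, roll) in degrees from a rotation matrix that is
    // already expressed in the roll-pitch-yaw (E) frame, using the same
    // formulas as Tracker::GetHeadPoseData().
    static cv::Vec3f euler_from_R(const cv::Matx33f& R)
    {
        const float rad2deg = 180.0f / 3.14159265f;
        float beta  = std::atan2(-R(2,0), std::sqrt(R(2,1)*R(2,1) + R(2,2)*R(2,2)));
        float alpha = std::atan2( R(1,0), R(0,0));
        float gamma = std::atan2( R(2,1), R(2,2));
        // FTNoIR expects pitch with the opposite sign, hence the minus.
        return cv::Vec3f(rad2deg * alpha, -rad2deg * beta, rad2deg * gamma);
    }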
- */ - -#ifndef FTNOIR_TRACKER_PT_H -#define FTNOIR_TRACKER_PT_H - -#ifdef OPENTRACK_API -# include "facetracknoir/plugin-api.hpp" -#endif -#include "ftnoir_tracker_pt_settings.h" -#include "frame_observer.h" -#include "camera.h" -#include "point_extractor.h" -#include "point_tracker.h" -#include "pt_video_widget.h" -#include "facetracknoir/timer.hpp" - -#include -#include -#include -#include -#include -#include -#ifndef OPENTRACK_API -# include -#else +/* Copyright (c) 2012 Patrick Ruoff + * + * Permission to use, copy, modify, and/or distribute this software for any + * purpose with or without fee is hereby granted, provided that the above + * copyright notice and this permission notice appear in all copies. + */ + +#ifndef FTNOIR_TRACKER_PT_H +#define FTNOIR_TRACKER_PT_H + +#ifdef OPENTRACK_API +# include "facetracknoir/plugin-api.hpp" +#endif +#include "ftnoir_tracker_pt_settings.h" +#include "frame_observer.h" +#include "camera.h" +#include "point_extractor.h" +#include "point_tracker.h" +#include "pt_video_widget.h" +#include "facetracknoir/timer.hpp" + +#include +#include +#include +#include +#include +#include +#ifndef OPENTRACK_API +# include +#else # include -#endif -#include - -//----------------------------------------------------------------------------- -// Constantly processes the tracking chain in a separate thread -class Tracker : public ITracker, QThread, public FrameProvider -{ -public: - Tracker(); - virtual ~Tracker(); - virtual void StartTracker(QFrame* parent_window); - virtual void GetHeadPoseData(double* data); - virtual void refreshVideo(); - - void apply(settings& s); - void apply_inner(); - void center(); - void reset(); // reset the trackers internal state variables - void run(); - - void get_pose(FrameTrafo* X_CM) { QMutexLocker lock(&mutex); *X_CM = point_tracker.get_pose(); } - int get_n_points() { QMutexLocker lock(&mutex); return point_extractor.get_points().size(); } - void get_cam_info(CamInfo* info) { QMutexLocker lock(&mutex); *info = camera.get_info(); } - -protected: - // --- MutexedFrameProvider interface --- - virtual bool get_frame_and_points(cv::Mat& frame, std::shared_ptr< std::vector >& points); - - // --- thread --- - QMutex mutex; - // thread commands - enum Command { - ABORT = 1<<0, - PAUSE = 1<<1 - }; - void set_command(Command command); - void reset_command(Command command); - volatile int commands; - - CVCamera camera; - FrameRotation frame_rotation; - PointExtractor point_extractor; - PointTracker point_tracker; - - FrameTrafo X_GH_0; // for centering - cv::Vec3f t_MH; // translation from model frame to head frame - cv::Matx33f R_GC; // rotation from opengl reference frame to camera frame - - // --- ui --- - cv::Mat frame; // the output frame for display - - PTVideoWidget* video_widget; - QFrame* video_frame; - bool tracking_valid; - - settings s; - std::atomic new_settings; - Timer time; -}; - -#undef VideoWidget - -#endif // FTNOIR_TRACKER_PT_H +#endif +#include + +//----------------------------------------------------------------------------- +// Constantly processes the tracking chain in a separate thread +class Tracker : public ITracker, QThread, public FrameProvider +{ +public: + Tracker(); + virtual ~Tracker(); + virtual void StartTracker(QFrame* parent_window); + virtual void GetHeadPoseData(double* data); + virtual void refreshVideo(); + + void apply(settings& s); + void apply_inner(); + void center(); + void reset(); // reset the trackers internal state variables + void run(); + + void get_pose(FrameTrafo* X_CM) { QMutexLocker 
lock(&mutex); *X_CM = point_tracker.get_pose(); } + int get_n_points() { QMutexLocker lock(&mutex); return point_extractor.get_points().size(); } + void get_cam_info(CamInfo* info) { QMutexLocker lock(&mutex); *info = camera.get_info(); } + +protected: + // --- MutexedFrameProvider interface --- + virtual bool get_frame_and_points(cv::Mat& frame, std::shared_ptr< std::vector >& points); + + // --- thread --- + QMutex mutex; + // thread commands + enum Command { + ABORT = 1<<0, + PAUSE = 1<<1 + }; + void set_command(Command command); + void reset_command(Command command); + volatile int commands; + + CVCamera camera; + FrameRotation frame_rotation; + PointExtractor point_extractor; + PointTracker point_tracker; + + FrameTrafo X_GH_0; // for centering + cv::Vec3f t_MH; // translation from model frame to head frame + cv::Matx33f R_GC; // rotation from opengl reference frame to camera frame + + // --- ui --- + cv::Mat frame; // the output frame for display + + PTVideoWidget* video_widget; + QFrame* video_frame; + bool tracking_valid; + + settings s; + std::atomic new_settings; + Timer time; +}; + +#undef VideoWidget + +#endif // FTNOIR_TRACKER_PT_H diff --git a/ftnoir_tracker_pt/ftnoir_tracker_pt_dialog.cpp b/ftnoir_tracker_pt/ftnoir_tracker_pt_dialog.cpp index ae84ce8c..e037a099 100644 --- a/ftnoir_tracker_pt/ftnoir_tracker_pt_dialog.cpp +++ b/ftnoir_tracker_pt/ftnoir_tracker_pt_dialog.cpp @@ -1,314 +1,314 @@ -/* Copyright (c) 2012 Patrick Ruoff - * - * Permission to use, copy, modify, and/or distribute this software for any - * purpose with or without fee is hereby granted, provided that the above - * copyright notice and this permission notice appear in all copies. - */ - -#include "ftnoir_tracker_pt_dialog.h" - -#include -#include -#include -#ifndef OPENTRACK_API -# include -#else -# include -#endif -#include - -using namespace std; - -//----------------------------------------------------------------------------- -TrackerDialog::TrackerDialog() - : tracker(NULL), - video_widget_dialog(NULL), - timer(this), - trans_calib_running(false) -{ - qDebug()<<"TrackerDialog::TrackerDialog"; - setAttribute(Qt::WA_DeleteOnClose, false); - - ui.setupUi( this ); - - vector device_names; - get_camera_device_names(device_names); - for (vector::iterator iter = device_names.begin(); iter != device_names.end(); ++iter) - { - ui.camdevice_combo->addItem(iter->c_str()); - } - - ui.camroll_combo->addItem("-90"); - ui.camroll_combo->addItem("0"); - ui.camroll_combo->addItem("90"); - - tie_setting(s.dyn_pose_res, ui.dynpose_check); - tie_setting(s.reset_time, ui.reset_spin); - - tie_setting(s.cam_index, ui.camdevice_combo); - tie_setting(s.cam_f, ui.f_dspin); - tie_setting(s.cam_res_x, ui.res_x_spin); - tie_setting(s.cam_res_y, ui.res_y_spin); - tie_setting(s.cam_fps, ui.fps_spin); - tie_setting(s.cam_roll, ui.camroll_combo); - tie_setting(s.cam_pitch, ui.campitch_spin); - tie_setting(s.cam_yaw, ui.camyaw_spin); - - tie_setting(s.threshold_secondary, ui.threshold_secondary_slider); - tie_setting(s.threshold, ui.threshold_slider); - - tie_setting(s.min_point_size, ui.mindiam_spin); - tie_setting(s.max_point_size, ui.maxdiam_spin); - - tie_setting(s.clip_by, ui.clip_bheight_spin); - tie_setting(s.clip_bz, ui.clip_blength_spin); - tie_setting(s.clip_ty, ui.clip_theight_spin); - tie_setting(s.clip_tz, ui.clip_tlength_spin); - - tie_setting(s.cap_x, ui.cap_width_spin); - tie_setting(s.cap_y, ui.cap_height_spin); - tie_setting(s.cap_z, ui.cap_length_spin); - - tie_setting(s.m01_x, ui.m1x_spin); - tie_setting(s.m01_y, 
ui.m1y_spin); - tie_setting(s.m01_z, ui.m1z_spin); - - tie_setting(s.m02_x, ui.m2x_spin); - tie_setting(s.m02_y, ui.m2y_spin); - tie_setting(s.m02_z, ui.m2z_spin); - - tie_setting(s.t_MH_x, ui.tx_spin); - tie_setting(s.t_MH_y, ui.ty_spin); - tie_setting(s.t_MH_z, ui.tz_spin); - - connect( ui.tcalib_button,SIGNAL(toggled(bool)), this,SLOT(startstop_trans_calib(bool)) ); - connect(ui.reset_button, SIGNAL(clicked()), this, SLOT(doReset())); - - connect(ui.ok_button, SIGNAL(clicked()), this, SLOT(doOK())); - connect(ui.cancel_button, SIGNAL(clicked()), this, SLOT(doCancel())); - connect(ui.btnApply, SIGNAL(clicked()), this, SLOT(doApply())); - - ui.model_tabs->setCurrentIndex(s.active_model_panel); - - connect(ui.model_tabs, SIGNAL(currentChanged(int)), this, SLOT(set_model(int))); - connect(&timer,SIGNAL(timeout()), this,SLOT(poll_tracker_info())); - timer.start(100); - - connect(s.b.get(), SIGNAL(bundleChanged()), this, SLOT(do_apply_without_saving())); -} - -void TrackerDialog::set_model_clip() -{ - s.m01_x = 0; - s.m01_y = static_cast(s.clip_ty); - s.m01_z = -static_cast(s.clip_tz); - s.m02_x = 0; - s.m02_y = -static_cast(s.clip_by); - s.m02_z = -static_cast(s.clip_bz); - - settings_changed(); -} - -void TrackerDialog::set_model_cap() -{ - s.m01_x = -static_cast(s.cap_x); - s.m01_y = -static_cast(s.cap_y); - s.m01_z = -static_cast(s.cap_z); - s.m02_x = static_cast(s.cap_x); - s.m02_y = -static_cast(s.cap_y); - s.m02_z = -static_cast(s.cap_z); - - settings_changed(); -} - -void TrackerDialog::set_model_custom() -{ - settings_changed(); -} - -void TrackerDialog::set_model(int val) -{ - s.active_model_panel = val; -} - -void TrackerDialog::startstop_trans_calib(bool start) -{ - if (start) - { - qDebug()<<"TrackerDialog:: Starting translation calibration"; - trans_calib.reset(); - trans_calib_running = true; - } - else - { - qDebug()<<"TrackerDialog:: Stoppping translation calibration"; - trans_calib_running = false; - { - auto tmp = trans_calib.get_estimate(); - s.t_MH_x = tmp[0]; - s.t_MH_y = tmp[1]; - s.t_MH_z = tmp[2]; - } - settings_changed(); - } -} - -void TrackerDialog::trans_calib_step() -{ - if (tracker) - { - FrameTrafo X_CM; - tracker->get_pose(&X_CM); - trans_calib.update(X_CM.R, X_CM.t); - cv::Vec3f t_MH = trans_calib.get_estimate(); - s.t_MH_x = t_MH[0]; - s.t_MH_y = t_MH[1]; - s.t_MH_z = t_MH[2]; - } -} - -void TrackerDialog::settings_changed() -{ - if (tracker) tracker->apply(s); -} - -void TrackerDialog::doCenter() -{ - if (tracker) tracker->center(); -} - -void TrackerDialog::doReset() -{ - if (tracker) tracker->reset(); -} - -void TrackerDialog::save() -{ - do_apply_without_saving(); - s.b->save(); -} - -void TrackerDialog::doOK() -{ - save(); - close(); -} - -void TrackerDialog::do_apply_without_saving() -{ - switch (s.active_model_panel) { - default: - case 0: - set_model_clip(); - break; - case 1: - set_model_cap(); - break; - case 2: - set_model_custom(); - break; - } - if (tracker) tracker->apply(s); -} - -void TrackerDialog::doApply() -{ - save(); -} - -void TrackerDialog::doCancel() -{ - s.b->revert(); - close(); -} - -void TrackerDialog::widget_destroyed(QObject* obj) -{ - if (obj == video_widget_dialog) { - // widget was / will be already deleted by Qt - destroy_video_widget(false); - } -} - -void TrackerDialog::create_video_widget() -{ - // this should not happen but better be sure - if (video_widget_dialog) destroy_video_widget(); - if (!tracker) return; - - video_widget_dialog = new VideoWidgetDialog(this, tracker); - video_widget_dialog->setAttribute( 
Qt::WA_DeleteOnClose ); - connect( video_widget_dialog, SIGNAL(destroyed(QObject*)), this, SLOT(widget_destroyed(QObject*)) ); - video_widget_dialog->show(); -} - -void TrackerDialog::destroy_video_widget(bool do_delete /*= true*/) -{ - if (video_widget_dialog) { - if (do_delete) delete video_widget_dialog; - video_widget_dialog = NULL; - } -} - -void TrackerDialog::poll_tracker_info() -{ - if (tracker) - { - QString to_print; - - // display caminfo - CamInfo info; - tracker->get_cam_info(&info); - to_print = QString::number(info.res_x)+"x"+QString::number(info.res_y)+" @ "+QString::number(info.fps)+" FPS"; - ui.caminfo_label->setText(to_print); - - // display pointinfo - int n_points = tracker->get_n_points(); - to_print = QString::number(n_points); - if (n_points == 3) - to_print += " OK!"; - else - to_print += " BAD!"; - ui.pointinfo_label->setText(to_print); - - // update calibration - if (trans_calib_running) trans_calib_step(); - - // update videowidget - if (video_widget_dialog) { - video_widget_dialog->get_video_widget()->update_frame_and_points(); - } - } - else - { - QString to_print = "Tracker offline"; - ui.caminfo_label->setText(to_print); - ui.pointinfo_label->setText(to_print); - } -} - -void TrackerDialog::registerTracker(ITracker *t) -{ - qDebug()<<"TrackerDialog:: Tracker registered"; - tracker = static_cast(t); - if (isVisible() & s.b->modifiedp()) - tracker->apply(s); - ui.tcalib_button->setEnabled(true); - //ui.center_button->setEnabled(true); - ui.reset_button->setEnabled(true); -} - -void TrackerDialog::unRegisterTracker() -{ - qDebug()<<"TrackerDialog:: Tracker un-registered"; - tracker = NULL; - destroy_video_widget(); - ui.tcalib_button->setEnabled(false); - //ui.center_button->setEnabled(false); - ui.reset_button->setEnabled(false); -} - -extern "C" OPENTRACK_EXPORT ITrackerDialog* CALLING_CONVENTION GetDialog( ) -{ - return new TrackerDialog; -} +/* Copyright (c) 2012 Patrick Ruoff + * + * Permission to use, copy, modify, and/or distribute this software for any + * purpose with or without fee is hereby granted, provided that the above + * copyright notice and this permission notice appear in all copies. 
+ */ + +#include "ftnoir_tracker_pt_dialog.h" + +#include +#include +#include +#ifndef OPENTRACK_API +# include +#else +# include +#endif +#include + +using namespace std; + +//----------------------------------------------------------------------------- +TrackerDialog::TrackerDialog() + : tracker(NULL), + video_widget_dialog(NULL), + timer(this), + trans_calib_running(false) +{ + qDebug()<<"TrackerDialog::TrackerDialog"; + setAttribute(Qt::WA_DeleteOnClose, false); + + ui.setupUi( this ); + + vector device_names; + get_camera_device_names(device_names); + for (vector::iterator iter = device_names.begin(); iter != device_names.end(); ++iter) + { + ui.camdevice_combo->addItem(iter->c_str()); + } + + ui.camroll_combo->addItem("-90"); + ui.camroll_combo->addItem("0"); + ui.camroll_combo->addItem("90"); + + tie_setting(s.dyn_pose_res, ui.dynpose_check); + tie_setting(s.reset_time, ui.reset_spin); + + tie_setting(s.cam_index, ui.camdevice_combo); + tie_setting(s.cam_f, ui.f_dspin); + tie_setting(s.cam_res_x, ui.res_x_spin); + tie_setting(s.cam_res_y, ui.res_y_spin); + tie_setting(s.cam_fps, ui.fps_spin); + tie_setting(s.cam_roll, ui.camroll_combo); + tie_setting(s.cam_pitch, ui.campitch_spin); + tie_setting(s.cam_yaw, ui.camyaw_spin); + + tie_setting(s.threshold_secondary, ui.threshold_secondary_slider); + tie_setting(s.threshold, ui.threshold_slider); + + tie_setting(s.min_point_size, ui.mindiam_spin); + tie_setting(s.max_point_size, ui.maxdiam_spin); + + tie_setting(s.clip_by, ui.clip_bheight_spin); + tie_setting(s.clip_bz, ui.clip_blength_spin); + tie_setting(s.clip_ty, ui.clip_theight_spin); + tie_setting(s.clip_tz, ui.clip_tlength_spin); + + tie_setting(s.cap_x, ui.cap_width_spin); + tie_setting(s.cap_y, ui.cap_height_spin); + tie_setting(s.cap_z, ui.cap_length_spin); + + tie_setting(s.m01_x, ui.m1x_spin); + tie_setting(s.m01_y, ui.m1y_spin); + tie_setting(s.m01_z, ui.m1z_spin); + + tie_setting(s.m02_x, ui.m2x_spin); + tie_setting(s.m02_y, ui.m2y_spin); + tie_setting(s.m02_z, ui.m2z_spin); + + tie_setting(s.t_MH_x, ui.tx_spin); + tie_setting(s.t_MH_y, ui.ty_spin); + tie_setting(s.t_MH_z, ui.tz_spin); + + connect( ui.tcalib_button,SIGNAL(toggled(bool)), this,SLOT(startstop_trans_calib(bool)) ); + connect(ui.reset_button, SIGNAL(clicked()), this, SLOT(doReset())); + + connect(ui.ok_button, SIGNAL(clicked()), this, SLOT(doOK())); + connect(ui.cancel_button, SIGNAL(clicked()), this, SLOT(doCancel())); + connect(ui.btnApply, SIGNAL(clicked()), this, SLOT(doApply())); + + ui.model_tabs->setCurrentIndex(s.active_model_panel); + + connect(ui.model_tabs, SIGNAL(currentChanged(int)), this, SLOT(set_model(int))); + connect(&timer,SIGNAL(timeout()), this,SLOT(poll_tracker_info())); + timer.start(100); + + connect(s.b.get(), SIGNAL(bundleChanged()), this, SLOT(do_apply_without_saving())); +} + +void TrackerDialog::set_model_clip() +{ + s.m01_x = 0; + s.m01_y = static_cast(s.clip_ty); + s.m01_z = -static_cast(s.clip_tz); + s.m02_x = 0; + s.m02_y = -static_cast(s.clip_by); + s.m02_z = -static_cast(s.clip_bz); + + settings_changed(); +} + +void TrackerDialog::set_model_cap() +{ + s.m01_x = -static_cast(s.cap_x); + s.m01_y = -static_cast(s.cap_y); + s.m01_z = -static_cast(s.cap_z); + s.m02_x = static_cast(s.cap_x); + s.m02_y = -static_cast(s.cap_y); + s.m02_z = -static_cast(s.cap_z); + + settings_changed(); +} + +void TrackerDialog::set_model_custom() +{ + settings_changed(); +} + +void TrackerDialog::set_model(int val) +{ + s.active_model_panel = val; +} + +void TrackerDialog::startstop_trans_calib(bool 
start) +{ + if (start) + { + qDebug()<<"TrackerDialog:: Starting translation calibration"; + trans_calib.reset(); + trans_calib_running = true; + } + else + { + qDebug()<<"TrackerDialog:: Stoppping translation calibration"; + trans_calib_running = false; + { + auto tmp = trans_calib.get_estimate(); + s.t_MH_x = tmp[0]; + s.t_MH_y = tmp[1]; + s.t_MH_z = tmp[2]; + } + settings_changed(); + } +} + +void TrackerDialog::trans_calib_step() +{ + if (tracker) + { + FrameTrafo X_CM; + tracker->get_pose(&X_CM); + trans_calib.update(X_CM.R, X_CM.t); + cv::Vec3f t_MH = trans_calib.get_estimate(); + s.t_MH_x = t_MH[0]; + s.t_MH_y = t_MH[1]; + s.t_MH_z = t_MH[2]; + } +} + +void TrackerDialog::settings_changed() +{ + if (tracker) tracker->apply(s); +} + +void TrackerDialog::doCenter() +{ + if (tracker) tracker->center(); +} + +void TrackerDialog::doReset() +{ + if (tracker) tracker->reset(); +} + +void TrackerDialog::save() +{ + do_apply_without_saving(); + s.b->save(); +} + +void TrackerDialog::doOK() +{ + save(); + close(); +} + +void TrackerDialog::do_apply_without_saving() +{ + switch (s.active_model_panel) { + default: + case 0: + set_model_clip(); + break; + case 1: + set_model_cap(); + break; + case 2: + set_model_custom(); + break; + } + if (tracker) tracker->apply(s); +} + +void TrackerDialog::doApply() +{ + save(); +} + +void TrackerDialog::doCancel() +{ + s.b->revert(); + close(); +} + +void TrackerDialog::widget_destroyed(QObject* obj) +{ + if (obj == video_widget_dialog) { + // widget was / will be already deleted by Qt + destroy_video_widget(false); + } +} + +void TrackerDialog::create_video_widget() +{ + // this should not happen but better be sure + if (video_widget_dialog) destroy_video_widget(); + if (!tracker) return; + + video_widget_dialog = new VideoWidgetDialog(this, tracker); + video_widget_dialog->setAttribute( Qt::WA_DeleteOnClose ); + connect( video_widget_dialog, SIGNAL(destroyed(QObject*)), this, SLOT(widget_destroyed(QObject*)) ); + video_widget_dialog->show(); +} + +void TrackerDialog::destroy_video_widget(bool do_delete /*= true*/) +{ + if (video_widget_dialog) { + if (do_delete) delete video_widget_dialog; + video_widget_dialog = NULL; + } +} + +void TrackerDialog::poll_tracker_info() +{ + if (tracker) + { + QString to_print; + + // display caminfo + CamInfo info; + tracker->get_cam_info(&info); + to_print = QString::number(info.res_x)+"x"+QString::number(info.res_y)+" @ "+QString::number(info.fps)+" FPS"; + ui.caminfo_label->setText(to_print); + + // display pointinfo + int n_points = tracker->get_n_points(); + to_print = QString::number(n_points); + if (n_points == 3) + to_print += " OK!"; + else + to_print += " BAD!"; + ui.pointinfo_label->setText(to_print); + + // update calibration + if (trans_calib_running) trans_calib_step(); + + // update videowidget + if (video_widget_dialog) { + video_widget_dialog->get_video_widget()->update_frame_and_points(); + } + } + else + { + QString to_print = "Tracker offline"; + ui.caminfo_label->setText(to_print); + ui.pointinfo_label->setText(to_print); + } +} + +void TrackerDialog::registerTracker(ITracker *t) +{ + qDebug()<<"TrackerDialog:: Tracker registered"; + tracker = static_cast(t); + if (isVisible() & s.b->modifiedp()) + tracker->apply(s); + ui.tcalib_button->setEnabled(true); + //ui.center_button->setEnabled(true); + ui.reset_button->setEnabled(true); +} + +void TrackerDialog::unRegisterTracker() +{ + qDebug()<<"TrackerDialog:: Tracker un-registered"; + tracker = NULL; + destroy_video_widget(); + 
ui.tcalib_button->setEnabled(false); + //ui.center_button->setEnabled(false); + ui.reset_button->setEnabled(false); +} + +extern "C" OPENTRACK_EXPORT ITrackerDialog* CALLING_CONVENTION GetDialog( ) +{ + return new TrackerDialog; +} diff --git a/ftnoir_tracker_pt/ftnoir_tracker_pt_dialog.h b/ftnoir_tracker_pt/ftnoir_tracker_pt_dialog.h index 5cb09130..a4d9c4b5 100644 --- a/ftnoir_tracker_pt/ftnoir_tracker_pt_dialog.h +++ b/ftnoir_tracker_pt/ftnoir_tracker_pt_dialog.h @@ -1,70 +1,70 @@ -/* Copyright (c) 2012 Patrick Ruoff - * - * Permission to use, copy, modify, and/or distribute this software for any - * purpose with or without fee is hereby granted, provided that the above - * copyright notice and this permission notice appear in all copies. - */ - -#ifndef FTNOIR_TRACKER_PT_DIALOG_H -#define FTNOIR_TRACKER_PT_DIALOG_H - -#ifdef OPENTRACK_API -# include "facetracknoir/plugin-api.hpp" -#else -#include "..\ftnoir_tracker_base\ftnoir_tracker_base.h" -#endif -#include "ftnoir_tracker_pt_settings.h" -#include "ftnoir_tracker_pt.h" -#include "trans_calib.h" -#include "pt_video_widget.h" -#include "ui_FTNoIR_PT_Controls.h" - -#include - -//----------------------------------------------------------------------------- -// The dialog that shows up when the user presses "Settings" -class TrackerDialog : public QWidget, Ui::UICPTClientControls, public ITrackerDialog -{ - Q_OBJECT -public: - TrackerDialog(); - void registerTracker(ITracker *tracker); - void unRegisterTracker(); - void save(); - void trans_calib_step(); - -public slots: - void doCenter(); - void doReset(); - void doOK(); - void doApply(); - void doCancel(); - void do_apply_without_saving(); - - void startstop_trans_calib(bool start); - void widget_destroyed(QObject* obj); - void create_video_widget(); - void poll_tracker_info(); - void set_model(int idx); - -protected: - void destroy_video_widget(bool do_delete = true); - - void set_model_clip(); - void set_model_cap(); - void set_model_custom(); - - void settings_changed(); - - settings s; - Tracker* tracker; - VideoWidgetDialog* video_widget_dialog; - QTimer timer; - - TranslationCalibrator trans_calib; - bool trans_calib_running; - - Ui::UICPTClientControls ui; -}; - -#endif //FTNOIR_TRACKER_PT_DIALOG_H +/* Copyright (c) 2012 Patrick Ruoff + * + * Permission to use, copy, modify, and/or distribute this software for any + * purpose with or without fee is hereby granted, provided that the above + * copyright notice and this permission notice appear in all copies. 
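[Editor's note] Translation calibration in the dialog above is driven entirely by the 100 ms poll timer: while the calibration button is toggled on, each poll_tracker_info() tick calls trans_calib_step(), which feeds the current pose into the calibrator and copies the running estimate into the settings; toggling the button off writes the final estimate the same way. Condensed into one free-standing step; a sketch only, assuming the tracker's own headers, the function name is illustrative.

    #include "ftnoir_tracker_pt.h"   // Tracker, FrameTrafo, settings
    #include "trans_calib.h"         // TranslationCalibrator

    // One calibration step, as performed by TrackerDialog::trans_calib_step().
    static void calibration_step(Tracker& tracker,
                                 TranslationCalibrator& calib,
                                 settings& s)
    {
        FrameTrafo X_CM;
        tracker.get_pose(&X_CM);           // current camera-to-model pose
        calib.update(X_CM.R, X_CM.t);      // fold in one measurement
        cv::Vec3f t_MH = calib.get_estimate();
        s.t_MH_x = t_MH[0];                // running model-to-head translation
        s.t_MH_y = t_MH[1];
        s.t_MH_z = t_MH[2];
    }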
+ */ + +#ifndef FTNOIR_TRACKER_PT_DIALOG_H +#define FTNOIR_TRACKER_PT_DIALOG_H + +#ifdef OPENTRACK_API +# include "facetracknoir/plugin-api.hpp" +#else +#include "..\ftnoir_tracker_base\ftnoir_tracker_base.h" +#endif +#include "ftnoir_tracker_pt_settings.h" +#include "ftnoir_tracker_pt.h" +#include "trans_calib.h" +#include "pt_video_widget.h" +#include "ui_FTNoIR_PT_Controls.h" + +#include + +//----------------------------------------------------------------------------- +// The dialog that shows up when the user presses "Settings" +class TrackerDialog : public QWidget, Ui::UICPTClientControls, public ITrackerDialog +{ + Q_OBJECT +public: + TrackerDialog(); + void registerTracker(ITracker *tracker); + void unRegisterTracker(); + void save(); + void trans_calib_step(); + +public slots: + void doCenter(); + void doReset(); + void doOK(); + void doApply(); + void doCancel(); + void do_apply_without_saving(); + + void startstop_trans_calib(bool start); + void widget_destroyed(QObject* obj); + void create_video_widget(); + void poll_tracker_info(); + void set_model(int idx); + +protected: + void destroy_video_widget(bool do_delete = true); + + void set_model_clip(); + void set_model_cap(); + void set_model_custom(); + + void settings_changed(); + + settings s; + Tracker* tracker; + VideoWidgetDialog* video_widget_dialog; + QTimer timer; + + TranslationCalibrator trans_calib; + bool trans_calib_running; + + Ui::UICPTClientControls ui; +}; + +#endif //FTNOIR_TRACKER_PT_DIALOG_H diff --git a/ftnoir_tracker_pt/ftnoir_tracker_pt_dll.cpp b/ftnoir_tracker_pt/ftnoir_tracker_pt_dll.cpp index dd7b08d6..07e1d9e7 100644 --- a/ftnoir_tracker_pt/ftnoir_tracker_pt_dll.cpp +++ b/ftnoir_tracker_pt/ftnoir_tracker_pt_dll.cpp @@ -1,42 +1,42 @@ -/* Copyright (c) 2012 Patrick Ruoff - * - * Permission to use, copy, modify, and/or distribute this software for any - * purpose with or without fee is hereby granted, provided that the above - * copyright notice and this permission notice appear in all copies. - */ - -#include "ftnoir_tracker_pt_dll.h" -#include - -//----------------------------------------------------------------------------- -void TrackerDll::getFullName(QString *strToBeFilled) -{ - *strToBeFilled = "PointTracker 1.1"; -} - -void TrackerDll::getShortName(QString *strToBeFilled) -{ - *strToBeFilled = "PointTracker"; -} - -void TrackerDll::getDescription(QString *strToBeFilled) -{ - *strToBeFilled = "Tracks a 3-point model with know geometry like Freetrack / TrackIR"; -} - -void TrackerDll::getIcon(QIcon *icon) -{ - *icon = QIcon(":/Resources/Logo_IR.png"); -} - - -#ifdef OPENTRACK_API -# include "facetracknoir/plugin-support.h" -extern "C" OPENTRACK_EXPORT Metadata* CALLING_CONVENTION GetMetadata() -#else -# pragma comment(linker, "/export:GetTrackerDll=_GetTrackerDll@0") -OPENTRACK_EXPORT ITrackerDllPtr __stdcall GetTrackerDll() -#endif -{ - return new TrackerDll; -} +/* Copyright (c) 2012 Patrick Ruoff + * + * Permission to use, copy, modify, and/or distribute this software for any + * purpose with or without fee is hereby granted, provided that the above + * copyright notice and this permission notice appear in all copies. 
+ */ + +#include "ftnoir_tracker_pt_dll.h" +#include + +//----------------------------------------------------------------------------- +void TrackerDll::getFullName(QString *strToBeFilled) +{ + *strToBeFilled = "PointTracker 1.1"; +} + +void TrackerDll::getShortName(QString *strToBeFilled) +{ + *strToBeFilled = "PointTracker"; +} + +void TrackerDll::getDescription(QString *strToBeFilled) +{ + *strToBeFilled = "Tracks a 3-point model with know geometry like Freetrack / TrackIR"; +} + +void TrackerDll::getIcon(QIcon *icon) +{ + *icon = QIcon(":/Resources/Logo_IR.png"); +} + + +#ifdef OPENTRACK_API +# include "facetracknoir/plugin-support.h" +extern "C" OPENTRACK_EXPORT Metadata* CALLING_CONVENTION GetMetadata() +#else +# pragma comment(linker, "/export:GetTrackerDll=_GetTrackerDll@0") +OPENTRACK_EXPORT ITrackerDllPtr __stdcall GetTrackerDll() +#endif +{ + return new TrackerDll; +} diff --git a/ftnoir_tracker_pt/ftnoir_tracker_pt_dll.h b/ftnoir_tracker_pt/ftnoir_tracker_pt_dll.h index fce7aec2..50f66a35 100644 --- a/ftnoir_tracker_pt/ftnoir_tracker_pt_dll.h +++ b/ftnoir_tracker_pt/ftnoir_tracker_pt_dll.h @@ -1,26 +1,26 @@ -/* Copyright (c) 2012 Patrick Ruoff - * - * Permission to use, copy, modify, and/or distribute this software for any - * purpose with or without fee is hereby granted, provided that the above - * copyright notice and this permission notice appear in all copies. - */ - -#if defined(OPENTRACK_API) -# include "facetracknoir/plugin-api.hpp" -#else -# include "../ftnoir_tracker_base/ftnoir_tracker_base.h" -#endif - -//----------------------------------------------------------------------------- -class TrackerDll : -#if defined(OPENTRACK_API) - public Metadata -#else - public ITrackerDll -#endif -{ - void getFullName(QString *strToBeFilled); - void getShortName(QString *strToBeFilled); - void getDescription(QString *strToBeFilled); - void getIcon(QIcon *icon); -}; +/* Copyright (c) 2012 Patrick Ruoff + * + * Permission to use, copy, modify, and/or distribute this software for any + * purpose with or without fee is hereby granted, provided that the above + * copyright notice and this permission notice appear in all copies. + */ + +#if defined(OPENTRACK_API) +# include "facetracknoir/plugin-api.hpp" +#else +# include "../ftnoir_tracker_base/ftnoir_tracker_base.h" +#endif + +//----------------------------------------------------------------------------- +class TrackerDll : +#if defined(OPENTRACK_API) + public Metadata +#else + public ITrackerDll +#endif +{ + void getFullName(QString *strToBeFilled); + void getShortName(QString *strToBeFilled); + void getDescription(QString *strToBeFilled); + void getIcon(QIcon *icon); +}; diff --git a/ftnoir_tracker_pt/ftnoir_tracker_pt_settings.h b/ftnoir_tracker_pt/ftnoir_tracker_pt_settings.h index 1eca1e35..e4cb9ad3 100644 --- a/ftnoir_tracker_pt/ftnoir_tracker_pt_settings.h +++ b/ftnoir_tracker_pt/ftnoir_tracker_pt_settings.h @@ -1,81 +1,81 @@ -/* Copyright (c) 2012 Patrick Ruoff - * - * Permission to use, copy, modify, and/or distribute this software for any - * purpose with or without fee is hereby granted, provided that the above - * copyright notice and this permission notice appear in all copies. 
- */ - -#ifndef FTNOIR_TRACKER_PT_SETTINGS_H -#define FTNOIR_TRACKER_PT_SETTINGS_H - -#include -#include "point_tracker.h" - -#include "facetracknoir/options.h" -using namespace options; - -struct settings -{ - pbundle b; - value cam_index, - cam_res_x, - cam_res_y, - cam_fps, - cam_roll, - cam_pitch, - cam_yaw, - threshold, - threshold_secondary, - min_point_size, - max_point_size; - value cam_f; - - value m01_x, m01_y, m01_z; - value m02_x, m02_y, m02_z; - value dyn_pose_res, video_widget; - - value t_MH_x, t_MH_y, t_MH_z; - - value reset_time; - - value clip_ty, clip_tz, clip_by, clip_bz; - value active_model_panel, cap_x, cap_y, cap_z; - - settings() : - b(bundle("tracker-pt")), - cam_index(b, "camera-index", 0), - cam_res_x(b, "camera-res-width", 640), - cam_res_y(b, "camera-res-height", 480), - cam_fps(b, "camera-fps", 30), - cam_roll(b, "camera-roll", 1), - cam_pitch(b, "camera-pitch", 0), - cam_yaw(b, "camera-yaw", 0), - threshold(b, "threshold-primary", 128), - threshold_secondary(b, "threshold-secondary", 128), - min_point_size(b, "min-point-size", 10), - max_point_size(b, "max-point-size", 50), - cam_f(b, "camera-focal-length", 1), - m01_x(b, "m_01-x", 0), - m01_y(b, "m_01-y", 0), - m01_z(b, "m_01-z", 0), - m02_x(b, "m_02-x", 0), - m02_y(b, "m_02-y", 0), - m02_z(b, "m_02-z", 0), - dyn_pose_res(b, "dynamic-pose-resolution", false), - video_widget(b, "video-widget", true), - t_MH_x(b, "model-centroid-x", 0), - t_MH_y(b, "model-centroid-y", 0), - t_MH_z(b, "model-centroid-z", 0), - reset_time(b, "reset-time", 2000), - clip_ty(b, "clip-ty", 0), - clip_tz(b, "clip-tz", 0), - clip_by(b, "clip-by", 0), - clip_bz(b, "clip-bz", 0), - active_model_panel(b, "active-model-panel", 0), - cap_x(b, "cap-x", 0), - cap_y(b, "cap-y", 0), - cap_z(b, "cap-z", 0) - {} -}; - -#endif //FTNOIR_TRACKER_PT_SETTINGS_H +/* Copyright (c) 2012 Patrick Ruoff + * + * Permission to use, copy, modify, and/or distribute this software for any + * purpose with or without fee is hereby granted, provided that the above + * copyright notice and this permission notice appear in all copies. 
+ */ + +#ifndef FTNOIR_TRACKER_PT_SETTINGS_H +#define FTNOIR_TRACKER_PT_SETTINGS_H + +#include +#include "point_tracker.h" + +#include "facetracknoir/options.h" +using namespace options; + +struct settings +{ + pbundle b; + value cam_index, + cam_res_x, + cam_res_y, + cam_fps, + cam_roll, + cam_pitch, + cam_yaw, + threshold, + threshold_secondary, + min_point_size, + max_point_size; + value cam_f; + + value m01_x, m01_y, m01_z; + value m02_x, m02_y, m02_z; + value dyn_pose_res, video_widget; + + value t_MH_x, t_MH_y, t_MH_z; + + value reset_time; + + value clip_ty, clip_tz, clip_by, clip_bz; + value active_model_panel, cap_x, cap_y, cap_z; + + settings() : + b(bundle("tracker-pt")), + cam_index(b, "camera-index", 0), + cam_res_x(b, "camera-res-width", 640), + cam_res_y(b, "camera-res-height", 480), + cam_fps(b, "camera-fps", 30), + cam_roll(b, "camera-roll", 1), + cam_pitch(b, "camera-pitch", 0), + cam_yaw(b, "camera-yaw", 0), + threshold(b, "threshold-primary", 128), + threshold_secondary(b, "threshold-secondary", 128), + min_point_size(b, "min-point-size", 10), + max_point_size(b, "max-point-size", 50), + cam_f(b, "camera-focal-length", 1), + m01_x(b, "m_01-x", 0), + m01_y(b, "m_01-y", 0), + m01_z(b, "m_01-z", 0), + m02_x(b, "m_02-x", 0), + m02_y(b, "m_02-y", 0), + m02_z(b, "m_02-z", 0), + dyn_pose_res(b, "dynamic-pose-resolution", false), + video_widget(b, "video-widget", true), + t_MH_x(b, "model-centroid-x", 0), + t_MH_y(b, "model-centroid-y", 0), + t_MH_z(b, "model-centroid-z", 0), + reset_time(b, "reset-time", 2000), + clip_ty(b, "clip-ty", 0), + clip_tz(b, "clip-tz", 0), + clip_by(b, "clip-by", 0), + clip_bz(b, "clip-bz", 0), + active_model_panel(b, "active-model-panel", 0), + cap_x(b, "cap-x", 0), + cap_y(b, "cap-y", 0), + cap_z(b, "cap-z", 0) + {} +}; + +#endif //FTNOIR_TRACKER_PT_SETTINGS_H diff --git a/ftnoir_tracker_pt/point_extractor.cpp b/ftnoir_tracker_pt/point_extractor.cpp index 968fe23e..b0e29270 100644 --- a/ftnoir_tracker_pt/point_extractor.cpp +++ b/ftnoir_tracker_pt/point_extractor.cpp @@ -1,163 +1,163 @@ -/* Copyright (c) 2012 Patrick Ruoff - * - * Permission to use, copy, modify, and/or distribute this software for any - * purpose with or without fee is hereby granted, provided that the above - * copyright notice and this permission notice appear in all copies. 
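[Editor's note] Every field of the settings struct above is an options::value<T> bound to a key of the "tracker-pt" bundle together with its default, and the dialog later binds the same value to a widget with tie_setting(), so the widget and the stored value track each other. A minimal sketch of that pattern; the struct name here is illustrative, the key, default and tie_setting() call are taken from the code in this patch.

    #include "facetracknoir/options.h"
    using namespace options;

    // Same shape as the settings struct above, reduced to a single field.
    struct demo_settings
    {
        pbundle b;
        value<int> threshold;                      // persisted as "threshold-primary"
        demo_settings() :
            b(bundle("tracker-pt")),
            threshold(b, "threshold-primary", 128) // bundle key and default value
        {}
    };

    // In the dialog, the stored value is then bound to its widget, e.g.:
    //     tie_setting(s.threshold, ui.threshold_slider);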
- */ - -#include "point_extractor.h" -#include - - -using namespace cv; -using namespace std; - - -PointExtractor::PointExtractor(){ - //if (!AllocConsole()){} - //else SetConsoleTitle("debug"); - //freopen("CON", "w", stdout); - //freopen("CON", "w", stderr); -} -// ---------------------------------------------------------------------------- -const vector& PointExtractor::extract_points(Mat frame, float /*dt*/, bool draw_output) -{ - const int W = frame.cols; - const int H = frame.rows; - - if (frame_last.cols != W || frame_last.rows != H) - { - frame_last = cv::Mat(); - } - - // clear old points - points.clear(); - - // convert to grayscale - Mat frame_gray; - cvtColor(frame, frame_gray, CV_RGB2GRAY); - - int secondary = threshold_secondary_val; - - // mask for everything that passes the threshold (or: the upper threshold of the hysteresis) - Mat frame_bin; - // only used if draw_output - Mat frame_bin_copy; - // mask for everything that passes - Mat frame_bin_low; - // mask for lower-threshold && combined result of last, needs to remain in scope until drawing, but is only used if secondary != 0 - Mat frame_last_and_low; - - if(secondary==0){ - threshold(frame_gray, frame_bin, threshold_val, 255, THRESH_BINARY); - }else{ - // we recombine a number of buffers, this might be slower than a single loop of per-pixel logic - // but it might as well be faster if openCV makes good use of SIMD - float t = threshold_val; - //float hyst = float(threshold_secondary_val)/512.; - //threshold(frame_gray, frame_bin, (t + ((255.-t)*hyst)), 255, THRESH_BINARY); - float hyst = float(threshold_secondary_val)/256.; - threshold(frame_gray, frame_bin, t, 255, THRESH_BINARY); - threshold(frame_gray, frame_bin_low,std::max(float(1), t - (t*hyst)), 255, THRESH_BINARY); - - if(draw_output) frame_bin.copyTo(frame_bin_copy); - if(frame_last.empty()){ - frame_bin.copyTo(frame_last); - }else{ - // keep pixels from last if they are above lower threshold - bitwise_and(frame_last, frame_bin_low, frame_last_and_low); - // union of pixels >= higher threshold and pixels >= lower threshold - bitwise_or(frame_bin, frame_last_and_low, frame_last); - frame_last.copyTo(frame_bin); - } - } - unsigned int region_size_min = 3.14*min_size*min_size/4.0; - unsigned int region_size_max = 3.14*max_size*max_size/4.0; - - int blob_index = 1; - for (int y=0; y= 255) break; - for (int x=0; x= 255) break; - - // find connected components with floodfill - if (frame_bin.at(y,x) != 255) continue; - Rect rect; - - floodFill(frame_bin, Point(x,y), Scalar(blob_index), &rect, Scalar(0), Scalar(0), FLOODFILL_FIXED_RANGE); - blob_index++; - - // calculate the size of the connected component - unsigned int region_size = 0; - for (int i=rect.y; i < (rect.y+rect.height); i++) - { - for (int j=rect.x; j < (rect.x+rect.width); j++) - { - if (frame_bin.at(i,j) != blob_index-1) continue; - region_size++; - } - } - - if (region_size < region_size_min || region_size > region_size_max) continue; - - // calculate the center of mass: - // mx = (sum_ij j*f(frame_grey_ij)) / (sum_ij f(frame_grey_ij)) - // my = ... 
- // f maps from [threshold,256] -> [0, 1], lower values are mapped to 0 - float m = 0; - float mx = 0; - float my = 0; - for (int i=rect.y; i < (rect.y+rect.height); i++) - { - for (int j=rect.x; j < (rect.x+rect.width); j++) - { - if (frame_bin.at(i,j) != blob_index-1) continue; - float val; - - if(secondary==0){ - val = frame_gray.at(i,j); - val = float(val - threshold_val)/(256 - threshold_val); - val = val*val; // makes it more stable (less emphasis on low values, more on the peak) - }else{ - //hysteresis point detection gets stability from ignoring pixel noise so we decidedly leave the actual pixel values out of the picture - val = frame_last.at(i,j) / 256.; - } - - m += val; - mx += j * val; - my += i * val; - } - } - - // convert to centered camera coordinate system with y axis upwards - Vec2f c; - c[0] = (mx/m - W/2)/W; - c[1] = -(my/m - H/2)/W; - //qDebug()< "< channels; - if(secondary==0){ - frame_bin.setTo(170, frame_bin); - channels.push_back(frame_gray + frame_bin); - channels.push_back(frame_gray - frame_bin); - channels.push_back(frame_gray - frame_bin); - }else{ - frame_bin_copy.setTo(120, frame_bin_copy); - frame_bin_low.setTo(90, frame_bin_low); - channels.push_back(frame_gray + frame_bin_copy); - channels.push_back(frame_gray + frame_last_and_low); - channels.push_back(frame_gray + frame_bin_low); - //channels.push_back(frame_gray + frame_bin); - } - merge(channels, frame); - } - - return points; -} +/* Copyright (c) 2012 Patrick Ruoff + * + * Permission to use, copy, modify, and/or distribute this software for any + * purpose with or without fee is hereby granted, provided that the above + * copyright notice and this permission notice appear in all copies. + */ + +#include "point_extractor.h" +#include + + +using namespace cv; +using namespace std; + + +PointExtractor::PointExtractor(){ + //if (!AllocConsole()){} + //else SetConsoleTitle("debug"); + //freopen("CON", "w", stdout); + //freopen("CON", "w", stderr); +} +// ---------------------------------------------------------------------------- +const vector& PointExtractor::extract_points(Mat frame, float /*dt*/, bool draw_output) +{ + const int W = frame.cols; + const int H = frame.rows; + + if (frame_last.cols != W || frame_last.rows != H) + { + frame_last = cv::Mat(); + } + + // clear old points + points.clear(); + + // convert to grayscale + Mat frame_gray; + cvtColor(frame, frame_gray, CV_RGB2GRAY); + + int secondary = threshold_secondary_val; + + // mask for everything that passes the threshold (or: the upper threshold of the hysteresis) + Mat frame_bin; + // only used if draw_output + Mat frame_bin_copy; + // mask for everything that passes + Mat frame_bin_low; + // mask for lower-threshold && combined result of last, needs to remain in scope until drawing, but is only used if secondary != 0 + Mat frame_last_and_low; + + if(secondary==0){ + threshold(frame_gray, frame_bin, threshold_val, 255, THRESH_BINARY); + }else{ + // we recombine a number of buffers, this might be slower than a single loop of per-pixel logic + // but it might as well be faster if openCV makes good use of SIMD + float t = threshold_val; + //float hyst = float(threshold_secondary_val)/512.; + //threshold(frame_gray, frame_bin, (t + ((255.-t)*hyst)), 255, THRESH_BINARY); + float hyst = float(threshold_secondary_val)/256.; + threshold(frame_gray, frame_bin, t, 255, THRESH_BINARY); + threshold(frame_gray, frame_bin_low,std::max(float(1), t - (t*hyst)), 255, THRESH_BINARY); + + if(draw_output) frame_bin.copyTo(frame_bin_copy); + 
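        // The secondary threshold acts as a hysteresis band: with
        // hyst = threshold_secondary_val/256 the lower threshold becomes
        // max(1, t - t*hyst); for example t = 128 and threshold_secondary_val = 64
        // give hyst = 0.25 and a lower threshold of 96. The block below then
        // keeps a pixel lit if it was lit in the previous frame and is still
        // above that lower threshold, which damps flicker from pixel noise.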
if(frame_last.empty()){ + frame_bin.copyTo(frame_last); + }else{ + // keep pixels from last if they are above lower threshold + bitwise_and(frame_last, frame_bin_low, frame_last_and_low); + // union of pixels >= higher threshold and pixels >= lower threshold + bitwise_or(frame_bin, frame_last_and_low, frame_last); + frame_last.copyTo(frame_bin); + } + } + unsigned int region_size_min = 3.14*min_size*min_size/4.0; + unsigned int region_size_max = 3.14*max_size*max_size/4.0; + + int blob_index = 1; + for (int y=0; y= 255) break; + for (int x=0; x= 255) break; + + // find connected components with floodfill + if (frame_bin.at(y,x) != 255) continue; + Rect rect; + + floodFill(frame_bin, Point(x,y), Scalar(blob_index), &rect, Scalar(0), Scalar(0), FLOODFILL_FIXED_RANGE); + blob_index++; + + // calculate the size of the connected component + unsigned int region_size = 0; + for (int i=rect.y; i < (rect.y+rect.height); i++) + { + for (int j=rect.x; j < (rect.x+rect.width); j++) + { + if (frame_bin.at(i,j) != blob_index-1) continue; + region_size++; + } + } + + if (region_size < region_size_min || region_size > region_size_max) continue; + + // calculate the center of mass: + // mx = (sum_ij j*f(frame_grey_ij)) / (sum_ij f(frame_grey_ij)) + // my = ... + // f maps from [threshold,256] -> [0, 1], lower values are mapped to 0 + float m = 0; + float mx = 0; + float my = 0; + for (int i=rect.y; i < (rect.y+rect.height); i++) + { + for (int j=rect.x; j < (rect.x+rect.width); j++) + { + if (frame_bin.at(i,j) != blob_index-1) continue; + float val; + + if(secondary==0){ + val = frame_gray.at(i,j); + val = float(val - threshold_val)/(256 - threshold_val); + val = val*val; // makes it more stable (less emphasis on low values, more on the peak) + }else{ + //hysteresis point detection gets stability from ignoring pixel noise so we decidedly leave the actual pixel values out of the picture + val = frame_last.at(i,j) / 256.; + } + + m += val; + mx += j * val; + my += i * val; + } + } + + // convert to centered camera coordinate system with y axis upwards + Vec2f c; + c[0] = (mx/m - W/2)/W; + c[1] = -(my/m - H/2)/W; + //qDebug()< "< channels; + if(secondary==0){ + frame_bin.setTo(170, frame_bin); + channels.push_back(frame_gray + frame_bin); + channels.push_back(frame_gray - frame_bin); + channels.push_back(frame_gray - frame_bin); + }else{ + frame_bin_copy.setTo(120, frame_bin_copy); + frame_bin_low.setTo(90, frame_bin_low); + channels.push_back(frame_gray + frame_bin_copy); + channels.push_back(frame_gray + frame_last_and_low); + channels.push_back(frame_gray + frame_bin_low); + //channels.push_back(frame_gray + frame_bin); + } + merge(channels, frame); + } + + return points; +} diff --git a/ftnoir_tracker_pt/point_extractor.h b/ftnoir_tracker_pt/point_extractor.h index ff36f3ce..8a76747b 100644 --- a/ftnoir_tracker_pt/point_extractor.h +++ b/ftnoir_tracker_pt/point_extractor.h @@ -1,35 +1,35 @@ -/* Copyright (c) 2012 Patrick Ruoff - * - * Permission to use, copy, modify, and/or distribute this software for any - * purpose with or without fee is hereby granted, provided that the above - * copyright notice and this permission notice appear in all copies. 
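[Editor's note] extract_points() above reports each blob centre in a centred, width-normalised coordinate system with the y axis pointing up, and the drawing code in Tracker::run() earlier in this patch applies the inverse mapping to get back to pixels. Both directions side by side; a sketch only, the function names are illustrative, the formulas are the ones used in those two places.

    #include <opencv2/core/core.hpp>

    // Pixel (px, py) -> normalised point, as at the end of extract_points():
    // x is centred and divided by the image width, y is flipped so it points up.
    static cv::Vec2f to_normalized(float px, float py, int W, int H)
    {
        return cv::Vec2f((px - W/2.0f) / W, -(py - H/2.0f) / W);
    }

    // Normalised point -> pixel, as in the drawing code of Tracker::run();
    // truncated to int as in the original.
    static cv::Point to_pixel(const cv::Vec2f& p, int cols, int rows)
    {
        return cv::Point((int)(p[0] * cols + cols / 2),
                         (int)(-p[1] * cols + rows / 2));
    }

Scaling both axes by the width keeps the aspect ratio of the image intact in the normalised coordinates.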
- */ - -#ifndef POINTEXTRACTOR_H -#define POINTEXTRACTOR_H - -#include -#include - -// ---------------------------------------------------------------------------- -// Extracts points from an opencv image -class PointExtractor -{ -public: - // extracts points from frame and draws some processing info into frame, if draw_output is set - // dt: time since last call in seconds - // WARNING: returned reference is valid as long as object - const std::vector& extract_points(cv::Mat frame, float dt, bool draw_output); - const std::vector& get_points() { return points; } - PointExtractor(); - - int threshold_val; - int threshold_secondary_val; - int min_size, max_size; - -protected: - std::vector points; - cv::Mat frame_last; -}; - -#endif //POINTEXTRACTOR_H +/* Copyright (c) 2012 Patrick Ruoff + * + * Permission to use, copy, modify, and/or distribute this software for any + * purpose with or without fee is hereby granted, provided that the above + * copyright notice and this permission notice appear in all copies. + */ + +#ifndef POINTEXTRACTOR_H +#define POINTEXTRACTOR_H + +#include +#include + +// ---------------------------------------------------------------------------- +// Extracts points from an opencv image +class PointExtractor +{ +public: + // extracts points from frame and draws some processing info into frame, if draw_output is set + // dt: time since last call in seconds + // WARNING: returned reference is valid as long as object + const std::vector& extract_points(cv::Mat frame, float dt, bool draw_output); + const std::vector& get_points() { return points; } + PointExtractor(); + + int threshold_val; + int threshold_secondary_val; + int min_size, max_size; + +protected: + std::vector points; + cv::Mat frame_last; +}; + +#endif //POINTEXTRACTOR_H diff --git a/ftnoir_tracker_pt/point_tracker.cpp b/ftnoir_tracker_pt/point_tracker.cpp index f83ff437..e9892d67 100644 --- a/ftnoir_tracker_pt/point_tracker.cpp +++ b/ftnoir_tracker_pt/point_tracker.cpp @@ -1,375 +1,375 @@ -/* Copyright (c) 2012 Patrick Ruoff - * - * Permission to use, copy, modify, and/or distribute this software for any - * purpose with or without fee is hereby granted, provided that the above - * copyright notice and this permission notice appear in all copies. - */ - -#include "point_tracker.h" - -#include -#include -#include - -#include - -using namespace cv; -using namespace std; - -const float PI = 3.14159265358979323846f; - -// ---------------------------------------------------------------------------- -static void get_row(const Matx33f& m, int i, Vec3f& v) -{ - v[0] = m(i,0); - v[1] = m(i,1); - v[2] = m(i,2); -} - -static void set_row(Matx33f& m, int i, const Vec3f& v) -{ - m(i,0) = v[0]; - m(i,1) = v[1]; - m(i,2) = v[2]; -} - -// ---------------------------------------------------------------------------- -PointModel::PointModel(Vec3f M01, Vec3f M02) - : M01(M01), - M02(M02) -{ - // calculate u - u = M01.cross(M02); - u /= norm(u); - - // calculate projection matrix on M01,M02 plane - float s11 = M01.dot(M01); - float s12 = M01.dot(M02); - float s22 = M02.dot(M02); - P = 1.0/(s11*s22-s12*s12) * Matx22f(s22, -s12, - -s12, s11); - - // calculate d and d_order for simple freetrack-like point correspondence - vector points; - points.push_back(Vec2f(0,0)); - points.push_back(Vec2f(M01[0], M01[1])); - points.push_back(Vec2f(M02[0], M02[1])); - // fit line to orthographically projected points - // ERROR: yields wrong results with colinear points?! 
- /* - Vec4f line; - fitLine(points, line, CV_DIST_L2, 0, 0.01, 0.01); - d[0] = line[0]; d[1] = line[1]; - */ - // TODO: fix this - d = Vec2f(M01[0]-M02[0], M01[1]-M02[1]); - - // sort model points - get_d_order(points, d_order); -} - -#ifdef OPENTRACK_API -static bool d_vals_sort(const pair a, const pair b) -{ - return a.first < b.first; -} -#endif - -void PointModel::get_d_order(const std::vector& points, int d_order[]) const -{ - // get sort indices with respect to d scalar product - vector< pair > d_vals; - for (unsigned i = 0; i(d.dot(points[i]), i)); - - std::sort(d_vals.begin(), - d_vals.end(), -#ifdef OPENTRACK_API - d_vals_sort -#else - comp -#endif - ); - - for (unsigned i = 0; i& points, float f, float dt) -{ - if (!dynamic_pose_resolution) init_phase = true; - - dt_valid += dt; - // if there was no valid tracking result for too long, do a reset - if (dt_valid > dt_reset) - { - //qDebug()<<"dt_valid "< dt_reset "<& points, float f) -{ - if (init_phase) { - // We do a simple freetrack-like sorting in the init phase... - // sort points - int point_d_order[PointModel::N_POINTS]; - point_model->get_d_order(points, point_d_order); - - // set correspondences - for (int i=0; id_order[i]] = points[point_d_order[i]]; - } - } - else { - // ... otherwise we look at the distance to the projection of the expected model points - // project model points under current pose - p_exp[0] = project(Vec3f(0,0,0), f); - p_exp[1] = project(point_model->M01, f); - p_exp[2] = project(point_model->M02, f); - - // set correspondences by minimum distance to projected model point - bool point_taken[PointModel::N_POINTS]; - for (int i=0; iM01)/Z0; - epsilon_2 = k.dot(point_model->M02)/Z0; - - // vector of scalar products and - Vec2f I0_M0i(p[1][0]*(1.0 + epsilon_1) - p[0][0], - p[2][0]*(1.0 + epsilon_2) - p[0][0]); - Vec2f J0_M0i(p[1][1]*(1.0 + epsilon_1) - p[0][1], - p[2][1]*(1.0 + epsilon_2) - p[0][1]); - - // construct projection of I, J onto M0i plane: I0 and J0 - I0_coeff = point_model->P * I0_M0i; - J0_coeff = point_model->P * J0_M0i; - I0 = I0_coeff[0]*point_model->M01 + I0_coeff[1]*point_model->M02; - J0 = J0_coeff[0]*point_model->M01 + J0_coeff[1]*point_model->M02; - - // calculate u component of I, J - float II0 = I0.dot(I0); - float IJ0 = I0.dot(J0); - float JJ0 = J0.dot(J0); - float rho, theta; - if (JJ0 == II0) { - rho = sqrt(abs(2*IJ0)); - theta = -PI/4; - if (IJ0<0) theta *= -1; - } - else { - rho = sqrt(sqrt( (JJ0-II0)*(JJ0-II0) + 4*IJ0*IJ0 )); - theta = atan( -2*IJ0 / (JJ0-II0) ); - if (JJ0 - II0 < 0) theta += PI; - theta /= 2; - } - - // construct the two solutions - I_1 = I0 + rho*cos(theta)*point_model->u; - I_2 = I0 - rho*cos(theta)*point_model->u; - - J_1 = J0 + rho*sin(theta)*point_model->u; - J_2 = J0 - rho*sin(theta)*point_model->u; - - float norm_const = 1.0/norm(I_1); // all have the same norm - - // create rotation matrices - I_1 *= norm_const; J_1 *= norm_const; - I_2 *= norm_const; J_2 *= norm_const; - - set_row(R_1, 0, I_1); - set_row(R_1, 1, J_1); - set_row(R_1, 2, I_1.cross(J_1)); - - set_row(R_2, 0, I_2); - set_row(R_2, 1, J_2); - set_row(R_2, 2, I_2.cross(J_2)); - - // the single translation solution - Z0 = norm_const * f; - - // pick the rotation solution closer to the expected one - // in simple metric d(A,B) = || I - A * B^T || - float R_1_deviation = norm(Matx33f::eye() - R_expected * R_1.t()); - float R_2_deviation = norm(Matx33f::eye() - R_expected * R_2.t()); - - if (R_1_deviation < R_2_deviation) - R_current = &R_1; - else - R_current = &R_2; - - get_row(*R_current, 
2, k); - - // check for convergence condition - if (abs(epsilon_1 - old_epsilon_1) + abs(epsilon_2 - old_epsilon_2) < EPS_THRESHOLD) - break; - old_epsilon_1 = epsilon_1; - old_epsilon_2 = epsilon_2; - } - - // apply results - X_CM.R = *R_current; - X_CM.t[0] = p[0][0] * Z0/f; - X_CM.t[1] = p[0][1] * Z0/f; - X_CM.t[2] = Z0; - - return i; - - //Rodrigues(X_CM.R, r); - //qDebug()<<"iter: "< +#include +#include + +#include + +using namespace cv; +using namespace std; + +const float PI = 3.14159265358979323846f; + +// ---------------------------------------------------------------------------- +static void get_row(const Matx33f& m, int i, Vec3f& v) +{ + v[0] = m(i,0); + v[1] = m(i,1); + v[2] = m(i,2); +} + +static void set_row(Matx33f& m, int i, const Vec3f& v) +{ + m(i,0) = v[0]; + m(i,1) = v[1]; + m(i,2) = v[2]; +} + +// ---------------------------------------------------------------------------- +PointModel::PointModel(Vec3f M01, Vec3f M02) + : M01(M01), + M02(M02) +{ + // calculate u + u = M01.cross(M02); + u /= norm(u); + + // calculate projection matrix on M01,M02 plane + float s11 = M01.dot(M01); + float s12 = M01.dot(M02); + float s22 = M02.dot(M02); + P = 1.0/(s11*s22-s12*s12) * Matx22f(s22, -s12, + -s12, s11); + + // calculate d and d_order for simple freetrack-like point correspondence + vector points; + points.push_back(Vec2f(0,0)); + points.push_back(Vec2f(M01[0], M01[1])); + points.push_back(Vec2f(M02[0], M02[1])); + // fit line to orthographically projected points + // ERROR: yields wrong results with colinear points?! + /* + Vec4f line; + fitLine(points, line, CV_DIST_L2, 0, 0.01, 0.01); + d[0] = line[0]; d[1] = line[1]; + */ + // TODO: fix this + d = Vec2f(M01[0]-M02[0], M01[1]-M02[1]); + + // sort model points + get_d_order(points, d_order); +} + +#ifdef OPENTRACK_API +static bool d_vals_sort(const pair a, const pair b) +{ + return a.first < b.first; +} +#endif + +void PointModel::get_d_order(const std::vector& points, int d_order[]) const +{ + // get sort indices with respect to d scalar product + vector< pair > d_vals; + for (unsigned i = 0; i(d.dot(points[i]), i)); + + std::sort(d_vals.begin(), + d_vals.end(), +#ifdef OPENTRACK_API + d_vals_sort +#else + comp +#endif + ); + + for (unsigned i = 0; i& points, float f, float dt) +{ + if (!dynamic_pose_resolution) init_phase = true; + + dt_valid += dt; + // if there was no valid tracking result for too long, do a reset + if (dt_valid > dt_reset) + { + //qDebug()<<"dt_valid "< dt_reset "<& points, float f) +{ + if (init_phase) { + // We do a simple freetrack-like sorting in the init phase... + // sort points + int point_d_order[PointModel::N_POINTS]; + point_model->get_d_order(points, point_d_order); + + // set correspondences + for (int i=0; id_order[i]] = points[point_d_order[i]]; + } + } + else { + // ... 
otherwise we look at the distance to the projection of the expected model points + // project model points under current pose + p_exp[0] = project(Vec3f(0,0,0), f); + p_exp[1] = project(point_model->M01, f); + p_exp[2] = project(point_model->M02, f); + + // set correspondences by minimum distance to projected model point + bool point_taken[PointModel::N_POINTS]; + for (int i=0; iM01)/Z0; + epsilon_2 = k.dot(point_model->M02)/Z0; + + // vector of scalar products and + Vec2f I0_M0i(p[1][0]*(1.0 + epsilon_1) - p[0][0], + p[2][0]*(1.0 + epsilon_2) - p[0][0]); + Vec2f J0_M0i(p[1][1]*(1.0 + epsilon_1) - p[0][1], + p[2][1]*(1.0 + epsilon_2) - p[0][1]); + + // construct projection of I, J onto M0i plane: I0 and J0 + I0_coeff = point_model->P * I0_M0i; + J0_coeff = point_model->P * J0_M0i; + I0 = I0_coeff[0]*point_model->M01 + I0_coeff[1]*point_model->M02; + J0 = J0_coeff[0]*point_model->M01 + J0_coeff[1]*point_model->M02; + + // calculate u component of I, J + float II0 = I0.dot(I0); + float IJ0 = I0.dot(J0); + float JJ0 = J0.dot(J0); + float rho, theta; + if (JJ0 == II0) { + rho = sqrt(abs(2*IJ0)); + theta = -PI/4; + if (IJ0<0) theta *= -1; + } + else { + rho = sqrt(sqrt( (JJ0-II0)*(JJ0-II0) + 4*IJ0*IJ0 )); + theta = atan( -2*IJ0 / (JJ0-II0) ); + if (JJ0 - II0 < 0) theta += PI; + theta /= 2; + } + + // construct the two solutions + I_1 = I0 + rho*cos(theta)*point_model->u; + I_2 = I0 - rho*cos(theta)*point_model->u; + + J_1 = J0 + rho*sin(theta)*point_model->u; + J_2 = J0 - rho*sin(theta)*point_model->u; + + float norm_const = 1.0/norm(I_1); // all have the same norm + + // create rotation matrices + I_1 *= norm_const; J_1 *= norm_const; + I_2 *= norm_const; J_2 *= norm_const; + + set_row(R_1, 0, I_1); + set_row(R_1, 1, J_1); + set_row(R_1, 2, I_1.cross(J_1)); + + set_row(R_2, 0, I_2); + set_row(R_2, 1, J_2); + set_row(R_2, 2, I_2.cross(J_2)); + + // the single translation solution + Z0 = norm_const * f; + + // pick the rotation solution closer to the expected one + // in simple metric d(A,B) = || I - A * B^T || + float R_1_deviation = norm(Matx33f::eye() - R_expected * R_1.t()); + float R_2_deviation = norm(Matx33f::eye() - R_expected * R_2.t()); + + if (R_1_deviation < R_2_deviation) + R_current = &R_1; + else + R_current = &R_2; + + get_row(*R_current, 2, k); + + // check for convergence condition + if (abs(epsilon_1 - old_epsilon_1) + abs(epsilon_2 - old_epsilon_2) < EPS_THRESHOLD) + break; + old_epsilon_1 = epsilon_1; + old_epsilon_2 = epsilon_2; + } + + // apply results + X_CM.R = *R_current; + X_CM.t[0] = p[0][0] * Z0/f; + X_CM.t[1] = p[0][1] * Z0/f; + X_CM.t[2] = Z0; + + return i; + + //Rodrigues(X_CM.R, r); + //qDebug()<<"iter: "< -#ifndef OPENTRACK_API -# include -#else +/* Copyright (c) 2012 Patrick Ruoff + * + * Permission to use, copy, modify, and/or distribute this software for any + * purpose with or without fee is hereby granted, provided that the above + * copyright notice and this permission notice appear in all copies. 
+ */ + +#ifndef POINTTRACKER_H +#define POINTTRACKER_H + +#include +#ifndef OPENTRACK_API +# include +#else # include -#endif -#include - -// ---------------------------------------------------------------------------- -// Affine frame trafo -class FrameTrafo -{ -public: - FrameTrafo() : R(cv::Matx33f::eye()), t(0,0,0) {} - FrameTrafo(const cv::Matx33f& R, const cv::Vec3f& t) : R(R),t(t) {} - - cv::Matx33f R; - cv::Vec3f t; -}; - -inline FrameTrafo operator*(const FrameTrafo& X, const FrameTrafo& Y) -{ - return FrameTrafo(X.R*Y.R, X.R*Y.t + X.t); -} - -inline FrameTrafo operator*(const cv::Matx33f& X, const FrameTrafo& Y) -{ - return FrameTrafo(X*Y.R, X*Y.t); -} - -inline FrameTrafo operator*(const FrameTrafo& X, const cv::Matx33f& Y) -{ - return FrameTrafo(X.R*Y, X.t); -} - -inline cv::Vec3f operator*(const FrameTrafo& X, const cv::Vec3f& v) -{ - return X.R*v + X.t; -} - - -// ---------------------------------------------------------------------------- -// Describes a 3-point model -// nomenclature as in -// [Denis Oberkampf, Daniel F. DeMenthon, Larry S. Davis: "Iterative Pose Estimation Using Coplanar Feature Points"] -class PointModel -{ - friend class PointTracker; -public: - static const int N_POINTS = 3; - - PointModel(cv::Vec3f M01, cv::Vec3f M02); - - const cv::Vec3f& get_M01() const { return M01; }; - const cv::Vec3f& get_M02() const { return M02; }; - -protected: - cv::Vec3f M01; // M01 in model frame - cv::Vec3f M02; // M02 in model frame - - cv::Vec3f u; // unit vector perpendicular to M01,M02-plane - - cv::Matx22f P; - - cv::Vec2f d; // discriminant vector for point correspondence - int d_order[3]; // sorting of projected model points with respect to d scalar product - - void get_d_order(const std::vector& points, int d_order[]) const; -}; - -// ---------------------------------------------------------------------------- -// Tracks a 3-point model -// implementing the POSIT algorithm for coplanar points as presented in -// [Denis Oberkampf, Daniel F. DeMenthon, Larry S. 
Davis: "Iterative Pose Estimation Using Coplanar Feature Points"] -class PointTracker -{ -public: - PointTracker(); - - // track the pose using the set of normalized point coordinates (x pos in range -0.5:0.5) - // f : (focal length)/(sensor width) - // dt : time since last call - bool track(const std::vector& points, float f, float dt); - std::shared_ptr point_model; - - bool dynamic_pose_resolution; - float dt_reset; - - FrameTrafo get_pose() const { return X_CM; } - void reset(); - -protected: - inline cv::Vec2f project(const cv::Vec3f& v_M, float f) - { - cv::Vec3f v_C = X_CM * v_M; - return cv::Vec2f(f*v_C[0]/v_C[2], f*v_C[1]/v_C[2]); - } - - bool find_correspondences(const std::vector& points, float f); - - cv::Vec2f p[PointModel::N_POINTS]; // the points in model order - cv::Vec2f p_exp[PointModel::N_POINTS]; // the expected point positions - - void predict(float dt); - void update_velocities(float dt); - void reset_velocities(); - - - int POSIT(float f); // The POSIT algorithm, returns the number of iterations - - bool init_phase; - float dt_valid; // time since last valid tracking result - cv::Vec3f v_t; // velocities - cv::Vec3f v_r; - FrameTrafo X_CM; // trafo from model to camera - FrameTrafo X_CM_old; -}; - -#endif //POINTTRACKER_H +#endif +#include + +// ---------------------------------------------------------------------------- +// Affine frame trafo +class FrameTrafo +{ +public: + FrameTrafo() : R(cv::Matx33f::eye()), t(0,0,0) {} + FrameTrafo(const cv::Matx33f& R, const cv::Vec3f& t) : R(R),t(t) {} + + cv::Matx33f R; + cv::Vec3f t; +}; + +inline FrameTrafo operator*(const FrameTrafo& X, const FrameTrafo& Y) +{ + return FrameTrafo(X.R*Y.R, X.R*Y.t + X.t); +} + +inline FrameTrafo operator*(const cv::Matx33f& X, const FrameTrafo& Y) +{ + return FrameTrafo(X*Y.R, X*Y.t); +} + +inline FrameTrafo operator*(const FrameTrafo& X, const cv::Matx33f& Y) +{ + return FrameTrafo(X.R*Y, X.t); +} + +inline cv::Vec3f operator*(const FrameTrafo& X, const cv::Vec3f& v) +{ + return X.R*v + X.t; +} + + +// ---------------------------------------------------------------------------- +// Describes a 3-point model +// nomenclature as in +// [Denis Oberkampf, Daniel F. DeMenthon, Larry S. Davis: "Iterative Pose Estimation Using Coplanar Feature Points"] +class PointModel +{ + friend class PointTracker; +public: + static const int N_POINTS = 3; + + PointModel(cv::Vec3f M01, cv::Vec3f M02); + + const cv::Vec3f& get_M01() const { return M01; }; + const cv::Vec3f& get_M02() const { return M02; }; + +protected: + cv::Vec3f M01; // M01 in model frame + cv::Vec3f M02; // M02 in model frame + + cv::Vec3f u; // unit vector perpendicular to M01,M02-plane + + cv::Matx22f P; + + cv::Vec2f d; // discriminant vector for point correspondence + int d_order[3]; // sorting of projected model points with respect to d scalar product + + void get_d_order(const std::vector& points, int d_order[]) const; +}; + +// ---------------------------------------------------------------------------- +// Tracks a 3-point model +// implementing the POSIT algorithm for coplanar points as presented in +// [Denis Oberkampf, Daniel F. DeMenthon, Larry S. 
Davis: "Iterative Pose Estimation Using Coplanar Feature Points"] +class PointTracker +{ +public: + PointTracker(); + + // track the pose using the set of normalized point coordinates (x pos in range -0.5:0.5) + // f : (focal length)/(sensor width) + // dt : time since last call + bool track(const std::vector& points, float f, float dt); + std::shared_ptr point_model; + + bool dynamic_pose_resolution; + float dt_reset; + + FrameTrafo get_pose() const { return X_CM; } + void reset(); + +protected: + inline cv::Vec2f project(const cv::Vec3f& v_M, float f) + { + cv::Vec3f v_C = X_CM * v_M; + return cv::Vec2f(f*v_C[0]/v_C[2], f*v_C[1]/v_C[2]); + } + + bool find_correspondences(const std::vector& points, float f); + + cv::Vec2f p[PointModel::N_POINTS]; // the points in model order + cv::Vec2f p_exp[PointModel::N_POINTS]; // the expected point positions + + void predict(float dt); + void update_velocities(float dt); + void reset_velocities(); + + + int POSIT(float f); // The POSIT algorithm, returns the number of iterations + + bool init_phase; + float dt_valid; // time since last valid tracking result + cv::Vec3f v_t; // velocities + cv::Vec3f v_r; + FrameTrafo X_CM; // trafo from model to camera + FrameTrafo X_CM_old; +}; + +#endif //POINTTRACKER_H diff --git a/ftnoir_tracker_pt/pt_video_widget.cpp b/ftnoir_tracker_pt/pt_video_widget.cpp index 02817cbf..cb3dc48e 100644 --- a/ftnoir_tracker_pt/pt_video_widget.cpp +++ b/ftnoir_tracker_pt/pt_video_widget.cpp @@ -1,64 +1,64 @@ -/* Copyright (c) 2012 Patrick Ruoff - * - * Permission to use, copy, modify, and/or distribute this software for any - * purpose with or without fee is hereby granted, provided that the above - * copyright notice and this permission notice appear in all copies. - * - * 20130312, WVR: Add 7 lines to resizeGL after resize_frame. This should lower CPU-load. 
- */ - -#include "pt_video_widget.h" - -#include -#include - -using namespace cv; -using namespace std; - -void PTVideoWidget::update_image(const cv::Mat& frame) -{ - QMutexLocker foo(&mtx); - _frame = frame.clone(); - freshp = true; -} - -// ---------------------------------------------------------------------------- -VideoWidgetDialog::VideoWidgetDialog(QWidget *parent, FrameProvider* provider) - : QDialog(parent), - video_widget(NULL) -{ - const int VIDEO_FRAME_WIDTH = 640; - const int VIDEO_FRAME_HEIGHT = 480; - - video_widget = new PTVideoWidget(this, provider); - - QHBoxLayout* layout = new QHBoxLayout(); - layout->setContentsMargins(0, 0, 0, 0); - layout->addWidget(video_widget); - if (this->layout()) delete this->layout(); - setLayout(layout); - resize(VIDEO_FRAME_WIDTH, VIDEO_FRAME_HEIGHT); -} - -void PTVideoWidget::update_and_repaint() -{ - QMutexLocker foo(&mtx); - if (_frame.empty() || !freshp) - return; - freshp = false; - QImage qframe = QImage(_frame.cols, _frame.rows, QImage::Format_RGB888); - uchar* data = qframe.bits(); - const int pitch = qframe.bytesPerLine(); - for (int y = 0; y < _frame.rows; y++) - for (int x = 0; x < _frame.cols; x++) - { - const auto& elt = _frame.at(y, x); - const cv::Scalar elt2 = static_cast(elt); - data[y * pitch + x * 3 + 0] = elt2.val[2]; - data[y * pitch + x * 3 + 1] = elt2.val[1]; - data[y * pitch + x * 3 + 2] = elt2.val[0]; - } - qframe = qframe.scaled(size(), Qt::IgnoreAspectRatio, Qt::FastTransformation); - texture = qframe; - update(); -} +/* Copyright (c) 2012 Patrick Ruoff + * + * Permission to use, copy, modify, and/or distribute this software for any + * purpose with or without fee is hereby granted, provided that the above + * copyright notice and this permission notice appear in all copies. + * + * 20130312, WVR: Add 7 lines to resizeGL after resize_frame. This should lower CPU-load. 
+ */ + +#include "pt_video_widget.h" + +#include +#include + +using namespace cv; +using namespace std; + +void PTVideoWidget::update_image(const cv::Mat& frame) +{ + QMutexLocker foo(&mtx); + _frame = frame.clone(); + freshp = true; +} + +// ---------------------------------------------------------------------------- +VideoWidgetDialog::VideoWidgetDialog(QWidget *parent, FrameProvider* provider) + : QDialog(parent), + video_widget(NULL) +{ + const int VIDEO_FRAME_WIDTH = 640; + const int VIDEO_FRAME_HEIGHT = 480; + + video_widget = new PTVideoWidget(this, provider); + + QHBoxLayout* layout = new QHBoxLayout(); + layout->setContentsMargins(0, 0, 0, 0); + layout->addWidget(video_widget); + if (this->layout()) delete this->layout(); + setLayout(layout); + resize(VIDEO_FRAME_WIDTH, VIDEO_FRAME_HEIGHT); +} + +void PTVideoWidget::update_and_repaint() +{ + QMutexLocker foo(&mtx); + if (_frame.empty() || !freshp) + return; + freshp = false; + QImage qframe = QImage(_frame.cols, _frame.rows, QImage::Format_RGB888); + uchar* data = qframe.bits(); + const int pitch = qframe.bytesPerLine(); + for (int y = 0; y < _frame.rows; y++) + for (int x = 0; x < _frame.cols; x++) + { + const auto& elt = _frame.at(y, x); + const cv::Scalar elt2 = static_cast(elt); + data[y * pitch + x * 3 + 0] = elt2.val[2]; + data[y * pitch + x * 3 + 1] = elt2.val[1]; + data[y * pitch + x * 3 + 2] = elt2.val[0]; + } + qframe = qframe.scaled(size(), Qt::IgnoreAspectRatio, Qt::FastTransformation); + texture = qframe; + update(); +} diff --git a/ftnoir_tracker_pt/pt_video_widget.h b/ftnoir_tracker_pt/pt_video_widget.h index f7de4db8..1be5f5f2 100644 --- a/ftnoir_tracker_pt/pt_video_widget.h +++ b/ftnoir_tracker_pt/pt_video_widget.h @@ -1,71 +1,71 @@ -/* Copyright (c) 2012 Patrick Ruoff - * - * Permission to use, copy, modify, and/or distribute this software for any - * purpose with or without fee is hereby granted, provided that the above - * copyright notice and this permission notice appear in all copies. - */ - -#pragma once - -#include "frame_observer.h" -#include -#include -#include -#include -#ifndef OPENTRACK_API -# include -# include -#else +/* Copyright (c) 2012 Patrick Ruoff + * + * Permission to use, copy, modify, and/or distribute this software for any + * purpose with or without fee is hereby granted, provided that the above + * copyright notice and this permission notice appear in all copies. 
+ */ + +#pragma once + +#include "frame_observer.h" +#include +#include +#include +#include +#ifndef OPENTRACK_API +# include +# include +#else # include -# if defined(_WIN32) -# include -# endif -#endif -#include -#include -#include - -class PTVideoWidget : public QWidget, public FrameObserver -{ - Q_OBJECT - -public: - PTVideoWidget(QWidget *parent, FrameProvider* provider) : - QWidget(parent), - /* to avoid linker errors */ FrameObserver(provider), - freshp(false) - { - connect(&timer, SIGNAL(timeout()), this, SLOT(update_and_repaint())); - timer.start(40); - } - void update_image(const cv::Mat &frame); - void update_frame_and_points() {} -protected slots: - void paintEvent( QPaintEvent* e ) { - QMutexLocker foo(&mtx); - QPainter painter(this); - painter.drawImage(e->rect(), texture); - } - void update_and_repaint(); -private: - QMutex mtx; - QImage texture; - QTimer timer; - cv::Mat _frame; - bool freshp; -}; - -// ---------------------------------------------------------------------------- -// A VideoWidget embedded in a dialog frame -class VideoWidgetDialog : public QDialog -{ - Q_OBJECT -public: - VideoWidgetDialog(QWidget *parent, FrameProvider* provider); - virtual ~VideoWidgetDialog() {} - - PTVideoWidget* get_video_widget() { return video_widget; } - -private: - PTVideoWidget* video_widget; -}; +# if defined(_WIN32) +# include +# endif +#endif +#include +#include +#include + +class PTVideoWidget : public QWidget, public FrameObserver +{ + Q_OBJECT + +public: + PTVideoWidget(QWidget *parent, FrameProvider* provider) : + QWidget(parent), + /* to avoid linker errors */ FrameObserver(provider), + freshp(false) + { + connect(&timer, SIGNAL(timeout()), this, SLOT(update_and_repaint())); + timer.start(40); + } + void update_image(const cv::Mat &frame); + void update_frame_and_points() {} +protected slots: + void paintEvent( QPaintEvent* e ) { + QMutexLocker foo(&mtx); + QPainter painter(this); + painter.drawImage(e->rect(), texture); + } + void update_and_repaint(); +private: + QMutex mtx; + QImage texture; + QTimer timer; + cv::Mat _frame; + bool freshp; +}; + +// ---------------------------------------------------------------------------- +// A VideoWidget embedded in a dialog frame +class VideoWidgetDialog : public QDialog +{ + Q_OBJECT +public: + VideoWidgetDialog(QWidget *parent, FrameProvider* provider); + virtual ~VideoWidgetDialog() {} + + PTVideoWidget* get_video_widget() { return video_widget; } + +private: + PTVideoWidget* video_widget; +}; diff --git a/ftnoir_tracker_pt/trans_calib.cpp b/ftnoir_tracker_pt/trans_calib.cpp index 9b75a1b6..729a0b7f 100644 --- a/ftnoir_tracker_pt/trans_calib.cpp +++ b/ftnoir_tracker_pt/trans_calib.cpp @@ -1,44 +1,44 @@ -/* Copyright (c) 2012 Patrick Ruoff - * - * Permission to use, copy, modify, and/or distribute this software for any - * purpose with or without fee is hereby granted, provided that the above - * copyright notice and this permission notice appear in all copies. 
- */ - -#include "trans_calib.h" - -using namespace cv; - -//----------------------------------------------------------------------------- -TranslationCalibrator::TranslationCalibrator() -{ - reset(); -} - -void TranslationCalibrator::reset() -{ - P = Matx66f::zeros(); - y = Vec6f(0,0,0, 0,0,0); -} - -void TranslationCalibrator::update(const Matx33f& R_CM_k, const Vec3f& t_CM_k) -{ - Matx H_k_T = Matx::zeros(); - for (int i=0; i<3; ++i) { - for (int j=0; j<3; ++j) { - H_k_T(i,j) = R_CM_k(j,i); - } - } - for (int i=0; i<3; ++i) - { - H_k_T(3+i,i) = 1.0; - } - P += H_k_T * H_k_T.t(); - y += H_k_T * t_CM_k; -} - -Vec3f TranslationCalibrator::get_estimate() -{ - Vec6f x = P.inv() * y; - return Vec3f(-x[0], -x[1], -x[2]); +/* Copyright (c) 2012 Patrick Ruoff + * + * Permission to use, copy, modify, and/or distribute this software for any + * purpose with or without fee is hereby granted, provided that the above + * copyright notice and this permission notice appear in all copies. + */ + +#include "trans_calib.h" + +using namespace cv; + +//----------------------------------------------------------------------------- +TranslationCalibrator::TranslationCalibrator() +{ + reset(); +} + +void TranslationCalibrator::reset() +{ + P = Matx66f::zeros(); + y = Vec6f(0,0,0, 0,0,0); +} + +void TranslationCalibrator::update(const Matx33f& R_CM_k, const Vec3f& t_CM_k) +{ + Matx H_k_T = Matx::zeros(); + for (int i=0; i<3; ++i) { + for (int j=0; j<3; ++j) { + H_k_T(i,j) = R_CM_k(j,i); + } + } + for (int i=0; i<3; ++i) + { + H_k_T(3+i,i) = 1.0; + } + P += H_k_T * H_k_T.t(); + y += H_k_T * t_CM_k; +} + +Vec3f TranslationCalibrator::get_estimate() +{ + Vec6f x = P.inv() * y; + return Vec3f(-x[0], -x[1], -x[2]); } \ No newline at end of file diff --git a/ftnoir_tracker_pt/trans_calib.h b/ftnoir_tracker_pt/trans_calib.h index f2521690..609c9af1 100644 --- a/ftnoir_tracker_pt/trans_calib.h +++ b/ftnoir_tracker_pt/trans_calib.h @@ -1,39 +1,39 @@ -/* Copyright (c) 2012 Patrick Ruoff - * - * Permission to use, copy, modify, and/or distribute this software for any - * purpose with or without fee is hereby granted, provided that the above - * copyright notice and this permission notice appear in all copies. - */ - -#ifndef TRANSCALIB_H -#define TRANSCALIB_H - -#include - -//----------------------------------------------------------------------------- -// Calibrates the translation from head to model = t_MH -// by recursive least squares / -// kalman filter in information form with identity noise covariance -// measurement equation when head position = t_CH is fixed: -// (R_CM_k , Id)*(-t_MH, t_CH) = t_CM_k - -class TranslationCalibrator -{ -public: - TranslationCalibrator(); - - // reset the calibration process - void reset(); - - // update the current estimate - void update(const cv::Matx33f& R_CM_k, const cv::Vec3f& t_CM_k); - - // get the current estimate for t_MH - cv::Vec3f get_estimate(); - -protected: - cv::Matx66f P; // normalized precision matrix = inverse covariance - cv::Vec6f y; // P*(-t_MH, t_CH) -}; - +/* Copyright (c) 2012 Patrick Ruoff + * + * Permission to use, copy, modify, and/or distribute this software for any + * purpose with or without fee is hereby granted, provided that the above + * copyright notice and this permission notice appear in all copies. 
+ */ + +#ifndef TRANSCALIB_H +#define TRANSCALIB_H + +#include + +//----------------------------------------------------------------------------- +// Calibrates the translation from head to model = t_MH +// by recursive least squares / +// kalman filter in information form with identity noise covariance +// measurement equation when head position = t_CH is fixed: +// (R_CM_k , Id)*(-t_MH, t_CH) = t_CM_k + +class TranslationCalibrator +{ +public: + TranslationCalibrator(); + + // reset the calibration process + void reset(); + + // update the current estimate + void update(const cv::Matx33f& R_CM_k, const cv::Vec3f& t_CM_k); + + // get the current estimate for t_MH + cv::Vec3f get_estimate(); + +protected: + cv::Matx66f P; // normalized precision matrix = inverse covariance + cv::Vec6f y; // P*(-t_MH, t_CH) +}; + #endif //TRANSCALIB_H \ No newline at end of file -- cgit v1.2.3 From 01f1a5f2b9ed1846423eee0336450f32b836536b Mon Sep 17 00:00:00 2001 From: Stanislaw Halik Date: Sat, 27 Sep 2014 20:04:47 +0200 Subject: make stuff private, not protected clang generates warnings for unused private stuff, so use that. --- facetracknoir/rotation.h | 60 ++++++++++++++++++------------------- ftnoir_tracker_aruco/trans_calib.h | 22 +++++++------- ftnoir_tracker_pt/camera.h | 53 ++++++++++++++++---------------- ftnoir_tracker_pt/point_extractor.h | 26 ++++++++-------- ftnoir_tracker_pt/trans_calib.h | 22 +++++++------- 5 files changed, 92 insertions(+), 91 deletions(-) (limited to 'ftnoir_tracker_pt/trans_calib.h') diff --git a/facetracknoir/rotation.h b/facetracknoir/rotation.h index d40fb6cf..5ff5ce61 100644 --- a/facetracknoir/rotation.h +++ b/facetracknoir/rotation.h @@ -11,40 +11,40 @@ class RotationType { public: - RotationType() : a(1.0),b(0.0),c(0.0),d(0.0) {} - RotationType(double yaw, double pitch, double roll) { fromEuler(yaw, pitch, roll); } - RotationType(double a, double b, double c, double d) : a(a),b(b),c(c),d(d) {} - - RotationType inv(){ - return RotationType(a,-b,-c, -d); - } - - - // conversions - // see http://en.wikipedia.org/wiki/Conversion_between_quaternions_and_Euler_angles - void fromEuler(double yaw, double pitch, double roll) - { - - double sin_phi = sin(roll/2.0); - double cos_phi = cos(roll/2.0); - double sin_the = sin(pitch/2.0); - double cos_the = cos(pitch/2.0); - double sin_psi = sin(yaw/2.0); - double cos_psi = cos(yaw/2.0); - - a = cos_phi*cos_the*cos_psi + sin_phi*sin_the*sin_psi; - b = sin_phi*cos_the*cos_psi - cos_phi*sin_the*sin_psi; - c = cos_phi*sin_the*cos_psi + sin_phi*cos_the*sin_psi; - d = cos_phi*cos_the*sin_psi - sin_phi*sin_the*cos_psi; - } - + RotationType() : a(1.0),b(0.0),c(0.0),d(0.0) {} + RotationType(double yaw, double pitch, double roll) { fromEuler(yaw, pitch, roll); } + RotationType(double a, double b, double c, double d) : a(a),b(b),c(c),d(d) {} + + RotationType inv(){ + return RotationType(a,-b,-c, -d); + } + + + // conversions + // see http://en.wikipedia.org/wiki/Conversion_between_quaternions_and_Euler_angles + void fromEuler(double yaw, double pitch, double roll) + { + + double sin_phi = sin(roll/2.0); + double cos_phi = cos(roll/2.0); + double sin_the = sin(pitch/2.0); + double cos_the = cos(pitch/2.0); + double sin_psi = sin(yaw/2.0); + double cos_psi = cos(yaw/2.0); + + a = cos_phi*cos_the*cos_psi + sin_phi*sin_the*sin_psi; + b = sin_phi*cos_the*cos_psi - cos_phi*sin_the*sin_psi; + c = cos_phi*sin_the*cos_psi + sin_phi*cos_the*sin_psi; + d = cos_phi*cos_the*sin_psi - sin_phi*sin_the*cos_psi; + } + void toEuler(double& yaw, double& 
pitch, double& roll) const { roll = atan2(2.0*(a*b + c*d), 1.0 - 2.0*(b*b + c*c)); pitch = asin(2.0*(a*c - b*d)); yaw = atan2(2.0*(a*d + b*c), 1.0 - 2.0*(c*c + d*d)); } - + const RotationType operator*(const RotationType& B) const { const RotationType& A = *this; @@ -54,6 +54,6 @@ public: A.a*B.d + A.b*B.c - A.c*B.b + A.d*B.a); } -protected: - double a,b,c,d; // quaternion coefficients +private: + double a,b,c,d; // quaternion coefficients }; diff --git a/ftnoir_tracker_aruco/trans_calib.h b/ftnoir_tracker_aruco/trans_calib.h index 609c9af1..5c321b2c 100644 --- a/ftnoir_tracker_aruco/trans_calib.h +++ b/ftnoir_tracker_aruco/trans_calib.h @@ -20,20 +20,20 @@ class TranslationCalibrator { public: - TranslationCalibrator(); + TranslationCalibrator(); - // reset the calibration process - void reset(); + // reset the calibration process + void reset(); - // update the current estimate - void update(const cv::Matx33f& R_CM_k, const cv::Vec3f& t_CM_k); + // update the current estimate + void update(const cv::Matx33f& R_CM_k, const cv::Vec3f& t_CM_k); - // get the current estimate for t_MH - cv::Vec3f get_estimate(); + // get the current estimate for t_MH + cv::Vec3f get_estimate(); -protected: - cv::Matx66f P; // normalized precision matrix = inverse covariance - cv::Vec6f y; // P*(-t_MH, t_CH) +private: + cv::Matx66f P; // normalized precision matrix = inverse covariance + cv::Vec6f y; // P*(-t_MH, t_CH) }; -#endif //TRANSCALIB_H \ No newline at end of file +#endif //TRANSCALIB_H diff --git a/ftnoir_tracker_pt/camera.h b/ftnoir_tracker_pt/camera.h index 86cafd42..2bca88a1 100644 --- a/ftnoir_tracker_pt/camera.h +++ b/ftnoir_tracker_pt/camera.h @@ -25,11 +25,11 @@ void get_camera_device_names(std::vector& device_names); // ---------------------------------------------------------------------------- struct CamInfo { - CamInfo() : res_x(0), res_y(0), fps(0) {} + CamInfo() : res_x(0), res_y(0), fps(0) {} - int res_x; - int res_y; - int fps; + int res_x; + int res_y; + int fps; }; // ---------------------------------------------------------------------------- @@ -65,7 +65,7 @@ protected: virtual void _set_device_index() = 0; virtual void _set_fps() = 0; virtual void _set_res() = 0; - +private: float dt_valid; float dt_mean; int desired_index; @@ -82,18 +82,19 @@ inline Camera::~Camera() {} class CVCamera : public Camera { public: - CVCamera() : cap(NULL) {} - ~CVCamera() { stop(); } + CVCamera() : cap(NULL) {} + ~CVCamera() { stop(); } - void start() override; - void stop() override; + void start() override; + void stop() override; protected: - bool _get_frame(cv::Mat* frame) override; - void _set_fps() override; - void _set_res() override; + bool _get_frame(cv::Mat* frame) override; + void _set_fps() override; + void _set_res() override; void _set_device_index() override; +private: cv::VideoCapture* cap; }; #else @@ -102,21 +103,21 @@ protected: class VICamera : public Camera { public: - VICamera(); - ~VICamera() { stop(); } + VICamera(); + ~VICamera() { stop(); } - virtual void start(); - virtual void stop(); + virtual void start(); + virtual void stop(); protected: - virtual bool _get_frame(cv::Mat* frame); - virtual void _set_device_index(); - virtual void _set_fps(); - virtual void _set_res(); - - videoInput VI; - cv::Mat new_frame; - unsigned char* frame_buffer; + virtual bool _get_frame(cv::Mat* frame); + virtual void _set_device_index(); + virtual void _set_fps(); + virtual void _set_res(); + + videoInput VI; + cv::Mat new_frame; + unsigned char* frame_buffer; }; #endif @@ -128,12 +129,12 @@ 
enum RotationType }; // ---------------------------------------------------------------------------- -class FrameRotation +class FrameRotation { public: RotationType rotation; - cv::Mat rotate_frame(cv::Mat frame); + cv::Mat rotate_frame(cv::Mat frame); }; #endif //CAMERA_H diff --git a/ftnoir_tracker_pt/point_extractor.h b/ftnoir_tracker_pt/point_extractor.h index 21d548af..3ef82900 100644 --- a/ftnoir_tracker_pt/point_extractor.h +++ b/ftnoir_tracker_pt/point_extractor.h @@ -15,21 +15,21 @@ // Extracts points from an opencv image class PointExtractor { -public: - // extracts points from frame and draws some processing info into frame, if draw_output is set - // dt: time since last call in seconds - // WARNING: returned reference is valid as long as object - const std::vector& extract_points(cv::Mat &frame); - const std::vector& get_points() { return points; } - PointExtractor(); +public: + // extracts points from frame and draws some processing info into frame, if draw_output is set + // dt: time since last call in seconds + // WARNING: returned reference is valid as long as object + const std::vector& extract_points(cv::Mat &frame); + const std::vector& get_points() { return points; } + PointExtractor(); - int threshold_val; - int threshold_secondary_val; - int min_size, max_size; + int threshold_val; + int threshold_secondary_val; + int min_size, max_size; -protected: - std::vector points; - cv::Mat frame_last; +private: + std::vector points; + cv::Mat frame_last; }; #endif //POINTEXTRACTOR_H diff --git a/ftnoir_tracker_pt/trans_calib.h b/ftnoir_tracker_pt/trans_calib.h index 609c9af1..5c321b2c 100644 --- a/ftnoir_tracker_pt/trans_calib.h +++ b/ftnoir_tracker_pt/trans_calib.h @@ -20,20 +20,20 @@ class TranslationCalibrator { public: - TranslationCalibrator(); + TranslationCalibrator(); - // reset the calibration process - void reset(); + // reset the calibration process + void reset(); - // update the current estimate - void update(const cv::Matx33f& R_CM_k, const cv::Vec3f& t_CM_k); + // update the current estimate + void update(const cv::Matx33f& R_CM_k, const cv::Vec3f& t_CM_k); - // get the current estimate for t_MH - cv::Vec3f get_estimate(); + // get the current estimate for t_MH + cv::Vec3f get_estimate(); -protected: - cv::Matx66f P; // normalized precision matrix = inverse covariance - cv::Vec6f y; // P*(-t_MH, t_CH) +private: + cv::Matx66f P; // normalized precision matrix = inverse covariance + cv::Vec6f y; // P*(-t_MH, t_CH) }; -#endif //TRANSCALIB_H \ No newline at end of file +#endif //TRANSCALIB_H -- cgit v1.2.3 From cfffa29e29db6b2234c7f534b1ebcd612b7f4914 Mon Sep 17 00:00:00 2001 From: Stanislaw Halik Date: Sun, 5 Oct 2014 01:22:11 +0200 Subject: flush --- CMakeLists.txt | 3 +- facetracknoir/quat.hpp | 66 ++++++++++ facetracknoir/rotation.h | 58 -------- facetracknoir/tracker.h | 2 - facetracknoir/tracker_types.h | 14 +- ftnoir_tracker_aruco/ftnoir_tracker_aruco.cpp | 148 ++++++++++----------- ftnoir_tracker_aruco/ftnoir_tracker_aruco.h | 13 +- ftnoir_tracker_aruco/include/aruco.h | 29 ++-- ftnoir_tracker_aruco/include/arucofidmarkers.h | 15 +-- ftnoir_tracker_aruco/include/board.h | 168 ------------------------ ftnoir_tracker_aruco/include/boarddetector.h | 139 -------------------- ftnoir_tracker_aruco/include/cameraparameters.h | 6 +- ftnoir_tracker_aruco/include/cvdrawingutils.h | 19 +-- ftnoir_tracker_aruco/include/exports.h | 6 +- ftnoir_tracker_aruco/include/marker.h | 16 +-- ftnoir_tracker_aruco/include/markerdetector.h | 56 ++++---- 
ftnoir_tracker_aruco/trans_calib.h | 2 +- ftnoir_tracker_pt/camera.h | 2 +- ftnoir_tracker_pt/ftnoir_tracker_pt.h | 54 ++++---- ftnoir_tracker_pt/ftnoir_tracker_pt_dialog.cpp | 88 ++++++------- ftnoir_tracker_pt/ftnoir_tracker_pt_settings.h | 4 +- ftnoir_tracker_pt/point_extractor.h | 4 +- ftnoir_tracker_pt/point_tracker.h | 74 +++++------ ftnoir_tracker_pt/pt_video_widget.h | 2 +- ftnoir_tracker_pt/trans_calib.h | 2 +- 25 files changed, 338 insertions(+), 652 deletions(-) create mode 100644 facetracknoir/quat.hpp delete mode 100644 facetracknoir/rotation.h delete mode 100644 ftnoir_tracker_aruco/include/board.h delete mode 100644 ftnoir_tracker_aruco/include/boarddetector.h (limited to 'ftnoir_tracker_pt/trans_calib.h') diff --git a/CMakeLists.txt b/CMakeLists.txt index 655d70c3..2de8856d 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -344,7 +344,6 @@ opentrack_library(opentrack-tracker-ht) target_link_libraries(opentrack-tracker-ht opentrack-compat) if(SDK_ARUCO_LIBPATH) - include_directories(${CMAKE_SOURCE_DIR}/ftnoir_tracker_aruco/include) opentrack_library(opentrack-tracker-aruco) target_link_libraries(opentrack-tracker-aruco ${SDK_ARUCO_LIBPATH} ${OpenCV_LIBS}) endif() @@ -477,7 +476,7 @@ install(DIRECTORY "${CMAKE_SOURCE_DIR}/3rdparty-notices" DESTINATION .) install(FILES "${CMAKE_SOURCE_DIR}/bin/NPClient.dll" "${CMAKE_SOURCE_DIR}/bin/NPClient64.dll" "${CMAKE_SOURCE_DIR}/bin/TrackIR.exe" DESTINATION .) install(DIRECTORY "${CMAKE_SOURCE_DIR}/bin/settings" "${CMAKE_SOURCE_DIR}/facetracknoir/clientfiles" DESTINATION .) -if(NOT WIN32 AND SDK_WINE_PREFIX) +if(NOT WIN32 AND SDK_WINE_PREFIX AND NOT SDK_WINE_NO_WRAPPER) install(FILES "${CMAKE_BINARY_DIR}/opentrack-wrapper-wine.exe.so" DESTINATION .) endif() diff --git a/facetracknoir/quat.hpp b/facetracknoir/quat.hpp new file mode 100644 index 00000000..1e268963 --- /dev/null +++ b/facetracknoir/quat.hpp @@ -0,0 +1,66 @@ +/* Copyright (c) 2012 Patrick Ruoff + * + * Permission to use, copy, modify, and/or distribute this software for any + * purpose with or without fee is hereby granted, provided that the above + * copyright notice and this permission notice appear in all copies. 
+ */ + +#pragma once +#include + +class Quat { +private: + static constexpr double pi = 3.141592653; + static constexpr double r2d = 180./pi; + double a,b,c,d; // quaternion coefficients +public: + Quat() : a(1.0),b(0.0),c(0.0),d(0.0) {} + Quat(double yaw, double pitch, double roll) { from_euler_rads(yaw, pitch, roll); } + Quat(double a, double b, double c, double d) : a(a),b(b),c(c),d(d) {} + + Quat inv(){ + return Quat(a,-b,-c, -d); + } + + // conversions + // see http://en.wikipedia.org/wiki/Conversion_between_quaternions_and_Euler_angles + void from_euler_rads(double yaw, double pitch, double roll) + { + + double sin_phi = sin(roll/2.0); + double cos_phi = cos(roll/2.0); + double sin_the = sin(pitch/2.0); + double cos_the = cos(pitch/2.0); + double sin_psi = sin(yaw/2.0); + double cos_psi = cos(yaw/2.0); + + a = cos_phi*cos_the*cos_psi + sin_phi*sin_the*sin_psi; + b = sin_phi*cos_the*cos_psi - cos_phi*sin_the*sin_psi; + c = cos_phi*sin_the*cos_psi + sin_phi*cos_the*sin_psi; + d = cos_phi*cos_the*sin_psi - sin_phi*sin_the*cos_psi; + } + + void to_euler_rads(double& yaw, double& pitch, double& roll) const + { + roll = atan2(2.0*(a*b + c*d), 1.0 - 2.0*(b*b + c*c)); + pitch = asin(2.0*(a*c - b*d)); + yaw = atan2(2.0*(a*d + b*c), 1.0 - 2.0*(c*c + d*d)); + } + + void to_euler_degrees(double& yaw, double& pitch, double& roll) const + { + to_euler_rads(yaw, pitch, roll); + yaw *= r2d; + pitch *= r2d; + roll *= r2d; + } + + const Quat operator*(const Quat& B) const + { + const Quat& A = *this; + return Quat(A.a*B.a - A.b*B.b - A.c*B.c - A.d*B.d, // quaternion multiplication + A.a*B.b + A.b*B.a + A.c*B.d - A.d*B.c, + A.a*B.c - A.b*B.d + A.c*B.a + A.d*B.b, + A.a*B.d + A.b*B.c - A.c*B.b + A.d*B.a); + } +}; diff --git a/facetracknoir/rotation.h b/facetracknoir/rotation.h deleted file mode 100644 index b3bb891e..00000000 --- a/facetracknoir/rotation.h +++ /dev/null @@ -1,58 +0,0 @@ -/* Copyright (c) 2012 Patrick Ruoff - * - * Permission to use, copy, modify, and/or distribute this software for any - * purpose with or without fee is hereby granted, provided that the above - * copyright notice and this permission notice appear in all copies. 
- */ - -#pragma once -#include - -class Quat { - -public: - Quat() : a(1.0),b(0.0),c(0.0),d(0.0) {} - Quat(double yaw, double pitch, double roll) { fromEuler(yaw, pitch, roll); } - Quat(double a, double b, double c, double d) : a(a),b(b),c(c),d(d) {} - - Quat inv(){ - return Quat(a,-b,-c, -d); - } - - // conversions - // see http://en.wikipedia.org/wiki/Conversion_between_quaternions_and_Euler_angles - void fromEuler(double yaw, double pitch, double roll) - { - - double sin_phi = sin(roll/2.0); - double cos_phi = cos(roll/2.0); - double sin_the = sin(pitch/2.0); - double cos_the = cos(pitch/2.0); - double sin_psi = sin(yaw/2.0); - double cos_psi = cos(yaw/2.0); - - a = cos_phi*cos_the*cos_psi + sin_phi*sin_the*sin_psi; - b = sin_phi*cos_the*cos_psi - cos_phi*sin_the*sin_psi; - c = cos_phi*sin_the*cos_psi + sin_phi*cos_the*sin_psi; - d = cos_phi*cos_the*sin_psi - sin_phi*sin_the*cos_psi; - } - - void toEuler(double& yaw, double& pitch, double& roll) const - { - roll = atan2(2.0*(a*b + c*d), 1.0 - 2.0*(b*b + c*c)); - pitch = asin(2.0*(a*c - b*d)); - yaw = atan2(2.0*(a*d + b*c), 1.0 - 2.0*(c*c + d*d)); - } - - const Quat operator*(const Quat& B) const - { - const Quat& A = *this; - return Quat(A.a*B.a - A.b*B.b - A.c*B.c - A.d*B.d, // quaternion multiplication - A.a*B.b + A.b*B.a + A.c*B.d - A.d*B.c, - A.a*B.c - A.b*B.d + A.c*B.a + A.d*B.b, - A.a*B.d + A.b*B.c - A.c*B.b + A.d*B.a); - } - -private: - double a,b,c,d; // quaternion coefficients -}; diff --git a/facetracknoir/tracker.h b/facetracknoir/tracker.h index 05ae4180..3d9a3858 100644 --- a/facetracknoir/tracker.h +++ b/facetracknoir/tracker.h @@ -22,8 +22,6 @@ #include "facetracknoir/options.h" #include "facetracknoir/timer.hpp" - - class Tracker : protected QThread { Q_OBJECT private: diff --git a/facetracknoir/tracker_types.h b/facetracknoir/tracker_types.h index c667498e..02aacdcf 100644 --- a/facetracknoir/tracker_types.h +++ b/facetracknoir/tracker_types.h @@ -2,14 +2,14 @@ #include #include -#include "rotation.h" -#include "plugin-api.hpp" +#include "./quat.hpp" +#include "./plugin-api.hpp" struct T6DOF { private: - static constexpr double PI = 3.14159265358979323846264; - static constexpr double D2R = PI/180.0; - static constexpr double R2D = 180.0/PI; + static constexpr double pi = 3.141592653; + static constexpr double d2r = pi/180.0; + static constexpr double r2d = 180./pi; double axes[6]; public: @@ -23,13 +23,13 @@ public: Quat quat() const { - return Quat(axes[Yaw]*D2R, axes[Pitch]*D2R, axes[Roll]*D2R); + return Quat(axes[Yaw]*d2r, axes[Pitch]*d2r, axes[Roll]*d2r); } static T6DOF fromQuat(const Quat& q) { T6DOF ret; - q.toEuler(ret(Yaw), ret(Pitch), ret(Roll)); + q.to_euler_rads(ret(Yaw), ret(Pitch), ret(Roll)); return ret; } diff --git a/ftnoir_tracker_aruco/ftnoir_tracker_aruco.cpp b/ftnoir_tracker_aruco/ftnoir_tracker_aruco.cpp index e216d319..a1e15721 100644 --- a/ftnoir_tracker_aruco/ftnoir_tracker_aruco.cpp +++ b/ftnoir_tracker_aruco/ftnoir_tracker_aruco.cpp @@ -8,13 +8,13 @@ #include "ftnoir_tracker_aruco.h" #include "ui_aruco-trackercontrols.h" #include "facetracknoir/plugin-api.hpp" -#include #include -#include -#include -#include +#include "include/markerdetector.h" +#include +#include #include #include +#include #if defined(_WIN32) # undef NOMINMAX @@ -29,51 +29,51 @@ static QList get_camera_names(void) { QList ret; #if defined(_WIN32) - // Create the System Device Enumerator. 
- HRESULT hr; - ICreateDevEnum *pSysDevEnum = NULL; - hr = CoCreateInstance(CLSID_SystemDeviceEnum, NULL, CLSCTX_INPROC_SERVER, IID_ICreateDevEnum, (void **)&pSysDevEnum); - if (FAILED(hr)) - { - return ret; - } - // Obtain a class enumerator for the video compressor category. - IEnumMoniker *pEnumCat = NULL; - hr = pSysDevEnum->CreateClassEnumerator(CLSID_VideoInputDeviceCategory, &pEnumCat, 0); - - if (hr == S_OK) { - // Enumerate the monikers. - IMoniker *pMoniker = NULL; - ULONG cFetched; - while (pEnumCat->Next(1, &pMoniker, &cFetched) == S_OK) { - IPropertyBag *pPropBag; - hr = pMoniker->BindToStorage(0, 0, IID_IPropertyBag, (void **)&pPropBag); - if (SUCCEEDED(hr)) { - // To retrieve the filter's friendly name, do the following: - VARIANT varName; - VariantInit(&varName); - hr = pPropBag->Read(L"FriendlyName", &varName, 0); - if (SUCCEEDED(hr)) - { - // Display the name in your UI somehow. - QString str((QChar*)varName.bstrVal, wcslen(varName.bstrVal)); - ret.append(str); - } - VariantClear(&varName); - - ////// To create an instance of the filter, do the following: - ////IBaseFilter *pFilter; - ////hr = pMoniker->BindToObject(NULL, NULL, IID_IBaseFilter, - //// (void**)&pFilter); - // Now add the filter to the graph. - //Remember to release pFilter later. - pPropBag->Release(); - } - pMoniker->Release(); - } - pEnumCat->Release(); - } - pSysDevEnum->Release(); + // Create the System Device Enumerator. + HRESULT hr; + ICreateDevEnum *pSysDevEnum = NULL; + hr = CoCreateInstance(CLSID_SystemDeviceEnum, NULL, CLSCTX_INPROC_SERVER, IID_ICreateDevEnum, (void **)&pSysDevEnum); + if (FAILED(hr)) + { + return ret; + } + // Obtain a class enumerator for the video compressor category. + IEnumMoniker *pEnumCat = NULL; + hr = pSysDevEnum->CreateClassEnumerator(CLSID_VideoInputDeviceCategory, &pEnumCat, 0); + + if (hr == S_OK) { + // Enumerate the monikers. + IMoniker *pMoniker = NULL; + ULONG cFetched; + while (pEnumCat->Next(1, &pMoniker, &cFetched) == S_OK) { + IPropertyBag *pPropBag; + hr = pMoniker->BindToStorage(0, 0, IID_IPropertyBag, (void **)&pPropBag); + if (SUCCEEDED(hr)) { + // To retrieve the filter's friendly name, do the following: + VARIANT varName; + VariantInit(&varName); + hr = pPropBag->Read(L"FriendlyName", &varName, 0); + if (SUCCEEDED(hr)) + { + // Display the name in your UI somehow. + QString str((QChar*)varName.bstrVal, wcslen(varName.bstrVal)); + ret.append(str); + } + VariantClear(&varName); + + ////// To create an instance of the filter, do the following: + ////IBaseFilter *pFilter; + ////hr = pMoniker->BindToObject(NULL, NULL, IID_IBaseFilter, + //// (void**)&pFilter); + // Now add the filter to the graph. + //Remember to release pFilter later. 
+ pPropBag->Release(); + } + pMoniker->Release(); + } + pEnumCat->Release(); + } + pSysDevEnum->Release(); #else for (int i = 0; i < 16; i++) { char buf[128]; @@ -89,15 +89,15 @@ static QList get_camera_names(void) { } typedef struct { - int width; - int height; + int width; + int height; } resolution_tuple; static resolution_tuple resolution_choices[] = { - { 640, 480 }, - { 320, 240 }, - { 320, 200 }, - { 0, 0 } + { 640, 480 }, + { 320, 240 }, + { 320, 200 }, + { 0, 0 } }; Tracker::Tracker() : stop(false), layout(nullptr), videoWidget(nullptr) @@ -108,8 +108,8 @@ Tracker::~Tracker() { stop = true; wait(); - if (videoWidget) - delete videoWidget; + if (videoWidget) + delete videoWidget; if(layout) delete layout; qDebug() << "releasing camera, brace for impact"; @@ -178,7 +178,7 @@ void Tracker::run() } if (fps) camera.set(CV_CAP_PROP_FPS, fps); - + aruco::MarkerDetector detector; detector.setDesiredSpeed(3); @@ -187,7 +187,7 @@ void Tracker::run() cv::Mat color, color_, grayscale, rvec, tvec; const double stateful_coeff = 0.88; - + if (!camera.isOpened()) { fprintf(stderr, "aruco tracker: can't open camera\n"); @@ -214,7 +214,7 @@ void Tracker::run() grayscale = channel[2]; } else cv::cvtColor(color, grayscale, cv::COLOR_BGR2GRAY); - + gain.tick(camera, grayscale); const int scale = frame.cols > 480 ? 2 : 1; @@ -280,11 +280,11 @@ void Tracker::run() cv::putText(frame, buf, cv::Point(10, 32), cv::FONT_HERSHEY_PLAIN, scale, cv::Scalar(0, 255, 0), scale); ::sprintf(buf, "Jiffies: %ld", (long) (10000 * (time - tm) / freq)); cv::putText(frame, buf, cv::Point(10, 54), cv::FONT_HERSHEY_PLAIN, scale, cv::Scalar(80, 255, 0), scale); - + if (markers.size() == 1 && markers[0].size() == 4) { const auto& m = markers.at(0); const float size = 40; - + const double p = s.marker_pitch; const double sq = sin(p * HT_PI / 180); const double cq = cos(p * HT_PI / 180); @@ -380,7 +380,7 @@ void Tracker::run() void Tracker::GetHeadPoseData(double *data) { QMutexLocker lck(&mtx); - + data[Yaw] = pose[Yaw]; data[Pitch] = pose[Pitch]; data[Roll] = pose[Roll]; @@ -391,11 +391,11 @@ void Tracker::GetHeadPoseData(double *data) class TrackerDll : public Metadata { - // ITrackerDll interface - void getFullName(QString *strToBeFilled); - void getShortName(QString *strToBeFilled); - void getDescription(QString *strToBeFilled); - void getIcon(QIcon *icon); + // ITrackerDll interface + void getFullName(QString *strToBeFilled); + void getShortName(QString *strToBeFilled); + void getDescription(QString *strToBeFilled); + void getIcon(QIcon *icon); }; //----------------------------------------------------------------------------- @@ -406,12 +406,12 @@ void TrackerDll::getFullName(QString *strToBeFilled) void TrackerDll::getShortName(QString *strToBeFilled) { - *strToBeFilled = "aruco"; + *strToBeFilled = "aruco"; } void TrackerDll::getDescription(QString *strToBeFilled) { - *strToBeFilled = ""; + *strToBeFilled = ""; } void TrackerDll::getIcon(QIcon *icon) @@ -425,7 +425,7 @@ void TrackerDll::getIcon(QIcon *icon) extern "C" OPENTRACK_EXPORT Metadata* GetMetadata() { - return new TrackerDll; + return new TrackerDll; } //#pragma comment(linker, "/export:GetTracker=_GetTracker@0") @@ -444,11 +444,11 @@ TrackerControls::TrackerControls() { tracker = nullptr; calib_timer.setInterval(200); - ui.setupUi(this); + ui.setupUi(this); setAttribute(Qt::WA_NativeWindow, true); ui.cameraName->addItems(get_camera_names()); tie_setting(s.camera_index, ui.cameraName); - tie_setting(s.resolution, ui.resolution); + tie_setting(s.resolution, 
ui.resolution); tie_setting(s.force_fps, ui.cameraFPS); tie_setting(s.fov, ui.cameraFOV); tie_setting(s.headpos_x, ui.cx); @@ -500,7 +500,7 @@ void TrackerControls::doOK() s.b->save(); if (tracker) tracker->reload(); - this->close(); + this->close(); } void TrackerControls::doCancel() diff --git a/ftnoir_tracker_aruco/ftnoir_tracker_aruco.h b/ftnoir_tracker_aruco/ftnoir_tracker_aruco.h index 5416bb52..9ac57417 100644 --- a/ftnoir_tracker_aruco/ftnoir_tracker_aruco.h +++ b/ftnoir_tracker_aruco/ftnoir_tracker_aruco.h @@ -16,12 +16,9 @@ #include #include #include -#include -#include #include "facetracknoir/options.h" #include "ftnoir_tracker_aruco/trans_calib.h" #include "facetracknoir/plugin-api.hpp" - #include "facetracknoir/gain-control.hpp" using namespace options; @@ -50,7 +47,7 @@ class Tracker : protected QThread, public ITracker { Q_OBJECT public: - Tracker(); + Tracker(); ~Tracker() override; void StartTracker(QFrame* frame); void GetHeadPoseData(double *data); @@ -61,7 +58,7 @@ private: QMutex mtx; volatile bool stop; QHBoxLayout* layout; - ArucoVideoWidget* videoWidget; + ArucoVideoWidget* videoWidget; settings s; double pose[6]; cv::Mat frame; @@ -83,14 +80,14 @@ public: tracker = nullptr; } private: - Ui::Form ui; + Ui::Form ui; Tracker* tracker; settings s; TranslationCalibrator calibrator; QTimer calib_timer; private slots: - void doOK(); - void doCancel(); + void doOK(); + void doCancel(); void toggleCalibrate(); void cleanupCalib(); void update_tracker_calibration(); diff --git a/ftnoir_tracker_aruco/include/aruco.h b/ftnoir_tracker_aruco/include/aruco.h index 569b95fb..8ea583a8 100644 --- a/ftnoir_tracker_aruco/include/aruco.h +++ b/ftnoir_tracker_aruco/include/aruco.h @@ -26,12 +26,12 @@ The views and conclusions contained in the software and documentation are those authors and should not be interpreted as representing official policies, either expressed or implied, of Rafael Muñoz Salinas. - - + + \mainpage ArUco: Augmented Reality library from the University of Cordoba -ArUco is a minimal C++ library for detection of Augmented Reality markers based on OpenCv exclusively. +ArUco is a minimal C++ library for detection of Augmented Reality markers based on OpenCv exclusively. It is an educational project to show student how to detect augmented reality markers and it is provided under BSD license. @@ -54,11 +54,11 @@ Aruco allows the possibility to employ board. Boards are markers composed by an The library comes with five applications that will help you to learn how to use the library: - aruco_create_marker: which creates marker and saves it in a jpg file you can print. - - aruco_simple : simple test aplication that detects the markers in a image + - aruco_simple : simple test aplication that detects the markers in a image - aruco_test: this is the main application for detection. It reads images either from the camera of from a video and detect markers. Additionally, if you provide the intrinsics of the camera(obtained by OpenCv calibration) and the size of the marker in meters, the library calculates the marker intrinsics so that you can easily create your AR applications. 
- aruco_test_gl: shows how to use the library AR applications using OpenGL for rendering - aruco_create_board: application that helps you to create a board - - aruco_simple_board: simple test aplication that detects a board of markers in a image + - aruco_simple_board: simple test aplication that detects a board of markers in a image - aruco_test_board: application that detects boards - aruco_test_board_gl: application that detects boards and uses OpenGL to draw @@ -66,7 +66,7 @@ The library comes with five applications that will help you to learn how to use The ArUco library contents are divided in two main directories. The src directory, which contains the library itself. And the utils directory which contains the applications. -The library main classes are: +The library main classes are: - aruco::CameraParameters: represent the information of the camera that captures the images. Here you must set the calibration info. - aruco::Marker: which represent a marker detected in the image - aruco::MarkerDetector: that is in charge of deteting the markers in a image Detection is done by simple calling the member funcion ArMarkerDetector::detect(). Additionally, the classes contain members to create the required matrices for rendering using OpenGL. See aruco_test_gl for details @@ -101,34 +101,33 @@ The library has been compiled using MinGW and codeblocks. Below I describe the b -# Download the source code and compile it using cmake and codeblocks. Note: install the library in C:\ if you want it to be easily detected by cmake afterwards - step 4) aruco -# Download and decompress. - -# Open cmake gui application and set the path to the main library directory and also set a path where the project is going to be built. + -# Open cmake gui application and set the path to the main library directory and also set a path where the project is going to be built. -# Generate the codeblock project. -# Open the project with codeblock and compile then, install. The programs will be probably generated into the bin directory OpenGL: by default, the mingw version installed has not the glut library. So, the opengl programs are not compiled. If you want to compile with OpenGL support, you must install glut, or prefereably freeglut. -Thus, - - Download the library (http://www.martinpayne.me.uk/software/development/GLUT/freeglut-MinGW.zip) for mingw. - - Decompress in a directory X. +Thus, + - Download the library (http://www.martinpayne.me.uk/software/development/GLUT/freeglut-MinGW.zip) for mingw. + - Decompress in a directory X. - Then, rerun cmake setting the variable GLU_PATH to that directory (>cmake .. -DGLUT_PATH="C:\X") - Finally, recompile and test. Indeed, you should move the freeglut.dll to the directory with the binaries or to any other place in the PATH. CONCLUSION: Move to Linux, things are simpler :P - -\section Testing + +\section Testing For testing the applications, the library provides videos and the corresponding camera parameters of these videos. Into the directories you will find information on how to run the examples. - + \section Final Notes - REQUIREMENTS: OpenCv >= 2.1.0. and OpenGL for (aruco_test_gl and aruco_test_board_gl) - CONTACT: Rafael Munoz-Salinas: rmsalinas@uco.es - This libary is free software and come with no guaratee! 
- + */ #include "markerdetector.h" -#include "boarddetector.h" #include "cvdrawingutils.h" diff --git a/ftnoir_tracker_aruco/include/arucofidmarkers.h b/ftnoir_tracker_aruco/include/arucofidmarkers.h index 7dad4672..15eb8e4c 100644 --- a/ftnoir_tracker_aruco/include/arucofidmarkers.h +++ b/ftnoir_tracker_aruco/include/arucofidmarkers.h @@ -31,7 +31,6 @@ or implied, of Rafael Muñoz Salinas. #include #include "exports.h" #include "marker.h" -#include "board.h" namespace aruco { class ARUCO_EXPORTS FiducidalMarkers { @@ -80,7 +79,7 @@ public: * @param gridSize grid layout (numer of sqaures in x and Y) * @param MarkerSize size of markers sides in pixels * @param MarkerDistance distance between the markers - * @param TInfo output + * @param TInfo output * @param excludedIds set of ids excluded from the board */ static cv::Mat createBoardImage( cv::Size gridSize,int MarkerSize,int MarkerDistance, BoardConfiguration& TInfo ,vector *excludedIds=NULL ) throw (cv::Exception); @@ -89,24 +88,24 @@ public: /**Creates a printable image of a board in chessboard_like manner * @param gridSize grid layout (numer of sqaures in x and Y) * @param MarkerSize size of markers sides in pixels - * @param TInfo output + * @param TInfo output * @param setDataCentered indicates if the center is set at the center of the board. Otherwise it is the left-upper corner - * + * */ static cv::Mat createBoardImage_ChessBoard( cv::Size gridSize,int MarkerSize, BoardConfiguration& TInfo ,bool setDataCentered=true ,vector *excludedIds=NULL) throw (cv::Exception); - /**Creates a printable image of a board in a frame fashion + /**Creates a printable image of a board in a frame fashion * @param gridSize grid layout (numer of sqaures in x and Y) * @param MarkerSize size of markers sides in pixels * @param MarkerDistance distance between the markers - * @param TInfo output + * @param TInfo output * @param setDataCentered indicates if the center is set at the center of the board. Otherwise it is the left-upper corner - * + * */ static cv::Mat createBoardImage_Frame( cv::Size gridSize,int MarkerSize,int MarkerDistance, BoardConfiguration& TInfo ,bool setDataCentered=true,vector *excludedIds=NULL ) throw (cv::Exception); private: - + static vector getListOfValidMarkersIds_random(int nMarkers,vector *excluded) throw (cv::Exception); static cv::Mat rotate(const cv::Mat & in); static int hammDistMarker(cv::Mat bits); diff --git a/ftnoir_tracker_aruco/include/board.h b/ftnoir_tracker_aruco/include/board.h deleted file mode 100644 index c1d79292..00000000 --- a/ftnoir_tracker_aruco/include/board.h +++ /dev/null @@ -1,168 +0,0 @@ -/***************************** -Copyright 2011 Rafael Muñoz Salinas. All rights reserved. - -Redistribution and use in source and binary forms, with or without modification, are -permitted provided that the following conditions are met: - - 1. Redistributions of source code must retain the above copyright notice, this list of - conditions and the following disclaimer. - - 2. Redistributions in binary form must reproduce the above copyright notice, this list - of conditions and the following disclaimer in the documentation and/or other materials - provided with the distribution. - -THIS SOFTWARE IS PROVIDED BY Rafael Muñoz Salinas ''AS IS'' AND ANY EXPRESS OR IMPLIED -WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND -FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL Rafael Muñoz Salinas OR -CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR -CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR -SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON -ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING -NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF -ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -The views and conclusions contained in the software and documentation are those of the -authors and should not be interpreted as representing official policies, either expressed -or implied, of Rafael Muñoz Salinas. -********************************/ -#ifndef _Aruco_board_h -#define _Aruco_board_h -#include -#include -#include -#include "exports.h" -#include "marker.h" -using namespace std; -namespace aruco { -/** - * 3d representation of a marker - */ -struct ARUCO_EXPORTS MarkerInfo:public vector { - MarkerInfo() {} - MarkerInfo(int _id) {id=_id; } - MarkerInfo(const MarkerInfo&MI): vector(MI){id=MI.id; } - MarkerInfo & operator=(const MarkerInfo&MI){ - vector ::operator=(MI); - id=MI.id; - return *this; - } - int id;//maker id -}; - -/**\brief This class defines a board with several markers. - * A Board contains several markers so that they are more robustly detected. - * - * In general, a board is a set of markers. So BoardConfiguration is only a list - * of the id of the markers along with the position of their corners. - * - * The position of the corners can be specified either in pixels (in a non-specific size) or in meters. - * The first is the typical case in which you generate the image of board and the print it. Since you do not know in advance the real - * size of the markers, their corners are specified in pixels, and then, the translation to meters can be made once you know the real size. - * - * On the other hand, you may want to have the information of your boards in meters. The BoardConfiguration allows you to do so. - * - * The point is in the mInfoType variable. It can be either PIX or METERS according to your needs. - * -*/ - - -class ARUCO_EXPORTS BoardConfiguration: public vector -{ - friend class Board; -public: - enum MarkerInfoType {NONE=-1,PIX=0,METERS=1};//indicates if the data in MakersInfo is expressed in meters or in pixels so as to do conversion internally - //variable indicates if the data in MakersInfo is expressed in meters or in pixels so as to do conversion internally - int mInfoType; - /** - */ - BoardConfiguration(); - - /** - */ - BoardConfiguration(const BoardConfiguration &T); - - /** - */ - BoardConfiguration & operator=(const BoardConfiguration &T); - /**Saves the board info to a file - */ - void saveToFile(string sfile)throw (cv::Exception); - /**Reads board info from a file - */ - void readFromFile(string sfile)throw (cv::Exception); - /**Indicates if the corners are expressed in meters - */ - bool isExpressedInMeters()const { - return mInfoType==METERS; - } - /**Indicates if the corners are expressed in meters - */ - bool isExpressedInPixels()const { - return mInfoType==PIX; - } - /**Returns the index of the marker with id indicated, if is in the list - */ - int getIndexOfMarkerId(int id)const; - /**Returns the Info of the marker with id specified. 
If not in the set, throws exception - */ - const MarkerInfo& getMarkerInfo(int id)const throw (cv::Exception); - /**Set in the list passed the set of the ids - */ - void getIdList(vector &ids,bool append=true)const; -private: - /**Saves the board info to a file - */ - void saveToFile(cv::FileStorage &fs)throw (cv::Exception); - /**Reads board info from a file - */ - void readFromFile(cv::FileStorage &fs)throw (cv::Exception); -}; - -/** -*/ -class ARUCO_EXPORTS Board:public vector -{ - -public: - BoardConfiguration conf; - //matrices of rotation and translation respect to the camera - cv::Mat Rvec,Tvec; - /** - */ - Board() - { - Rvec.create(3,1,CV_32FC1); - Tvec.create(3,1,CV_32FC1); - for (int i=0;i<3;i++) - Tvec.at(i,0)=Rvec.at(i,0)=-999999; - } - - /**Given the extrinsic camera parameters returns the GL_MODELVIEW matrix for opengl. - * Setting this matrix, the reference corrdinate system will be set in this board - */ - void glGetModelViewMatrix(double modelview_matrix[16])throw(cv::Exception); - - /** - * Returns position vector and orientation quaternion for an Ogre scene node or entity. - * Use: - * ... - * Ogre::Vector3 ogrePos (position[0], position[1], position[2]); - * Ogre::Quaternion ogreOrient (orientation[0], orientation[1], orientation[2], orientation[3]); - * mySceneNode->setPosition( ogrePos ); - * mySceneNode->setOrientation( ogreOrient ); - * ... - */ - void OgreGetPoseParameters( double position[3], double orientation[4] )throw(cv::Exception); - - - /**Save this from a file - */ - void saveToFile(string filePath)throw(cv::Exception); - /**Read this from a file - */ - void readFromFile(string filePath)throw(cv::Exception); - -}; -} - -#endif diff --git a/ftnoir_tracker_aruco/include/boarddetector.h b/ftnoir_tracker_aruco/include/boarddetector.h deleted file mode 100644 index 4770b5c9..00000000 --- a/ftnoir_tracker_aruco/include/boarddetector.h +++ /dev/null @@ -1,139 +0,0 @@ -/***************************** -Copyright 2011 Rafael Muñoz Salinas. All rights reserved. - -Redistribution and use in source and binary forms, with or without modification, are -permitted provided that the following conditions are met: - - 1. Redistributions of source code must retain the above copyright notice, this list of - conditions and the following disclaimer. - - 2. Redistributions in binary form must reproduce the above copyright notice, this list - of conditions and the following disclaimer in the documentation and/or other materials - provided with the distribution. - -THIS SOFTWARE IS PROVIDED BY Rafael Muñoz Salinas ''AS IS'' AND ANY EXPRESS OR IMPLIED -WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND -FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL Rafael Muñoz Salinas OR -CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR -CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR -SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON -ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING -NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF -ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -The views and conclusions contained in the software and documentation are those of the -authors and should not be interpreted as representing official policies, either expressed -or implied, of Rafael Muñoz Salinas. 
-********************************/ -#ifndef _Aruco_BoardDetector_H -#define _Aruco_BoardDetector_H -#include -#include "exports.h" -#include "board.h" -#include "cameraparameters.h" -#include "markerdetector.h" -using namespace std; - -namespace aruco -{ - -/**\brief This class detects AR boards - * Version 1.2 - * There are two modes for board detection. - * First, the old way. (You first detect markers with MarkerDetector and then call to detect in this class. - * - * Second: New mode, marker detection is included in the class - * \code - - CameraParameters CP; - CP.readFromFile(path_cp) - BoardConfiguration BC; - BC.readFromFile(path_bc); - BoardDetector BD; - BD.setParams(BC,CP); //or only BD.setParams(BC) - //capture image - cv::Mat im; - capture_image(im); - - float prob=BD.detect(im); - if (prob>0.3) - CvDrawingUtils::draw3DAxis(im,BD.getDetectedBoard(),CP); - - \endcode - * -*/ -class ARUCO_EXPORTS BoardDetector -{ -public: - /** See discussion in @see enableRotateXAxis. - * Do not change unless you know what you are doing - */ - BoardDetector(bool setYPerperdicular=true); - - - /** - * Use if you plan to let this class to perform marker detection too - */ - void setParams(const BoardConfiguration &bc,const CameraParameters &cp, float markerSizeMeters=-1); - void setParams(const BoardConfiguration &bc); - /** - * Detect markers, and then, look for the board indicated in setParams() - * @return value indicating the likelihood of having found the marker - */ - float detect(const cv::Mat &im)throw (cv::Exception); - /**Returns a reference to the board detected - */ - Board & getDetectedBoard(){return _boardDetected;} - /**Returns a reference to the internal marker detector - */ - MarkerDetector &getMarkerDetector(){return _mdetector;} - /**Returns the vector of markers detected - */ - vector &getDetectedMarkers(){return _vmarkers;} - - - //ALTERNATIVE DETECTION METHOD, BASED ON MARKERS PREVIOUSLY DETECTED - - /** Given the markers detected, determines if there is the board passed - * @param detectedMarkers result provided by aruco::ArMarkerDetector - * @param BConf the board you want to see if is present - * @param Bdetected output information of the detected board - * @param camMatrix camera matrix with intrinsics - * @param distCoeff camera distorsion coeff - * @param camMatrix intrinsic camera information. - * @param distCoeff camera distorsion coefficient. If set Mat() if is assumed no camera distorion - * @param markerSizeMeters size of the marker sides expressed in meters - * @return value indicating the likelihood of having found the marker - */ - float detect(const vector &detectedMarkers,const BoardConfiguration &BConf, Board &Bdetected, cv::Mat camMatrix=cv::Mat(),cv::Mat distCoeff=cv::Mat(), float markerSizeMeters=-1 )throw (cv::Exception); - float detect(const vector &detectedMarkers,const BoardConfiguration &BConf, Board &Bdetected,const CameraParameters &cp, float markerSizeMeters=-1 )throw (cv::Exception); - - - /** - * By default, the Y axis is set to point up. However this is not the default - * operation mode of opencv, which produces the Z axis pointing up instead. - * So, to achieve this change, we have to rotate the X axis. 
- */ - void setYPerperdicular(bool enable){_setYPerperdicular=enable;} - - - - -private: - void rotateXAxis(cv::Mat &rotation); - bool _setYPerperdicular; - - //-- Functionality to detect markers inside - bool _areParamsSet; - BoardConfiguration _bconf; - Board _boardDetected; - float _markerSize; - CameraParameters _camParams; - MarkerDetector _mdetector;//internal markerdetector - vector _vmarkers;//markers detected in the call to : float detect(const cv::Mat &im); - -}; - -} -#endif - diff --git a/ftnoir_tracker_aruco/include/cameraparameters.h b/ftnoir_tracker_aruco/include/cameraparameters.h index c3381a74..a419afbe 100644 --- a/ftnoir_tracker_aruco/include/cameraparameters.h +++ b/ftnoir_tracker_aruco/include/cameraparameters.h @@ -28,7 +28,7 @@ or implied, of Rafael Muñoz Salinas. #ifndef _Aruco_CameraParameters_H #define _Aruco_CameraParameters_H #include "exports.h" -#include +#include #include using namespace std; namespace aruco @@ -105,7 +105,7 @@ public: * @param invert: indicates if the output projection matrix has to yield a horizontally inverted image because image data has not been stored in the order of glDrawPixels: bottom-to-top. */ void glGetProjectionMatrix( cv::Size orgImgSize, cv::Size size,double proj_matrix[16],double gnear,double gfar,bool invert=false )throw(cv::Exception); - + /** * setup camera for an Ogre project. * Use: @@ -117,7 +117,7 @@ public: * As in OpenGL, it assumes no camera distorsion */ void OgreGetProjectionMatrix( cv::Size orgImgSize, cv::Size size,double proj_matrix[16],double gnear,double gfar,bool invert=false )throw(cv::Exception); - + private: //GL routines diff --git a/ftnoir_tracker_aruco/include/cvdrawingutils.h b/ftnoir_tracker_aruco/include/cvdrawingutils.h index 38e9986e..24bfe630 100644 --- a/ftnoir_tracker_aruco/include/cvdrawingutils.h +++ b/ftnoir_tracker_aruco/include/cvdrawingutils.h @@ -33,19 +33,12 @@ namespace aruco { /**\brief A set of functions to draw in opencv images */ - class ARUCO_EXPORTS CvDrawingUtils - { - public: - - static void draw3dAxis(cv::Mat &Image,Marker &m,const CameraParameters &CP); - - static void draw3dCube(cv::Mat &Image,Marker &m,const CameraParameters &CP); - - static void draw3dAxis(cv::Mat &Image,Board &m,const CameraParameters &CP); - - static void draw3dCube(cv::Mat &Image,Board &m,const CameraParameters &CP); - - }; + class ARUCO_EXPORTS CvDrawingUtils + { + public: + static void draw3dAxis(cv::Mat &Image,Marker &m,const CameraParameters &CP); + static void draw3dCube(cv::Mat &Image,Marker &m,const CameraParameters &CP); + }; } #endif diff --git a/ftnoir_tracker_aruco/include/exports.h b/ftnoir_tracker_aruco/include/exports.h index 154605ec..044a1367 100644 --- a/ftnoir_tracker_aruco/include/exports.h +++ b/ftnoir_tracker_aruco/include/exports.h @@ -25,7 +25,7 @@ The views and conclusions contained in the software and documentation are those authors and should not be interpreted as representing official policies, either expressed or implied, of Rafael Muñoz Salinas. ********************************/ - + #ifndef __OPENARUCO_CORE_TYPES_H__ @@ -37,9 +37,9 @@ or implied, of Rafael Muñoz Salinas. 
#if (defined WIN32 || defined _WIN32 || defined WINCE) && defined DSO_EXPORTS - #define ARUCO_EXPORTS __declspec(dllexport) + #define ARUCO_EXPORTS __declspec(dllexport) __attribute__((visibility ("default"))) #else - #define ARUCO_EXPORTS + #define ARUCO_EXPORTS __attribute__((visibility ("default"))) #endif diff --git a/ftnoir_tracker_aruco/include/marker.h b/ftnoir_tracker_aruco/include/marker.h index dc6bb28c..89961002 100644 --- a/ftnoir_tracker_aruco/include/marker.h +++ b/ftnoir_tracker_aruco/include/marker.h @@ -29,7 +29,7 @@ or implied, of Rafael Muñoz Salinas. #define _Aruco_Marker_H #include #include -#include +#include #include "exports.h" #include "cameraparameters.h" using namespace std; @@ -81,12 +81,12 @@ public: * @param setYPerperdicular If set the Y axis will be perpendicular to the surface. Otherwise, it will be the Z axis */ void calculateExtrinsics(float markerSize,cv::Mat CameraMatrix,cv::Mat Distorsion=cv::Mat(),bool setYPerperdicular=true)throw(cv::Exception); - + /**Given the extrinsic camera parameters returns the GL_MODELVIEW matrix for opengl. * Setting this matrix, the reference coordinate system will be set in this marker */ void glGetModelViewMatrix( double modelview_matrix[16])throw(cv::Exception); - + /** * Returns position vector and orientation quaternion for an Ogre scene node or entity. * Use: @@ -97,8 +97,8 @@ public: * mySceneNode->setOrientation( ogreOrient ); * ... */ - void OgreGetPoseParameters( double position[3], double orientation[4] )throw(cv::Exception); - + void OgreGetPoseParameters( double position[3], double orientation[4] )throw(cv::Exception); + /**Returns the centroid of the marker */ cv::Point2f getCenter()const; @@ -132,11 +132,11 @@ public: return str; } - - + + private: void rotateXAxis(cv::Mat &rotation); - + }; } diff --git a/ftnoir_tracker_aruco/include/markerdetector.h b/ftnoir_tracker_aruco/include/markerdetector.h index 4d6e7b90..a4656527 100644 --- a/ftnoir_tracker_aruco/include/markerdetector.h +++ b/ftnoir_tracker_aruco/include/markerdetector.h @@ -27,7 +27,7 @@ or implied, of Rafael Muñoz Salinas. ********************************/ #ifndef _ARUCO_MarkerDetector_H #define _ARUCO_MarkerDetector_H -#include +#include #include #include #include "cameraparameters.h" @@ -47,7 +47,7 @@ class ARUCO_EXPORTS MarkerDetector class MarkerCandidate: public Marker{ public: MarkerCandidate(){} - MarkerCandidate(const Marker &M): Marker(M){} + MarkerCandidate(const Marker &M): Marker(M){} MarkerCandidate(const MarkerCandidate &M): Marker(M){ contour=M.contour; idx=M.idx; @@ -60,20 +60,20 @@ class ARUCO_EXPORTS MarkerDetector idx=M.idx; return M; } - + vector contour;//all the points of its contour int idx;//index position in the global contour list }; public: /** - * See + * See */ - MarkerDetector(); + MarkerDetector() {} /** */ - ~MarkerDetector(); + ~MarkerDetector() {} /**Detects the markers in the image passed * @@ -161,17 +161,17 @@ public: * of cols and rows. 
* @param min size of the contour to consider a possible marker as valid (0,1] * @param max size of the contour to consider a possible marker as valid [0,1) - * + * */ void setMinMaxSize(float min=0.03,float max=0.5)throw(cv::Exception); - + /**reads the min and max sizes employed * @param min output size of the contour to consider a possible marker as valid (0,1] * @param max output size of the contour to consider a possible marker as valid [0,1) - * + * */ void getMinMaxSize(float &min,float &max){min=_minSize;max=_maxSize;} - + /**Enables/Disables erosion process that is REQUIRED for chessboard like boards. * By default, this property is enabled */ @@ -210,10 +210,10 @@ public: markerIdDetector_ptrfunc=markerdetector_func; } - /** Use an smaller version of the input image for marker detection. + /** Use an smaller version of the input image for marker detection. * If your marker is small enough, you can employ an smaller image to perform the detection without noticeable reduction in the precision. * Internally, we are performing a pyrdown operation - * + * * @param level number of times the image size is divided by 2. Internally, we are performing a pyrdown. */ void pyrDown(unsigned int level){pyrdown_level=level;} @@ -247,17 +247,17 @@ public: * @return true if the operation succeed */ bool warp(cv::Mat &in,cv::Mat &out,cv::Size size, std::vector points)throw (cv::Exception); - - - + + + /** Refine MarkerCandidate Corner using LINES method * @param candidate candidate to refine corners */ - void refineCandidateLines(MarkerCandidate &candidate); - - + void refineCandidateLines(MarkerCandidate &candidate); + + /**DEPRECATED!!! Use the member function in CameraParameters - * + * * Given the intrinsic camera parameters returns the GL_PROJECTION matrix for opengl. * PLease NOTE that when using OpenGL, it is assumed no camera distorsion! 
So, if it is not true, you should have * undistor image @@ -308,26 +308,26 @@ private: */ int perimeter(std::vector &a); - + // //GL routines -// +// // static void argConvGLcpara2( double cparam[3][4], int width, int height, double gnear, double gfar, double m[16], bool invert )throw(cv::Exception); // static int arParamDecompMat( double source[3][4], double cpara[3][4], double trans[3][4] )throw(cv::Exception); // static double norm( double a, double b, double c ); // static double dot( double a1, double a2, double a3, // double b1, double b2, double b3 ); -// +// //detection of the void findBestCornerInRegion_harris(const cv::Mat & grey,vector & Corners,int blockSize); - - + + // auxiliar functions to perform LINES refinement void interpolate2Dline( const vector< cv::Point > &inPoints, cv::Point3f &outLine); - cv::Point2f getCrossPoint(const cv::Point3f& line1, const cv::Point3f& line2); - - - /**Given a vector vinout with elements and a boolean vector indicating the lements from it to remove, + cv::Point2f getCrossPoint(const cv::Point3f& line1, const cv::Point3f& line2); + + + /**Given a vector vinout with elements and a boolean vector indicating the lements from it to remove, * this function remove the elements * @param vinout * @param toRemove diff --git a/ftnoir_tracker_aruco/trans_calib.h b/ftnoir_tracker_aruco/trans_calib.h index 5c321b2c..c2c02b38 100644 --- a/ftnoir_tracker_aruco/trans_calib.h +++ b/ftnoir_tracker_aruco/trans_calib.h @@ -8,7 +8,7 @@ #ifndef TRANSCALIB_H #define TRANSCALIB_H -#include +#include //----------------------------------------------------------------------------- // Calibrates the translation from head to model = t_MH diff --git a/ftnoir_tracker_pt/camera.h b/ftnoir_tracker_pt/camera.h index 7ebbcb67..889bf2d3 100644 --- a/ftnoir_tracker_pt/camera.h +++ b/ftnoir_tracker_pt/camera.h @@ -8,7 +8,7 @@ #ifndef CAMERA_H #define CAMERA_H -#include +#include #ifndef OPENTRACK_API # include #else diff --git a/ftnoir_tracker_pt/ftnoir_tracker_pt.h b/ftnoir_tracker_pt/ftnoir_tracker_pt.h index 5bcfd37d..fff8d4ab 100644 --- a/ftnoir_tracker_pt/ftnoir_tracker_pt.h +++ b/ftnoir_tracker_pt/ftnoir_tracker_pt.h @@ -22,7 +22,7 @@ #include #include #include -#include +#include #include #ifndef OPENTRACK_API # include @@ -36,53 +36,53 @@ class Tracker : public ITracker, protected QThread { public: - Tracker(); + Tracker(); ~Tracker() override; void StartTracker(QFrame* parent_window) override; void GetHeadPoseData(double* data) override; void apply(settings& s); - void apply_inner(); - void center(); - void reset(); // reset the trackers internal state variables + void apply_inner(); + void center(); + void reset(); // reset the trackers internal state variables - void get_pose(FrameTrafo* X_CM) { QMutexLocker lock(&mutex); *X_CM = point_tracker.get_pose(); } - int get_n_points() { QMutexLocker lock(&mutex); return point_extractor.get_points().size(); } - void get_cam_info(CamInfo* info) { QMutexLocker lock(&mutex); *info = camera.get_info(); } + void get_pose(FrameTrafo* X_CM) { QMutexLocker lock(&mutex); *X_CM = point_tracker.get_pose(); } + int get_n_points() { QMutexLocker lock(&mutex); return point_extractor.get_points().size(); } + void get_cam_info(CamInfo* info) { QMutexLocker lock(&mutex); *info = camera.get_info(); } protected: void run() override; private: - QMutex mutex; - // thread commands - enum Command { - ABORT = 1<<0 - }; - void set_command(Command command); - void reset_command(Command command); + QMutex mutex; + // thread commands + enum Command { + 
ABORT = 1<<0 + }; + void set_command(Command command); + void reset_command(Command command); volatile int commands; CVCamera camera; - FrameRotation frame_rotation; - PointExtractor point_extractor; - PointTracker point_tracker; + FrameRotation frame_rotation; + PointExtractor point_extractor; + PointTracker point_tracker; - FrameTrafo X_GH_0; // for centering - cv::Vec3f t_MH; // translation from model frame to head frame - cv::Matx33f R_GC; // rotation from opengl reference frame to camera frame + FrameTrafo X_GH_0; // for centering + cv::Vec3f t_MH; // translation from model frame to head frame + cv::Matx33f R_GC; // rotation from opengl reference frame to camera frame - // --- ui --- - cv::Mat frame; // the output frame for display + // --- ui --- + cv::Mat frame; // the output frame for display PTVideoWidget* video_widget; - QFrame* video_frame; - + QFrame* video_frame; + settings s; std::atomic new_settings; Timer time; - + static constexpr double rad2deg = 180.0/3.14159265; static constexpr double deg2rad = 3.14159265/180.0; - + PointModel model; }; diff --git a/ftnoir_tracker_pt/ftnoir_tracker_pt_dialog.cpp b/ftnoir_tracker_pt/ftnoir_tracker_pt_dialog.cpp index 6cd6135c..3af7b560 100644 --- a/ftnoir_tracker_pt/ftnoir_tracker_pt_dialog.cpp +++ b/ftnoir_tracker_pt/ftnoir_tracker_pt_dialog.cpp @@ -9,7 +9,7 @@ #include #include -#include +#include #ifndef OPENTRACK_API # include #else @@ -25,14 +25,14 @@ TrackerDialog::TrackerDialog() timer(this), trans_calib_running(false) { - ui.setupUi( this ); + ui.setupUi( this ); vector device_names; - get_camera_device_names(device_names); + get_camera_device_names(device_names); for (vector::iterator iter = device_names.begin(); iter != device_names.end(); ++iter) - { - ui.camdevice_combo->addItem(iter->c_str()); - } + { + ui.camdevice_combo->addItem(iter->c_str()); + } ui.camroll_combo->addItem("-90"); ui.camroll_combo->addItem("0"); @@ -82,7 +82,7 @@ TrackerDialog::TrackerDialog() connect(ui.model_tabs, SIGNAL(currentChanged(int)), this, SLOT(set_model(int))); connect(&timer,SIGNAL(timeout()), this,SLOT(poll_tracker_info())); - timer.start(100); + timer.start(100); connect(ui.buttonBox_2, SIGNAL(clicked(QAbstractButton*)), this, SLOT(do_apply_without_saving(QAbstractButton*))); } @@ -96,7 +96,7 @@ void TrackerDialog::set_model_clip() s.m02_y = -static_cast(s.clip_by); s.m02_z = -static_cast(s.clip_bz); - settings_changed(); + settings_changed(); } void TrackerDialog::set_model_cap() @@ -108,12 +108,12 @@ void TrackerDialog::set_model_cap() s.m02_y = -static_cast(s.cap_y); s.m02_z = -static_cast(s.cap_z); - settings_changed(); + settings_changed(); } void TrackerDialog::set_model_custom() { - settings_changed(); + settings_changed(); } void TrackerDialog::set_model(int val) @@ -123,38 +123,38 @@ void TrackerDialog::set_model(int val) void TrackerDialog::startstop_trans_calib(bool start) { - if (start) - { - qDebug()<<"TrackerDialog:: Starting translation calibration"; - trans_calib.reset(); - trans_calib_running = true; - } - else - { - qDebug()<<"TrackerDialog:: Stoppping translation calibration"; - trans_calib_running = false; + if (start) + { + qDebug()<<"TrackerDialog:: Starting translation calibration"; + trans_calib.reset(); + trans_calib_running = true; + } + else + { + qDebug()<<"TrackerDialog:: Stoppping translation calibration"; + trans_calib_running = false; { auto tmp = trans_calib.get_estimate(); s.t_MH_x = tmp[0]; s.t_MH_y = tmp[1]; s.t_MH_z = tmp[2]; } - settings_changed(); - } + settings_changed(); + } } void 
TrackerDialog::poll_tracker_info() { if (tracker) - { + { QString to_print; - + // display caminfo CamInfo info; tracker->get_cam_info(&info); to_print = QString::number(info.res_x)+"x"+QString::number(info.res_y)+" @ "+QString::number(info.fps)+" FPS"; ui.caminfo_label->setText(to_print); - + // display pointinfo int n_points = tracker->get_n_points(); to_print = QString::number(n_points); @@ -163,7 +163,7 @@ void TrackerDialog::poll_tracker_info() else to_print += " BAD!"; ui.pointinfo_label->setText(to_print); - + // update calibration if (trans_calib_running) trans_calib_step(); } @@ -177,16 +177,16 @@ void TrackerDialog::poll_tracker_info() void TrackerDialog::trans_calib_step() { - if (tracker) - { - FrameTrafo X_CM; - tracker->get_pose(&X_CM); - trans_calib.update(X_CM.R, X_CM.t); - cv::Vec3f t_MH = trans_calib.get_estimate(); + if (tracker) + { + FrameTrafo X_CM; + tracker->get_pose(&X_CM); + trans_calib.update(X_CM.R, X_CM.t); + cv::Vec3f t_MH = trans_calib.get_estimate(); s.t_MH_x = t_MH[0]; s.t_MH_y = t_MH[1]; s.t_MH_z = t_MH[2]; - } + } } void TrackerDialog::settings_changed() @@ -203,7 +203,7 @@ void TrackerDialog::save() void TrackerDialog::doOK() { save(); - close(); + close(); } void TrackerDialog::do_apply_without_saving(QAbstractButton*) @@ -225,7 +225,7 @@ void TrackerDialog::do_apply_without_saving(QAbstractButton*) void TrackerDialog::doApply() { - save(); + save(); } void TrackerDialog::doCancel() @@ -236,23 +236,23 @@ void TrackerDialog::doCancel() void TrackerDialog::registerTracker(ITracker *t) { - qDebug()<<"TrackerDialog:: Tracker registered"; - tracker = static_cast(t); + qDebug()<<"TrackerDialog:: Tracker registered"; + tracker = static_cast(t); if (isVisible() & s.b->modifiedp()) tracker->apply(s); - ui.tcalib_button->setEnabled(true); - //ui.center_button->setEnabled(true); + ui.tcalib_button->setEnabled(true); + //ui.center_button->setEnabled(true); } void TrackerDialog::unRegisterTracker() { - qDebug()<<"TrackerDialog:: Tracker un-registered"; - tracker = NULL; - ui.tcalib_button->setEnabled(false); - //ui.center_button->setEnabled(false); + qDebug()<<"TrackerDialog:: Tracker un-registered"; + tracker = NULL; + ui.tcalib_button->setEnabled(false); + //ui.center_button->setEnabled(false); } extern "C" OPENTRACK_EXPORT ITrackerDialog* GetDialog( ) { - return new TrackerDialog; + return new TrackerDialog; } diff --git a/ftnoir_tracker_pt/ftnoir_tracker_pt_settings.h b/ftnoir_tracker_pt/ftnoir_tracker_pt_settings.h index 365776e4..e8cac679 100644 --- a/ftnoir_tracker_pt/ftnoir_tracker_pt_settings.h +++ b/ftnoir_tracker_pt/ftnoir_tracker_pt_settings.h @@ -8,7 +8,7 @@ #ifndef FTNOIR_TRACKER_PT_SETTINGS_H #define FTNOIR_TRACKER_PT_SETTINGS_H -#include +#include #include "point_tracker.h" #include "facetracknoir/options.h" @@ -36,7 +36,7 @@ struct settings value clip_ty, clip_tz, clip_by, clip_bz; value active_model_panel, cap_x, cap_y, cap_z; - + // XXX todo red channel only, good for crapola CCD sensors -sh 20140922 settings() : diff --git a/ftnoir_tracker_pt/point_extractor.h b/ftnoir_tracker_pt/point_extractor.h index 3ef82900..5252b68d 100644 --- a/ftnoir_tracker_pt/point_extractor.h +++ b/ftnoir_tracker_pt/point_extractor.h @@ -8,8 +8,8 @@ #ifndef POINTEXTRACTOR_H #define POINTEXTRACTOR_H -#include -#include +#include +#include // ---------------------------------------------------------------------------- // Extracts points from an opencv image diff --git a/ftnoir_tracker_pt/point_tracker.h b/ftnoir_tracker_pt/point_tracker.h index c8212538..d65494a4 100644 
--- a/ftnoir_tracker_pt/point_tracker.h +++ b/ftnoir_tracker_pt/point_tracker.h @@ -8,7 +8,7 @@ #ifndef POINTTRACKER_H #define POINTTRACKER_H -#include +#include #ifndef OPENTRACK_API # include #else @@ -21,31 +21,31 @@ class FrameTrafo { public: - FrameTrafo() : R(cv::Matx33f::eye()), t(0,0,0) {} - FrameTrafo(const cv::Matx33f& R, const cv::Vec3f& t) : R(R),t(t) {} + FrameTrafo() : R(cv::Matx33f::eye()), t(0,0,0) {} + FrameTrafo(const cv::Matx33f& R, const cv::Vec3f& t) : R(R),t(t) {} - cv::Matx33f R; - cv::Vec3f t; + cv::Matx33f R; + cv::Vec3f t; }; inline FrameTrafo operator*(const FrameTrafo& X, const FrameTrafo& Y) { - return FrameTrafo(X.R*Y.R, X.R*Y.t + X.t); + return FrameTrafo(X.R*Y.R, X.R*Y.t + X.t); } inline FrameTrafo operator*(const cv::Matx33f& X, const FrameTrafo& Y) { - return FrameTrafo(X*Y.R, X*Y.t); + return FrameTrafo(X*Y.R, X*Y.t); } inline FrameTrafo operator*(const FrameTrafo& X, const cv::Matx33f& Y) { - return FrameTrafo(X.R*Y, X.t); + return FrameTrafo(X.R*Y, X.t); } inline cv::Vec3f operator*(const FrameTrafo& X, const cv::Vec3f& v) { - return X.R*v + X.t; + return X.R*v + X.t; } @@ -55,28 +55,28 @@ inline cv::Vec3f operator*(const FrameTrafo& X, const cv::Vec3f& v) // [Denis Oberkampf, Daniel F. DeMenthon, Larry S. Davis: "Iterative Pose Estimation Using Coplanar Feature Points"] class PointModel { - friend class PointTracker; + friend class PointTracker; public: - static constexpr int N_POINTS = 3; + static constexpr int N_POINTS = 3; - PointModel(cv::Vec3f M01, cv::Vec3f M02); + PointModel(cv::Vec3f M01, cv::Vec3f M02); PointModel(); - inline const cv::Vec3f& get_M01() const { return M01; } - inline const cv::Vec3f& get_M02() const { return M02; } + inline const cv::Vec3f& get_M01() const { return M01; } + inline const cv::Vec3f& get_M02() const { return M02; } private: - cv::Vec3f M01; // M01 in model frame - cv::Vec3f M02; // M02 in model frame + cv::Vec3f M01; // M01 in model frame + cv::Vec3f M02; // M02 in model frame - cv::Vec3f u; // unit vector perpendicular to M01,M02-plane + cv::Vec3f u; // unit vector perpendicular to M01,M02-plane - cv::Matx22f P; + cv::Matx22f P; - cv::Vec2f d; // determinant vector for point correspondence - int d_order[3]; // sorting of projected model points with respect to d scalar product + cv::Vec2f d; // determinant vector for point correspondence + int d_order[3]; // sorting of projected model points with respect to d scalar product - void get_d_order(const std::vector& points, int d_order[]) const; + void get_d_order(const std::vector& points, int d_order[]) const; }; // ---------------------------------------------------------------------------- @@ -86,29 +86,29 @@ private: class PointTracker { public: - PointTracker(); - // track the pose using the set of normalized point coordinates (x pos in range -0.5:0.5) - // f : (focal length)/(sensor width) - // dt : time since last call - void track(const std::vector& projected_points, const PointModel& model); - FrameTrafo get_pose() const { return X_CM; } - void reset(); + PointTracker(); + // track the pose using the set of normalized point coordinates (x pos in range -0.5:0.5) + // f : (focal length)/(sensor width) + // dt : time since last call + void track(const std::vector& projected_points, const PointModel& model); + FrameTrafo get_pose() const { return X_CM; } + void reset(); private: // the points in model order typedef struct { cv::Vec2f points[PointModel::N_POINTS]; } PointOrder; - static constexpr float focal_length = 1.0f; - - inline cv::Vec2f project(const 
cv::Vec3f& v_M) - { - cv::Vec3f v_C = X_CM * v_M; - return cv::Vec2f(focal_length*v_C[0]/v_C[2], focal_length*v_C[1]/v_C[2]); - } + static constexpr float focal_length = 1.0f; + + inline cv::Vec2f project(const cv::Vec3f& v_M) + { + cv::Vec3f v_C = X_CM * v_M; + return cv::Vec2f(focal_length*v_C[0]/v_C[2], focal_length*v_C[1]/v_C[2]); + } PointOrder find_correspondences(const std::vector& projected_points, const PointModel &model); int POSIT(const PointModel& point_model, const PointOrder& order); // The POSIT algorithm, returns the number of iterations - - FrameTrafo X_CM; // trafo from model to camera + + FrameTrafo X_CM; // trafo from model to camera }; #endif //POINTTRACKER_H diff --git a/ftnoir_tracker_pt/pt_video_widget.h b/ftnoir_tracker_pt/pt_video_widget.h index de2c7efb..f2b41d63 100644 --- a/ftnoir_tracker_pt/pt_video_widget.h +++ b/ftnoir_tracker_pt/pt_video_widget.h @@ -10,7 +10,7 @@ #include #include #include -#include +#include #ifndef OPENTRACK_API # include # include diff --git a/ftnoir_tracker_pt/trans_calib.h b/ftnoir_tracker_pt/trans_calib.h index 5c321b2c..c2c02b38 100644 --- a/ftnoir_tracker_pt/trans_calib.h +++ b/ftnoir_tracker_pt/trans_calib.h @@ -8,7 +8,7 @@ #ifndef TRANSCALIB_H #define TRANSCALIB_H -#include +#include //----------------------------------------------------------------------------- // Calibrates the translation from head to model = t_MH -- cgit v1.2.3