author | Stanislaw Halik <sthalik@misaki.pl> | 2014-01-08 21:45:34 +0100
committer | Stanislaw Halik <sthalik@misaki.pl> | 2014-01-08 21:45:34 +0100
commit | 0a32ae32452b02fae0036f9c5099b043d0c9052e (patch)
tree | 9accaf85371174de22c4df260be2a3eb1104b101
parent | a6bb6becb0365f2dc79a24d0e3979d42f1496a21 (diff)
buffer flush
-rw-r--r-- | FTNoIR_Tracker_PT/ftnoir_tracker_pt.cpp | 30
-rw-r--r-- | FTNoIR_Tracker_PT/point_tracker.cpp | 33
-rw-r--r-- | FTNoIR_Tracker_PT/point_tracker.h | 18
3 files changed, 47 insertions, 34 deletions
diff --git a/FTNoIR_Tracker_PT/ftnoir_tracker_pt.cpp b/FTNoIR_Tracker_PT/ftnoir_tracker_pt.cpp
index a3e8919b..7bd447cb 100644
--- a/FTNoIR_Tracker_PT/ftnoir_tracker_pt.cpp
+++ b/FTNoIR_Tracker_PT/ftnoir_tracker_pt.cpp
@@ -83,8 +83,8 @@ void Tracker::run()
if (new_frame && !frame.empty())
{
frame = frame_rotation.rotate_frame(frame);
- const std::vector<cv::Vec2f>& points = point_extractor.extract_points(frame, dt, false);
- tracking_valid = point_tracker.track(points, camera.get_info().fov, dt, frame.cols, frame.rows, t_MH);
+ const std::vector<cv::Vec2f>& points = point_extractor.extract_points(frame, dt, true);
+ tracking_valid = point_tracker.track(points, camera.get_info().fov, dt, frame.cols, frame.rows);
video_widget->update_image(frame);
}
#ifdef PT_PERF_LOG
@@ -119,15 +119,13 @@ void Tracker::apply(settings& s)
point_tracker.dt_reset = s.reset_time / 1000.0;
t_MH = cv::Vec3f(s.t_MH_x, s.t_MH_y, s.t_MH_z);
R_GC = Matx33f( cos(deg2rad*s.cam_yaw), 0, sin(deg2rad*s.cam_yaw),
- 0, 1, 0,
- -sin(deg2rad*s.cam_yaw), 0, cos(deg2rad*s.cam_yaw));
- R_GC = R_GC * Matx33f( 1, 0, 0,
+ 0, 1, 0,
+ -sin(deg2rad*s.cam_yaw), 0, cos(deg2rad*s.cam_yaw));
+ R_GC = R_GC * Matx33f( 1, 0, 0,
0, cos(deg2rad*s.cam_pitch), sin(deg2rad*s.cam_pitch),
0, -sin(deg2rad*s.cam_pitch), cos(deg2rad*s.cam_pitch));
-
- FrameTrafo X_MH(Matx33f::eye(), t_MH);
- X_GH_0 = R_GC * X_MH;
-
+ FrameTrafo X_MH(Matx33f::eye(), t_MH);
+ X_GH_0 = R_GC * X_MH;
qDebug()<<"Tracker::apply ends";
}
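[Editorial note, not part of the commit: the hunk above mostly re-indents the construction of R_GC, which composes a yaw rotation about the vertical axis with a pitch rotation about the lateral axis from the camera settings. A minimal standalone sketch of the same construction follows; the parameter names cam_yaw_deg / cam_pitch_deg stand in for s.cam_yaw / s.cam_pitch.]

#include <opencv2/core/core.hpp>
#include <cmath>

// camera-to-global rotation built from yaw (about Y) followed by pitch (about X)
cv::Matx33f make_R_GC(float cam_yaw_deg, float cam_pitch_deg)
{
    const float deg2rad = 3.14159265f / 180.f;
    const float y = deg2rad * cam_yaw_deg;
    const float p = deg2rad * cam_pitch_deg;
    cv::Matx33f R_yaw(  std::cos(y), 0, std::sin(y),
                        0,           1, 0,
                       -std::sin(y), 0, std::cos(y));
    cv::Matx33f R_pitch(1, 0,            0,
                        0, std::cos(p),  std::sin(p),
                        0, -std::sin(p), std::cos(p));
    return R_yaw * R_pitch;
}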
@@ -140,10 +138,10 @@ void Tracker::reset()
void Tracker::center()
{
point_tracker.reset(); // we also do a reset here since there is no reset shortkey yet
- QMutexLocker lock(&mutex);
- FrameTrafo X_CM_0 = point_tracker.get_pose();
- FrameTrafo X_MH(Matx33f::eye(), t_MH);
- X_GH_0 = R_GC * X_CM_0 * X_MH;
+ QMutexLocker lock(&mutex);
+ FrameTrafo X_CM_0 = point_tracker.get_pose();
+ FrameTrafo X_MH(Matx33f::eye(), t_MH);
+ X_GH_0 = R_GC * X_CM_0 * X_MH;
}
bool Tracker::get_frame_and_points(cv::Mat& frame_copy, boost::shared_ptr< std::vector<Vec2f> >& points)
@@ -198,10 +196,10 @@ void Tracker::GetHeadPoseData(THeadPoseData *data)
if (!tracking_valid) return;
FrameTrafo X_CM = point_tracker.get_pose();
- FrameTrafo X_MH(Matx33f::eye(), t_MH);
- FrameTrafo X_GH = R_GC * X_CM * X_MH;
+ FrameTrafo X_MH(Matx33f::eye(), t_MH);
+ FrameTrafo X_GH = R_GC * X_CM * X_MH;
Matx33f R = X_GH.R * X_GH_0.R.t();
- Vec3f t = X_GH.t - X_GH_0.t;
+ Vec3f t = X_GH.t - X_GH_0.t;
// get translation(s)
if (s.bEnableX) data[TX] = t[0] / 10.0; // convert to cm
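[Editorial note, not part of the commit: Tracker::center() and Tracker::GetHeadPoseData() above both build the head pose in the global frame as X_GH = R_GC * X_CM * X_MH and then report it relative to the reference pose X_GH_0 stored at centering time. A minimal sketch of that chain, with all types simplified to double precision and FrameTrafo as defined in point_tracker.h below.]

#include <opencv2/core/core.hpp>
#include "point_tracker.h"

struct PoseDelta { cv::Matx33d R; cv::Vec3d t; };

// head pose relative to the centered reference pose
PoseDelta relative_pose(const FrameTrafo& X_CM,   // model pose in the camera frame (from PointTracker)
                        const cv::Matx33d& R_GC,  // camera-to-global rotation (from the settings)
                        const cv::Vec3d& t_MH,    // model-to-head translation offset
                        const FrameTrafo& X_GH_0) // reference pose captured by center()
{
    FrameTrafo X_MH(cv::Matx33d::eye(), t_MH);    // pure translation, no rotation
    FrameTrafo X_GH = R_GC * X_CM * X_MH;         // head pose in the global frame
    PoseDelta d;
    d.R = X_GH.R * X_GH_0.R.t();                  // rotation relative to the reference
    d.t = X_GH.t - X_GH_0.t;                      // translation relative to the reference
    return d;
}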
diff --git a/FTNoIR_Tracker_PT/point_tracker.cpp b/FTNoIR_Tracker_PT/point_tracker.cpp
index 1df70b17..263be43a 100644
--- a/FTNoIR_Tracker_PT/point_tracker.cpp
+++ b/FTNoIR_Tracker_PT/point_tracker.cpp
@@ -104,6 +104,9 @@ void PointTracker::reset()
init_phase = true;
dt_valid = 0;
reset_velocities();
+ // assume identity rotation again
+ X_CM.R = cv::Matx33d::eye();
+ X_CM.t = cv::Vec3f();
}
void PointTracker::reset_velocities()
@@ -113,7 +116,7 @@ void PointTracker::reset_velocities()
}
-bool PointTracker::track(const vector<Vec2f>& points, float fov, float dt, int w, int h, const cv::Vec3f& headpos)
+bool PointTracker::track(const vector<Vec2f>& points, float fov, float dt, int w, int h)
{
if (!dynamic_pose_resolution) init_phase = true;
@@ -150,7 +153,7 @@ bool PointTracker::track(const vector<Vec2f>& points, float fov, float dt, int w
}
// XXX TODO fov
- POSIT(fov, w, h, headpos);
+ POSIT(fov, w, h);
//qDebug()<<"Number of POSIT iterations: "<<n_iter;
if (!init_phase)
@@ -165,7 +168,7 @@ bool PointTracker::track(const vector<Vec2f>& points, float fov, float dt, int w
void PointTracker::predict(float dt)
{
// predict with constant velocity
- Matx33f R;
+ Matx33d R;
Rodrigues(dt*v_r, R);
X_CM.R = R*X_CM.R;
X_CM.t += dt * v_t;
@@ -232,7 +235,7 @@ bool PointTracker::find_correspondences(const vector<Vec2f>& points)
-void PointTracker::POSIT(float fov, int w, int h, const cv::Vec3f& headpos)
+void PointTracker::POSIT(float fov, int w, int h)
{
// XXX hack
this->fov = fov;
@@ -241,9 +244,9 @@ void PointTracker::POSIT(float fov, int w, int h, const cv::Vec3f& headpos)
std::vector<cv::Point3f> obj_points;
std::vector<cv::Point2f> img_points;
- obj_points.push_back(headpos);
- obj_points.push_back(point_model->M01 + headpos);
- obj_points.push_back(point_model->M02 + headpos);
+ obj_points.push_back(cv::Vec3f(0, 0, 0));
+ obj_points.push_back(point_model->M01);
+ obj_points.push_back(point_model->M02);
img_points.push_back(p[0]);
img_points.push_back(p[1]);
@@ -262,17 +265,29 @@ void PointTracker::POSIT(float fov, int w, int h, const cv::Vec3f& headpos)
cv::Mat dist_coeffs = cv::Mat::zeros(5, 1, CV_32FC1);
- bool lastp = !rvec.empty() && !tvec.empty() && !init_phase;
+ bool lastp = !rvec.empty() && !tvec.empty();
cv::solvePnP(obj_points, img_points, intrinsics, dist_coeffs, rvec, tvec, lastp, cv::ITERATIVE);
cv::Mat rmat;
cv::Rodrigues(rvec, rmat);
+ // finally, find the closer solution
+ cv::Mat expected = cv::Mat(X_CM.R);
+ cv::Mat eye = cv::Mat::eye(3, 3, CV_64FC1);
+ double dev1 = norm(eye - expected * rmat.t());
+ double dev2 = norm(eye - expected * rmat);
+
+ if (dev1 > dev2)
+ {
+ rmat = rmat.t();
+ cv::Rodrigues(rmat, rvec);
+ }
+
// apply results
for (int i = 0; i < 3; i++)
{
- X_CM.t[i] = tvec.at<double>(i);
+ X_CM.t[i] = tvec.at<double>(i) * 1e-2;
for (int j = 0; j < 3; j++)
X_CM.R(i, j) = rmat.at<double>(i, j);
}
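[Editorial note, not part of the commit: the main behavioural change in PointTracker::POSIT() above is that solvePnP is now warm-started from the previous rvec/tvec whenever they exist, the returned rotation is checked against the rotation tracked so far (X_CM.R) and replaced by its transpose when that is the closer solution, and the translation is scaled by 1e-2. A standalone sketch of the disambiguation step, with the expected rotation passed in explicitly:]

#include <opencv2/core/core.hpp>
#include <opencv2/calib3d/calib3d.hpp>

// keep whichever of rmat / rmat^T deviates less from the rotation tracked so far
void pick_closer_rotation(const cv::Matx33d& expected_R, cv::Mat& rmat, cv::Mat& rvec)
{
    cv::Mat expected = cv::Mat(expected_R);
    cv::Mat eye = cv::Mat::eye(3, 3, CV_64FC1);
    // for a rotation matrix the inverse is the transpose, so expected * rmat.t()
    // is close to identity exactly when rmat is close to expected_R
    double dev1 = cv::norm(eye - expected * rmat.t());
    double dev2 = cv::norm(eye - expected * rmat);
    if (dev1 > dev2)
    {
        rmat = rmat.t();            // the transposed solution is the closer one
        cv::Rodrigues(rmat, rvec);  // keep rvec consistent as the next solvePnP guess
    }
}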
diff --git a/FTNoIR_Tracker_PT/point_tracker.h b/FTNoIR_Tracker_PT/point_tracker.h
index e05e8f98..823d75c0 100644
--- a/FTNoIR_Tracker_PT/point_tracker.h
+++ b/FTNoIR_Tracker_PT/point_tracker.h
@@ -21,11 +21,11 @@ class FrameTrafo
{
public:
- FrameTrafo() : R(cv::Matx33f::eye()), t(0,0,0) {}
- FrameTrafo(const cv::Matx33f& R, const cv::Vec3f& t) : R(R),t(t) {}
+ FrameTrafo() : R(cv::Matx33d::eye()), t(0,0,0) {}
+ FrameTrafo(const cv::Matx33d& R, const cv::Vec3d& t) : R(R),t(t) {}
- cv::Matx33f R;
- cv::Vec3f t;
+ cv::Matx33d R;
+ cv::Vec3d t;
};
inline FrameTrafo operator*(const FrameTrafo& X, const FrameTrafo& Y)
@@ -33,17 +33,17 @@ inline FrameTrafo operator*(const FrameTrafo& X, const FrameTrafo& Y)
return FrameTrafo(X.R*Y.R, X.R*Y.t + X.t);
}
-inline FrameTrafo operator*(const cv::Matx33f& X, const FrameTrafo& Y)
+inline FrameTrafo operator*(const cv::Matx33d& X, const FrameTrafo& Y)
{
return FrameTrafo(X*Y.R, X*Y.t);
}
-inline FrameTrafo operator*(const FrameTrafo& X, const cv::Matx33f& Y)
+inline FrameTrafo operator*(const FrameTrafo& X, const cv::Matx33d& Y)
{
return FrameTrafo(X.R*Y, X.t);
}
-inline cv::Vec3f operator*(const FrameTrafo& X, const cv::Vec3f& v)
+inline cv::Vec3f operator*(const FrameTrafo& X, const cv::Vec3d& v)
{
return X.R*v + X.t;
}
@@ -90,7 +90,7 @@ public:
// track the pose using the set of normalized point coordinates (x pos in range -0.5:0.5)
// f : (focal length)/(sensor width)
// dt : time since last call
- bool track(const std::vector<cv::Vec2f>& points, float fov, float dt, int w, int h, const cv::Vec3f &headpos);
+ bool track(const std::vector<cv::Vec2f>& points, float fov, float dt, int w, int h);
boost::shared_ptr<PointModel> point_model;
bool dynamic_pose_resolution;
@@ -136,7 +136,7 @@ protected:
void reset_velocities();
- void POSIT(float fov, int w, int h, const cv::Vec3f &headpos); // The POSIT algorithm, returns the number of iterations
+ void POSIT(float fov, int w, int h); // The POSIT algorithm, returns the number of iterations
bool init_phase;
float dt_valid; // time since last valid tracking result
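[Editorial note, not part of the commit: with FrameTrafo switched to double precision above, a transform still applies to a point as X.R*v + X.t and composes as shown in operator*. A tiny usage sketch with made-up numbers:]

#include <opencv2/core/core.hpp>
#include "point_tracker.h"

void frame_trafo_example()
{
    FrameTrafo X_CM(cv::Matx33d::eye(), cv::Vec3d(0, 0, 50)); // model origin 50 units in front of the camera
    FrameTrafo X_MH(cv::Matx33d::eye(), cv::Vec3d(0, 10, 0)); // head origin 10 units above the model origin
    FrameTrafo X_CH = X_CM * X_MH;                            // camera-from-head transform
    cv::Vec3f head_in_cam = X_CH * cv::Vec3d(0, 0, 0);        // == (0, 10, 50)
    (void)head_in_cam;
}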