author     Stanislaw Halik <sthalik@misaki.pl>   2014-01-12 19:54:03 +0100
committer  Stanislaw Halik <sthalik@misaki.pl>   2014-01-12 19:54:03 +0100
commit     9d21e7d23fa17b91fb76a14d90630b027b78c2c8 (patch)
tree       df023ec4d0de77826d1e2e8fe72331f9b82f4e85 /FTNoIR_Tracker_PT
parent     80c43ba62517d1228423f9d1992999edda612616 (diff)
Revert "buffer flush"
This reverts commit 0f6a0d8b20aa1d7415f6d0596f91ea9b766ecf69.
Diffstat (limited to 'FTNoIR_Tracker_PT')
-rw-r--r--  FTNoIR_Tracker_PT/ftnoir_tracker_pt.cpp  30
-rw-r--r--  FTNoIR_Tracker_PT/point_tracker.cpp      33
-rw-r--r--  FTNoIR_Tracker_PT/point_tracker.h        18
3 files changed, 34 insertions, 47 deletions
diff --git a/FTNoIR_Tracker_PT/ftnoir_tracker_pt.cpp b/FTNoIR_Tracker_PT/ftnoir_tracker_pt.cpp
index 83bf6911..740cf3da 100644
--- a/FTNoIR_Tracker_PT/ftnoir_tracker_pt.cpp
+++ b/FTNoIR_Tracker_PT/ftnoir_tracker_pt.cpp
@@ -82,8 +82,8 @@ void Tracker::run()
if (new_frame && !frame.empty())
{
frame = frame_rotation.rotate_frame(frame);
- const std::vector<cv::Vec2f>& points = point_extractor.extract_points(frame, dt, true);
- tracking_valid = point_tracker.track(points, camera.get_info().fov, dt, frame.cols, frame.rows);
+ const std::vector<cv::Vec2f>& points = point_extractor.extract_points(frame, dt, false);
+ tracking_valid = point_tracker.track(points, camera.get_info().fov, dt, frame.cols, frame.rows, t_MH);
video_widget->update_image(frame);
}
#ifdef PT_PERF_LOG
@@ -118,13 +118,15 @@ void Tracker::apply(settings& s)
point_tracker.dt_reset = s.reset_time / 1000.0;
t_MH = cv::Vec3f(s.t_MH_x, s.t_MH_y, s.t_MH_z);
R_GC = Matx33f( cos(deg2rad*s.cam_yaw), 0, sin(deg2rad*s.cam_yaw),
- 0, 1, 0,
- -sin(deg2rad*s.cam_yaw), 0, cos(deg2rad*s.cam_yaw));
- R_GC = R_GC * Matx33f( 1, 0, 0,
+ 0, 1, 0,
+ -sin(deg2rad*s.cam_yaw), 0, cos(deg2rad*s.cam_yaw));
+ R_GC = R_GC * Matx33f( 1, 0, 0,
0, cos(deg2rad*s.cam_pitch), sin(deg2rad*s.cam_pitch),
0, -sin(deg2rad*s.cam_pitch), cos(deg2rad*s.cam_pitch));
- FrameTrafo X_MH(Matx33f::eye(), t_MH);
- X_GH_0 = R_GC * X_MH;
+
+ FrameTrafo X_MH(Matx33f::eye(), t_MH);
+ X_GH_0 = R_GC * X_MH;
+
qDebug()<<"Tracker::apply ends";
}
@@ -137,10 +139,10 @@ void Tracker::reset()
void Tracker::center()
{
point_tracker.reset(); // we also do a reset here since there is no reset shortkey yet
- QMutexLocker lock(&mutex);
- FrameTrafo X_CM_0 = point_tracker.get_pose();
- FrameTrafo X_MH(Matx33f::eye(), t_MH);
- X_GH_0 = R_GC * X_CM_0 * X_MH;
+ QMutexLocker lock(&mutex);
+ FrameTrafo X_CM_0 = point_tracker.get_pose();
+ FrameTrafo X_MH(Matx33f::eye(), t_MH);
+ X_GH_0 = R_GC * X_CM_0 * X_MH;
}
bool Tracker::get_frame_and_points(cv::Mat& frame_copy, boost::shared_ptr< std::vector<Vec2f> >& points)
@@ -195,10 +197,10 @@ void Tracker::GetHeadPoseData(THeadPoseData *data)
if (!tracking_valid) return;
FrameTrafo X_CM = point_tracker.get_pose();
- FrameTrafo X_MH(Matx33f::eye(), t_MH);
- FrameTrafo X_GH = R_GC * X_CM * X_MH;
+ FrameTrafo X_MH(Matx33f::eye(), t_MH);
+ FrameTrafo X_GH = R_GC * X_CM * X_MH;
Matx33f R = X_GH.R * X_GH_0.R.t();
- Vec3f t = X_GH.t - X_GH_0.t;
+ Vec3f t = X_GH.t - X_GH_0.t;
// get translation(s)
if (s.bEnableX) data[TX] = t[0] / 10.0; // convert to cm
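
For reference, a minimal stand-alone sketch (not the tracker's own code; it assumes only the float-based FrameTrafo from point_tracker.h further down) of how the pose reported by GetHeadPoseData() is formed relative to the reference captured by Tracker::center():

// Sketch only: compose rigid transforms and take the delta against the
// centered reference, as the hunks above do with X_GH and X_GH_0.
#include <opencv2/core/core.hpp>

struct FrameTrafo
{
    cv::Matx33f R;
    cv::Vec3f t;
    FrameTrafo(const cv::Matx33f& R_ = cv::Matx33f::eye(),
               const cv::Vec3f& t_ = cv::Vec3f(0, 0, 0)) : R(R_), t(t_) {}
};

// Composition rule used by the operator* overloads in point_tracker.h.
inline FrameTrafo compose(const FrameTrafo& X, const FrameTrafo& Y)
{
    return FrameTrafo(X.R * Y.R, X.R * Y.t + X.t);
}

// Pose relative to the centered reference X_GH_0: the rotation delta is
// R * R_0^T, the translation delta is a plain difference.
inline void relative_pose(const FrameTrafo& X_GH, const FrameTrafo& X_GH_0,
                          cv::Matx33f& R, cv::Vec3f& t)
{
    R = X_GH.R * X_GH_0.R.t();
    t = X_GH.t - X_GH_0.t;
}
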
diff --git a/FTNoIR_Tracker_PT/point_tracker.cpp b/FTNoIR_Tracker_PT/point_tracker.cpp
index 263be43a..1df70b17 100644
--- a/FTNoIR_Tracker_PT/point_tracker.cpp
+++ b/FTNoIR_Tracker_PT/point_tracker.cpp
@@ -104,9 +104,6 @@ void PointTracker::reset()
init_phase = true;
dt_valid = 0;
reset_velocities();
- // assume identity rotation again
- X_CM.R = cv::Matx33d::eye();
- X_CM.t = cv::Vec3f();
}
void PointTracker::reset_velocities()
@@ -116,7 +113,7 @@ void PointTracker::reset_velocities()
}
-bool PointTracker::track(const vector<Vec2f>& points, float fov, float dt, int w, int h)
+bool PointTracker::track(const vector<Vec2f>& points, float fov, float dt, int w, int h, const cv::Vec3f& headpos)
{
if (!dynamic_pose_resolution) init_phase = true;
@@ -153,7 +150,7 @@ bool PointTracker::track(const vector<Vec2f>& points, float fov, float dt, int w
}
// XXX TODO fov
- POSIT(fov, w, h);
+ POSIT(fov, w, h, headpos);
//qDebug()<<"Number of POSIT iterations: "<<n_iter;
if (!init_phase)
@@ -168,7 +165,7 @@ bool PointTracker::track(const vector<Vec2f>& points, float fov, float dt, int w
void PointTracker::predict(float dt)
{
// predict with constant velocity
- Matx33d R;
+ Matx33f R;
Rodrigues(dt*v_r, R);
X_CM.R = R*X_CM.R;
X_CM.t += dt * v_t;
@@ -235,7 +232,7 @@ bool PointTracker::find_correspondences(const vector<Vec2f>& points)
-void PointTracker::POSIT(float fov, int w, int h)
+void PointTracker::POSIT(float fov, int w, int h, const cv::Vec3f& headpos)
{
// XXX hack
this->fov = fov;
@@ -244,9 +241,9 @@ void PointTracker::POSIT(float fov, int w, int h)
std::vector<cv::Point3f> obj_points;
std::vector<cv::Point2f> img_points;
- obj_points.push_back(cv::Vec3f(0, 0, 0));
- obj_points.push_back(point_model->M01);
- obj_points.push_back(point_model->M02);
+ obj_points.push_back(headpos);
+ obj_points.push_back(point_model->M01 + headpos);
+ obj_points.push_back(point_model->M02 + headpos);
img_points.push_back(p[0]);
img_points.push_back(p[1]);
@@ -265,29 +262,17 @@ void PointTracker::POSIT(float fov, int w, int h)
cv::Mat dist_coeffs = cv::Mat::zeros(5, 1, CV_32FC1);
- bool lastp = !rvec.empty() && !tvec.empty();
+ bool lastp = !rvec.empty() && !tvec.empty() && !init_phase;
cv::solvePnP(obj_points, img_points, intrinsics, dist_coeffs, rvec, tvec, lastp, cv::ITERATIVE);
cv::Mat rmat;
cv::Rodrigues(rvec, rmat);
- // finally, find the closer solution
- cv::Mat expected = cv::Mat(X_CM.R);
- cv::Mat eye = cv::Mat::eye(3, 3, CV_64FC1);
- double dev1 = norm(eye - expected * rmat.t());
- double dev2 = norm(eye - expected * rmat);
-
- if (dev1 > dev2)
- {
- rmat = rmat.t();
- cv::Rodrigues(rmat, rvec);
- }
-
// apply results
for (int i = 0; i < 3; i++)
{
- X_CM.t[i] = tvec.at<double>(i) * 1e-2;
+ X_CM.t[i] = tvec.at<double>(i);
for (int j = 0; j < 3; j++)
X_CM.R(i, j) = rmat.at<double>(i, j);
}
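
For reference, a minimal stand-alone sketch of the pose solve that POSIT() now performs: the three model points are shifted by the head-centre offset before calling cv::solvePnP, and the previous rvec/tvec are reused as an initial guess only outside the init phase (the hand-rolled closer-solution check and the 1e-2 scaling are gone). The free function and its parameter names are illustrative, not the tracker's API.

// Sketch only: solvePnP on the three shifted model points, mirroring the
// changed hunks above. 'intrinsics' is assumed to be the 3x3 pinhole matrix
// that POSIT() builds from fov and the frame size.
#include <opencv2/core/core.hpp>
#include <opencv2/calib3d/calib3d.hpp>
#include <vector>

void solve_pose(const cv::Vec3f& headpos,          // model-to-head offset (t_MH)
                const cv::Vec3f& M01, const cv::Vec3f& M02,
                const cv::Point2f p[3],            // matched image points
                const cv::Mat& intrinsics,
                cv::Mat& rvec, cv::Mat& tvec,      // reused between frames
                bool use_guess)                    // false during init phase
{
    std::vector<cv::Point3f> obj_points;
    std::vector<cv::Point2f> img_points;

    // Shift the model so the solved translation refers to the head centre
    // rather than the first point of the LED model.
    obj_points.push_back(headpos);
    obj_points.push_back(M01 + headpos);
    obj_points.push_back(M02 + headpos);

    img_points.push_back(p[0]);
    img_points.push_back(p[1]);
    img_points.push_back(p[2]);

    cv::Mat dist_coeffs = cv::Mat::zeros(5, 1, CV_32FC1);  // no distortion model
    cv::solvePnP(obj_points, img_points, intrinsics, dist_coeffs,
                 rvec, tvec, use_guess, cv::ITERATIVE);
}
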
diff --git a/FTNoIR_Tracker_PT/point_tracker.h b/FTNoIR_Tracker_PT/point_tracker.h
index 823d75c0..e05e8f98 100644
--- a/FTNoIR_Tracker_PT/point_tracker.h
+++ b/FTNoIR_Tracker_PT/point_tracker.h
@@ -21,11 +21,11 @@
class FrameTrafo
{
public:
- FrameTrafo() : R(cv::Matx33d::eye()), t(0,0,0) {}
- FrameTrafo(const cv::Matx33d& R, const cv::Vec3d& t) : R(R),t(t) {}
+ FrameTrafo() : R(cv::Matx33f::eye()), t(0,0,0) {}
+ FrameTrafo(const cv::Matx33f& R, const cv::Vec3f& t) : R(R),t(t) {}
- cv::Matx33d R;
- cv::Vec3d t;
+ cv::Matx33f R;
+ cv::Vec3f t;
};
inline FrameTrafo operator*(const FrameTrafo& X, const FrameTrafo& Y)
@@ -33,17 +33,17 @@ inline FrameTrafo operator*(const FrameTrafo& X, const FrameTrafo& Y)
return FrameTrafo(X.R*Y.R, X.R*Y.t + X.t);
}
-inline FrameTrafo operator*(const cv::Matx33d& X, const FrameTrafo& Y)
+inline FrameTrafo operator*(const cv::Matx33f& X, const FrameTrafo& Y)
{
return FrameTrafo(X*Y.R, X*Y.t);
}
-inline FrameTrafo operator*(const FrameTrafo& X, const cv::Matx33d& Y)
+inline FrameTrafo operator*(const FrameTrafo& X, const cv::Matx33f& Y)
{
return FrameTrafo(X.R*Y, X.t);
}
-inline cv::Vec3f operator*(const FrameTrafo& X, const cv::Vec3d& v)
+inline cv::Vec3f operator*(const FrameTrafo& X, const cv::Vec3f& v)
{
return X.R*v + X.t;
}
@@ -90,7 +90,7 @@ public:
// track the pose using the set of normalized point coordinates (x pos in range -0.5:0.5)
// f : (focal length)/(sensor width)
// dt : time since last call
- bool track(const std::vector<cv::Vec2f>& points, float fov, float dt, int w, int h);
+ bool track(const std::vector<cv::Vec2f>& points, float fov, float dt, int w, int h, const cv::Vec3f &headpos);
boost::shared_ptr<PointModel> point_model;
bool dynamic_pose_resolution;
@@ -136,7 +136,7 @@ protected:
void reset_velocities();
- void POSIT(float fov, int w, int h); // The POSIT algorithm, returns the number of iterations
+ void POSIT(float fov, int w, int h, const cv::Vec3f &headpos); // The POSIT algorithm, returns the number of iterations
bool init_phase;
float dt_valid; // time since last valid tracking result
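
Written out, the operator overloads in this header implement the usual rigid-transform rules; with X = (R_X, t_X), Y = (R_Y, t_Y) and a point v:

    X * Y       = (R_X * R_Y,  R_X * t_Y + t_X)
    (X * Y) * v = R_X * (R_Y * v + t_Y) + t_X = X * (Y * v)

so a chain like R_GC * X_CM * X_MH in the tracker gives the same result however it is bracketed.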