Diffstat (limited to 'tracker-pt')
-rw-r--r--   tracker-pt/ftnoir_tracker_pt.cpp |  6
-rw-r--r--   tracker-pt/numeric.hpp           |  2
-rw-r--r--   tracker-pt/point_extractor.cpp   | 39
-rw-r--r--   tracker-pt/point_extractor.h     | 24
-rw-r--r--   tracker-pt/point_tracker.cpp     | 10
-rw-r--r--   tracker-pt/point_tracker.h       |  3
6 files changed, 43 insertions(+), 41 deletions(-)
diff --git a/tracker-pt/ftnoir_tracker_pt.cpp b/tracker-pt/ftnoir_tracker_pt.cpp
index 2862efdb..0b012a5e 100644
--- a/tracker-pt/ftnoir_tracker_pt.cpp
+++ b/tracker-pt/ftnoir_tracker_pt.cpp
@@ -78,7 +78,7 @@ void Tracker_PT::run()
point_extractor.extract_points(frame, preview_frame, points);
point_count = points.size();
- f fx;
+ double fx;
cam_info.get_focal_length(fx);
const bool success = points.size() >= PointModel::N_POINTS;
@@ -91,6 +91,8 @@ void Tracker_PT::run()
s.dynamic_pose ? s.init_phase_timeout : 0);
ever_success = true;
}
+ else
+ point_tracker.invalidate_pose();
{
Affine X_CM;
@@ -102,7 +104,7 @@ void Tracker_PT::run()
Affine X_MH(mat33::eye(), vec3(s.t_MH_x, s.t_MH_y, s.t_MH_z)); // just copy pasted these lines from below
Affine X_GH = X_CM * X_MH;
vec3 p = X_GH.t; // head (center?) position in global space
- vec2 p_(p[0] / p[2] * fx, p[1] / p[2] * fx); // projected to screen
+ vec2 p_((p[0] * fx) / p[2], (p[1] * fx) / p[2]); // projected to screen
static constexpr int len = 9;
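
For reference, a minimal sketch of the pinhole projection used in the changed line above (project_point and the array aliases are hypothetical, not part of the patch): a camera-space point (x, y, z) with focal length fx in pixels maps to (x*fx/z, y*fx/z). Both forms in the diff are algebraically identical; the rewrite only changes which intermediate product is formed first.

#include <array>

using vec3 = std::array<double, 3>;
using vec2 = std::array<double, 2>;

// Pinhole projection onto the image plane; assumes p[2] != 0,
// i.e. the point lies in front of the camera.
static vec2 project_point(const vec3& p, double fx)
{
    return { (p[0] * fx) / p[2], (p[1] * fx) / p[2] };
}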
diff --git a/tracker-pt/numeric.hpp b/tracker-pt/numeric.hpp
index 9d37086d..c9a553f3 100644
--- a/tracker-pt/numeric.hpp
+++ b/tracker-pt/numeric.hpp
@@ -9,7 +9,7 @@ namespace types {
struct constants final
{
constants() = delete;
- static constexpr f eps = std::numeric_limits<f>::epsilon();
+ static constexpr f eps = f(1e-6);
};
template<int n> using vec = cv::Vec<f, n>;
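
A hedged aside on the numeric.hpp change: std::numeric_limits<f>::epsilon() is machine epsilon (roughly 1e-7 for float, 2e-16 for double), which is far too tight a tolerance for geometric tests on pixel-scale data, so near-degenerate values slip through; the fixed 1e-6 actually rejects them. A minimal sketch of how such a constant is typically used as a guard (nearly_zero and safe_ratio are illustrative names, not from the repository):

#include <cmath>

using f = double; // assumption: stand-in for the scalar alias from numeric.hpp

static constexpr f eps = f(1e-6);

// Treat values below the geometric tolerance as zero.
static bool nearly_zero(f x)
{
    return std::fabs(x) < eps;
}

// Guarded division: avoid blowing up on a near-degenerate denominator.
static f safe_ratio(f num, f den)
{
    return nearly_zero(den) ? f(0) : num / den;
}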
diff --git a/tracker-pt/point_extractor.cpp b/tracker-pt/point_extractor.cpp
index 17b8d6a5..fbe2b936 100644
--- a/tracker-pt/point_extractor.cpp
+++ b/tracker-pt/point_extractor.cpp
@@ -17,13 +17,12 @@
#include <algorithm>
#include <cinttypes>
-using namespace types;
-
using std::sqrt;
using std::fmax;
-using std::round;
using std::min;
+using namespace pt_extractor_impl;
+
/*
http://en.wikipedia.org/wiki/Mean-shift
In this application the idea is to eliminate any bias of the point estimate
@@ -40,24 +39,24 @@ corresponding location is a good candidate for the extracted point.
The idea is similar to the window scaling suggested in Berglund et al., "Fast, bias-free
algorithm for tracking single particles with variable size and shape." (2008).
*/
-static cv::Vec2d MeanShiftIteration(const cv::Mat &frame_gray, const cv::Vec2d &current_center, double filter_width)
+static cv::Vec2d MeanShiftIteration(const cv::Mat &frame_gray, const vec2 &current_center, f filter_width)
{
// Most amazingly, this function runs faster with doubles than with floats.
- const double s = 1.0 / filter_width;
+ const f s = 1 / filter_width;
- double m = 0;
- cv::Vec2d com(0.0, 0.0);
+ f m = 0;
+ vec2 com(0, 0);
for (int i = 0; i < frame_gray.rows; i++)
{
- auto frame_ptr = (uint8_t *)frame_gray.ptr(i);
+ const auto frame_ptr = (const std::uint8_t*)frame_gray.ptr(i);
for (int j = 0; j < frame_gray.cols; j++)
{
- double val = frame_ptr[j];
+ f val = frame_ptr[j];
val = val * val; // squaring weights the brighter parts of the image more strongly.
{
- double dx = (j - current_center[0])*s;
- double dy = (i - current_center[1])*s;
- double f = fmax(0.0, 1.0 - dx*dx - dy*dy);
+ f dx = (j - current_center[0])*s;
+ f dy = (i - current_center[1])*s;
+ f f = fmax(0.0, 1 - dx*dx - dy*dy);
val *= f;
}
m += val;
@@ -65,9 +64,9 @@ static cv::Vec2d MeanShiftIteration(const cv::Mat &frame_gray, const cv::Vec2d &
com[1] += i * val;
}
}
- if (m > .1)
+ if (m > f(.1))
{
- com *= 1.0 / m;
+ com *= 1 / m;
return com;
}
else
@@ -114,7 +113,7 @@ void PointExtractor::extract_points(const cv::Mat& frame, cv::Mat& preview_frame
const double radius = fmax(0., (max_radius-min_radius) * s.threshold / 255 + min_radius);
const float* ptr = reinterpret_cast<const float*>(hist.ptr(0));
- const unsigned area = unsigned(round(3 * M_PI * radius*radius));
+ const unsigned area = uround(3 * M_PI * radius*radius);
const unsigned sz = unsigned(hist.cols * hist.rows);
unsigned thres = 1;
for (unsigned i = sz-1, cnt = 0; i > 1; i--)
@@ -198,7 +197,7 @@ void PointExtractor::extract_points(const cv::Mat& frame, cv::Mat& preview_frame
c_ = (cx+cy)/2;
cv::Point p(iround(b.pos[0] * cx), iround(b.pos[1] * cy));
- cv::circle(preview_frame, p, int((b.radius-1) * c_), cv::Scalar(255, 255, 0), 1, cv::LINE_AA);
+ cv::circle(preview_frame, p, iround((b.radius-2) * c_), cv::Scalar(255, 255, 0), 1, cv::LINE_AA);
cv::circle(preview_frame, p, 1, cv::Scalar(255, 255, 64), -1, cv::LINE_AA);
char buf[64];
@@ -237,9 +236,9 @@ end:
cv::Mat frame_roi = frame_gray(rect);
- static constexpr double radius_c = 1.5;
+ static constexpr f radius_c = 1.5;
- const double kernel_radius = b.radius * radius_c;
+ const f kernel_radius = b.radius * radius_c;
cv::Vec2d pos(b.pos[0] - rect.x, b.pos[1] - rect.y); // position relative to ROI.
for (int iter = 0; iter < 10; ++iter)
@@ -247,7 +246,7 @@ end:
cv::Vec2d com_new = MeanShiftIteration(frame_roi, pos, kernel_radius);
cv::Vec2d delta = com_new - pos;
pos = com_new;
- if (delta.dot(delta) < 1e-3)
+ if (delta.dot(delta) < 1e-2 * 1e-2)
break;
}
@@ -268,7 +267,7 @@ end:
}
}
-PointExtractor::blob::blob(double radius, const cv::Vec2d& pos, double brightness, cv::Rect& rect) :
+blob::blob(double radius, const cv::Vec2d& pos, double brightness, cv::Rect& rect) :
radius(radius), brightness(brightness), pos(pos), rect(rect)
{
//qDebug() << "radius" << radius << "pos" << pos[0] << pos[1];
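
To make the mean-shift description near the top of this file concrete, here is a self-contained sketch under simplifying assumptions (a grayscale image as a flat row-major byte buffer instead of cv::Mat; mean_shift_step and mean_shift are illustrative names, not the patch's code). It mirrors the weighted center-of-mass update and the 0.01-pixel convergence test shown above:

#include <cstdint>
#include <vector>
#include <array>
#include <algorithm>

using vec2 = std::array<double, 2>;

// One iteration: weighted center of mass under a truncated parabolic window
// centered on the current estimate, with squared pixel values as weights.
static vec2 mean_shift_step(const std::vector<std::uint8_t>& img,
                            int rows, int cols,
                            const vec2& center, double filter_width)
{
    const double s = 1.0 / filter_width;
    double m = 0;
    vec2 com{0, 0};
    for (int i = 0; i < rows; i++)
        for (int j = 0; j < cols; j++)
        {
            double val = img[std::size_t(i) * cols + j];
            val *= val; // squaring emphasizes brighter pixels
            const double dx = (j - center[0]) * s;
            const double dy = (i - center[1]) * s;
            val *= std::max(0.0, 1 - dx*dx - dy*dy); // window falls to zero at its edge
            m += val;
            com[0] += j * val;
            com[1] += i * val;
        }
    if (m > .1)
        return { com[0] / m, com[1] / m };
    return center; // nothing bright enough under the window; keep the old estimate
}

// Iterate until the step is shorter than 0.01 px (squared threshold 1e-2 * 1e-2),
// mirroring the convergence test in extract_points above.
static vec2 mean_shift(const std::vector<std::uint8_t>& img,
                       int rows, int cols, vec2 pos, double filter_width)
{
    for (int iter = 0; iter < 10; ++iter)
    {
        const vec2 next = mean_shift_step(img, rows, cols, pos, filter_width);
        const double dx = next[0] - pos[0], dy = next[1] - pos[1];
        pos = next;
        if (dx*dx + dy*dy < 1e-2 * 1e-2)
            break;
    }
    return pos;
}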
diff --git a/tracker-pt/point_extractor.h b/tracker-pt/point_extractor.h
index f931edd5..0b179376 100644
--- a/tracker-pt/point_extractor.h
+++ b/tracker-pt/point_extractor.h
@@ -17,10 +17,19 @@
#include <vector>
-namespace impl {
+namespace pt_extractor_impl {
using namespace types;
+struct blob
+{
+ double radius, brightness;
+ vec2 pos;
+ cv::Rect rect;
+
+ blob(double radius, const cv::Vec2d& pos, double brightness, cv::Rect &rect);
+};
+
class PointExtractor final
{
public:
@@ -38,18 +47,9 @@ private:
cv::Mat hist;
cv::Mat frame_blobs;
- struct blob
- {
- double radius, brightness;
- vec2 pos;
- cv::Rect rect;
-
- blob(double radius, const cv::Vec2d& pos, double brightness, cv::Rect &rect);
- };
-
std::vector<blob> blobs;
};
-} // ns impl
+} // ns pt_extractor_impl
-using impl::PointExtractor;
+using pt_extractor_impl::PointExtractor;
diff --git a/tracker-pt/point_tracker.cpp b/tracker-pt/point_tracker.cpp
index cae68bf3..d3ff658d 100644
--- a/tracker-pt/point_tracker.cpp
+++ b/tracker-pt/point_tracker.cpp
@@ -91,13 +91,13 @@ PointTracker::PointOrder PointTracker::find_correspondences_previous(const vec2*
const PointModel& model,
const CamInfo& info)
{
- f fx; info.get_focal_length(fx);
+ double fx; info.get_focal_length(fx);
PointTracker::PointOrder p;
p[0] = project(vec3(0,0,0), fx);
p[1] = project(model.M01, fx);
p[2] = project(model.M02, fx);
- const int diagonal = int(std::sqrt(double(info.res_x*info.res_x + info.res_y*info.res_y)));
+ const int diagonal = int(std::sqrt(f(info.res_x*info.res_x + info.res_y*info.res_y)));
static constexpr int div = 100;
const int max_dist = diagonal / div; // 8 pixels for 640x480
@@ -142,7 +142,7 @@ void PointTracker::track(const std::vector<vec2>& points,
const CamInfo& info,
int init_phase_timeout)
{
- f fx;
+ double fx;
info.get_focal_length(fx);
PointOrder order;
@@ -196,12 +196,12 @@ int PointTracker::POSIT(const PointModel& model, const PointOrder& order, f foca
// The expected rotation used for resolving the ambiguity in POSIT:
// In every iteration step the rotation closer to R_expected is taken
- static const mat33 R_expected(mat33::eye());
+ static const mat33 R_expected(X_CM.R);
// initial pose = last (predicted) pose
vec3 k;
get_row(R_expected, 2, k);
- f Z0 = f(1000);
+ f Z0 = X_CM.t[2] < f(1e-4) ? f(1000) : X_CM.t[2];
f old_epsilon_1 = 0;
f old_epsilon_2 = 0;
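
The last two hunks seed POSIT from the previous pose instead of fixed defaults: R_expected becomes the last rotation, and the initial depth Z0 is taken from the last translation, falling back to 1000 whenever the pose has been invalidated (its translation is then zero). A hedged sketch of that warm-start logic with simplified stand-in types (Pose, PositSeed and posit_warm_start are not the repository's names):

#include <array>

using mat33 = std::array<double, 9>; // row-major 3x3
using vec3  = std::array<double, 3>;

struct Pose
{
    mat33 R {1,0,0, 0,1,0, 0,0,1}; // identity rotation
    vec3  t {0, 0, 0};             // zero translation (the "invalidated" state)
};

struct PositSeed
{
    mat33  R_expected; // POSIT keeps, at each step, the rotation closer to this one
    double Z0;         // initial depth estimate along the optical axis
};

static PositSeed posit_warm_start(const Pose& last)
{
    // A freshly invalidated pose has t = (0,0,0), so fall back to the old
    // fixed depth guess of 1000 (in the model's length units) in that case.
    return { last.R, last.t[2] < 1e-4 ? 1000.0 : last.t[2] };
}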
diff --git a/tracker-pt/point_tracker.h b/tracker-pt/point_tracker.h
index 63caf0dd..496bcbbe 100644
--- a/tracker-pt/point_tracker.h
+++ b/tracker-pt/point_tracker.h
@@ -62,6 +62,7 @@ public:
Affine pose() { return X_CM; }
vec2 project(const vec3& v_M, f focal_length);
vec2 project(const vec3& v_M, f focal_length, const Affine& X_CM);
+ void invalidate_pose() { X_CM = Affine(); }
private:
// the points in model order
@@ -71,7 +72,7 @@ private:
PointOrder find_correspondences_previous(const vec2* points, const PointModel &model, const CamInfo& info);
int POSIT(const PointModel& point_model, const PointOrder& order, f focal_length); // The POSIT algorithm, returns the number of iterations
- Affine X_CM; // trafo from model to camera
+ Affine X_CM; // transform from model to camera
Timer t;
bool init_phase;
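
Finally, a small self-contained illustration of the contract behind the new invalidate_pose() member (Tracker, set_pose and main are stand-ins, not the repository's API): resetting X_CM to a default Affine zeroes the cached translation, which is exactly what makes the Z0 fallback in POSIT kick back in on the next frame.

#include <iostream>

struct Affine
{
    double R[9] {1,0,0, 0,1,0, 0,0,1}; // identity rotation
    double t[3] {0, 0, 0};             // zero translation
};

class Tracker
{
public:
    Affine pose() const { return X_CM; }
    void set_pose(const Affine& a) { X_CM = a; }
    void invalidate_pose() { X_CM = Affine(); } // same one-liner as in the header above
private:
    Affine X_CM;
};

int main()
{
    Tracker tr;
    Affine seen; seen.t[2] = 250; // pretend POSIT estimated a depth of 250
    tr.set_pose(seen);
    tr.invalidate_pose();
    std::cout << tr.pose().t[2] << "\n"; // prints 0: the next POSIT run re-seeds Z0
}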