Diffstat (limited to 'tracker-pt')
-rw-r--r--   tracker-pt/ftnoir_tracker_pt_dialog.cpp |  5
-rw-r--r--   tracker-pt/point_extractor.cpp          |  2
-rw-r--r--   tracker-pt/point_tracker.cpp            | 54
-rw-r--r--   tracker-pt/point_tracker.h              |  7
4 files changed, 36 insertions, 32 deletions
diff --git a/tracker-pt/ftnoir_tracker_pt_dialog.cpp b/tracker-pt/ftnoir_tracker_pt_dialog.cpp
index d3d68efc..a5108ec1 100644
--- a/tracker-pt/ftnoir_tracker_pt_dialog.cpp
+++ b/tracker-pt/ftnoir_tracker_pt_dialog.cpp
@@ -10,10 +10,7 @@
 #include "cv/video-property-page.hpp"
 #include "compat/camera-names.hpp"
 
-#include <opencv2/core/core.hpp>
-#include <memory>
-#include <vector>
-#include <QMessageBox>
+#include <opencv2/core.hpp>
 
 #include <QString>
 #include <QDebug>
diff --git a/tracker-pt/point_extractor.cpp b/tracker-pt/point_extractor.cpp
index 48816780..7cd46fe2 100644
--- a/tracker-pt/point_extractor.cpp
+++ b/tracker-pt/point_extractor.cpp
@@ -69,7 +69,7 @@ void PointExtractor::extract_points(cv::Mat& frame, std::vector<vec2>& points)
         const double radius = max(0., (max_radius-min_radius) * s.threshold / 256);
         const int area = int(round(3 * M_PI * (min_radius + radius)*(min_radius+radius)));
         auto ptr = reinterpret_cast<const float*>(hist.ptr(0));
-        for (int i = sz-1; i > 0; i--)
+        for (int i = sz-1; i > 1; i--)
         {
             cnt += ptr[i];
             if (cnt >= area)
diff --git a/tracker-pt/point_tracker.cpp b/tracker-pt/point_tracker.cpp
index 7873a010..d2071556 100644
--- a/tracker-pt/point_tracker.cpp
+++ b/tracker-pt/point_tracker.cpp
@@ -15,6 +15,7 @@ using namespace pt_types;
 #include <vector>
 #include <algorithm>
 #include <cmath>
+#include <iterator>
 
 #include <QDebug>
 
@@ -67,17 +68,17 @@ void PointModel::set_model(settings_pt& s)
     }
 }
 
-void PointModel::get_d_order(const std::vector<vec2>& points, int* d_order, const vec2& d) const
+void PointModel::get_d_order(const vec2* points, unsigned* d_order, const vec2& d) const
 {
     // fit line to orthographically projected points
-    using t = std::pair<f,int>;
-    std::vector<t> d_vals;
+    using t = std::pair<f,unsigned>;
+    t d_vals[3];
     // get sort indices with respect to d scalar product
     for (unsigned i = 0; i < PointModel::N_POINTS; ++i)
-        d_vals.push_back(std::pair<f, int>(d.dot(points[i]), i));
+        d_vals[i] = t(d.dot(points[i]), i);
 
-    std::sort(d_vals.begin(),
-              d_vals.end(),
+    std::sort(d_vals,
+              d_vals + 3u,
               [](const t& a, const t& b) { return a.first < b.first; });
 
     for (unsigned i = 0; i < PointModel::N_POINTS; ++i)
@@ -89,7 +90,7 @@ PointTracker::PointTracker() : init_phase(true)
 {
 }
 
-PointTracker::PointOrder PointTracker::find_correspondences_previous(const std::vector<vec2>& points,
+PointTracker::PointOrder PointTracker::find_correspondences_previous(const vec2* points,
                                                                      const PointModel& model,
                                                                      f focal_length,
                                                                      int w,
@@ -157,33 +158,33 @@ void PointTracker::track(const std::vector<vec2>& points,
     }
 
     if (!dynamic_pose || init_phase)
-        order = find_correspondences(points, model);
+        order = find_correspondences(points.data(), model);
     else
+        order = find_correspondences_previous(points.data(), model, focal_length, w, h);
+
+    if (POSIT(model, order, focal_length) != -1)
     {
-        order = find_correspondences_previous(points, model, focal_length, w, h);
+        init_phase = false;
+        t.start();
     }
-
-    POSIT(model, order, focal_length);
-    init_phase = false;
-    t.start();
 }
 
-PointTracker::PointOrder PointTracker::find_correspondences(const std::vector<vec2>& points, const PointModel& model)
+PointTracker::PointOrder PointTracker::find_correspondences(const vec2* points, const PointModel& model)
 {
+    static const Affine a(mat33::eye(), vec3(0, 0, 1));
     // We do a simple freetrack-like sorting in the init phase...
+    unsigned point_d_order[PointModel::N_POINTS];
+    unsigned model_d_order[PointModel::N_POINTS];
     // sort points
-    int point_d_order[PointModel::N_POINTS];
-    int model_d_order[PointModel::N_POINTS];
     vec2 d(model.M01[0]-model.M02[0], model.M01[1]-model.M02[1]);
     model.get_d_order(points, point_d_order, d);
     // calculate d and d_order for simple freetrack-like point correspondence
-    model.get_d_order(std::vector<vec2> {
-                          vec2{0,0},
-                          vec2(model.M01[0], model.M01[1]),
-                          vec2(model.M02[0], model.M02[1])
-                      },
-                      model_d_order,
-                      d);
+    vec2 pts[3] = {
+        vec2(0, 0),
+        vec2(model.M01[0], model.M01[1]),
+        vec2(model.M02[0], model.M02[1])
+    };
+    model.get_d_order(pts, model_d_order, d);
     // set correspondences
     PointOrder p;
     for (unsigned i = 0; i < PointModel::N_POINTS; ++i)
@@ -200,7 +201,7 @@ int PointTracker::POSIT(const PointModel& model, const PointOrder& order, f focal_length)
 
     // The expected rotation used for resolving the ambiguity in POSIT:
     // In every iteration step the rotation closer to R_expected is taken
-    mat33 R_expected = mat33::eye();
+    static const mat33 R_expected(mat33::eye());
 
     // initial pose = last (predicted) pose
     vec3 k;
@@ -345,6 +346,11 @@ int PointTracker::POSIT(const PointModel& model, const PointOrder& order, f focal_length)
 
 vec2 PointTracker::project(const vec3& v_M, f focal_length)
 {
+    return project(v_M, focal_length, X_CM);
+}
+
+vec2 PointTracker::project(const vec3& v_M, f focal_length, const Affine& X_CM)
+{
     vec3 v_C = X_CM * v_M;
     return vec2(focal_length*v_C[0]/v_C[2], focal_length*v_C[1]/v_C[2]);
 }
diff --git a/tracker-pt/point_tracker.h b/tracker-pt/point_tracker.h
index 5576b41b..9530c338 100644
--- a/tracker-pt/point_tracker.h
+++ b/tracker-pt/point_tracker.h
@@ -70,7 +70,7 @@ public:
     PointModel(settings_pt& s);
     void set_model(settings_pt& s);
 
-    void get_d_order(const std::vector<vec2>& points, int* d_order, const vec2& d) const;
+    void get_d_order(const vec2* points, unsigned* d_order, const vec2& d) const;
 };
 
 // ----------------------------------------------------------------------------
@@ -87,13 +87,14 @@ public:
     void track(const std::vector<vec2>& projected_points, const PointModel& model, f focal_length, bool dynamic_pose, int init_phase_timeout, int w, int h);
     Affine pose() { return X_CM; }
     vec2 project(const vec3& v_M, f focal_length);
+    vec2 project(const vec3& v_M, f focal_length, const Affine& X_CM);
 
 private:
     // the points in model order
     using PointOrder = std::array<vec2, 3>;
 
-    PointOrder find_correspondences(const std::vector<vec2>& projected_points, const PointModel &model);
-    PointOrder find_correspondences_previous(const std::vector<vec2>& points, const PointModel &model, f focal_length, int w, int h);
+    PointOrder find_correspondences(const vec2* projected_points, const PointModel &model);
+    PointOrder find_correspondences_previous(const vec2* points, const PointModel &model, f focal_length, int w, int h);
     int POSIT(const PointModel& point_model, const PointOrder& order, f focal_length); // The POSIT algorithm, returns the number of iterations
 
     Affine X_CM; // trafo from model to camera
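A note on the central change: PointModel::get_d_order now takes a raw vec2 pointer and fills a fixed-size pair array instead of building a std::vector on every call, since the tracker always works with exactly N_POINTS == 3 points. Below is a minimal, self-contained sketch of that sorting step, not the opentrack source: the types are simplified stand-ins (std::array<double, 2> in place of the tracker's vec2, a free function in place of the const member), but the index ordering it produces follows the same logic as the patch.

// Illustrative sketch only: d-order sort over three points, with simplified
// stand-in types. Not the opentrack source.
#include <algorithm>
#include <array>
#include <cstdio>
#include <utility>

using vec2 = std::array<double, 2>;

static double dot(const vec2& a, const vec2& b)
{
    return a[0]*b[0] + a[1]*b[1];
}

// Sort indices of three points by their scalar projection onto direction d,
// using a stack array of (projection, index) pairs instead of a std::vector.
static void get_d_order(const vec2* points, unsigned* d_order, const vec2& d)
{
    constexpr unsigned N_POINTS = 3;
    using t = std::pair<double, unsigned>;
    t d_vals[N_POINTS];

    for (unsigned i = 0; i < N_POINTS; ++i)
        d_vals[i] = t(dot(d, points[i]), i);

    std::sort(d_vals, d_vals + N_POINTS,
              [](const t& a, const t& b) { return a.first < b.first; });

    for (unsigned i = 0; i < N_POINTS; ++i)
        d_order[i] = d_vals[i].second;
}

int main()
{
    const vec2 pts[3] = { {0, 0}, {2, 1}, {-1, 3} };  // hypothetical projected points
    const vec2 d = { 1, -1 };                         // direction, M01 - M02 in the patch
    unsigned order[3];
    get_d_order(pts, order, d);
    std::printf("%u %u %u\n", order[0], order[1], order[2]);  // prints "2 0 1"
}

With only three elements, the stack array avoids a heap allocation per frame, and std::sort operates directly on the raw pointer range, which is the same shape as the pointer-based sort in the patched get_d_order.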