author     Stéphane Lenclud <github@lenclud.com>  2019-04-01 20:18:02 +0200
committer  Stéphane Lenclud <github@lenclud.com>  2019-04-24 18:46:12 +0200
commit     456a922b5995f1f836c13c5795258bc83e521571 (patch)
tree       688b70cea01ad5abaf16a62f9c121f5b17086918
parent     eb77f73826a26ad1dea84e3ea0173bb952a46e26 (diff)
Easy Tracker: Preview crosshair now showing top point.
Deleting the home-made P3P solver we branched from pt, now that our OpenCV solution is working.
-rw-r--r--  tracker-points/ftnoir_tracker_pt.cpp         120
-rw-r--r--  tracker-points/ftnoir_tracker_pt.h            17
-rw-r--r--  tracker-points/ftnoir_tracker_pt_dialog.cpp    45
-rw-r--r--  tracker-points/ftnoir_tracker_pt_dialog.h       8
-rw-r--r--  tracker-points/lang/nl_NL.ts                    2
-rw-r--r--  tracker-points/lang/ru_RU.ts                    2
-rw-r--r--  tracker-points/lang/stub.ts                     2
-rw-r--r--  tracker-points/lang/zh_CN.ts                    2
-rw-r--r--  tracker-points/module/module.cpp                4
-rw-r--r--  tracker-points/module/point_extractor.cpp       6
-rw-r--r--  tracker-points/point_tracker.cpp              364
-rw-r--r--  tracker-points/point_tracker.h                 88
-rw-r--r--  tracker-points/pt-api.hpp                       2
13 files changed, 72 insertions, 590 deletions
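The "OpenCV solution" the commit message refers to is a P3P pose solve over the three tracked points (see the objectPoints / trackedPoints code in the first hunk below). As a rough, self-contained sketch of that approach only — the point layout, camera intrinsics and helper name here are illustrative placeholders, not values taken from the tracker settings:

#include <opencv2/calib3d.hpp>
#include <utility>
#include <vector>

// Sketch: solve P3P for a 3-point cap model. Returns the candidate
// (rotation matrix, translation vector) pairs; the caller still has to pick
// the one that makes sense (positive depth, plausible orientation).
// trackedPoints must hold exactly 3 image points, in objectPoints order.
std::vector<std::pair<cv::Mat, cv::Mat>> solve_cap_p3p(const std::vector<cv::Point2f>& trackedPoints)
{
    // Hypothetical cap model in centimeters (right, left, top).
    std::vector<cv::Point3f> objectPoints = {
        {  4.f, 9.f, -6.f },   // right
        { -4.f, 9.f, -6.f },   // left
        {  0.f, 0.f,  0.f },   // top
    };

    // Placeholder pinhole intrinsics; the tracker would derive these from the
    // configured FOV and camera resolution.
    cv::Mat cameraMatrix = (cv::Mat_<double>(3, 3) <<
        1000.0,    0.0, 320.0,
           0.0, 1000.0, 240.0,
           0.0,    0.0,   1.0);
    cv::Mat distCoeffs = cv::Mat::zeros(4, 1, CV_64F);

    std::vector<cv::Mat> rvecs, tvecs;
    int n = cv::solveP3P(objectPoints, trackedPoints, cameraMatrix, distCoeffs,
                         rvecs, tvecs, cv::SOLVEPNP_P3P);

    std::vector<std::pair<cv::Mat, cv::Mat>> solutions;
    for (int i = 0; i < n; ++i)
    {
        cv::Mat R;
        cv::Rodrigues(rvecs[i], R); // rotation vector -> 3x3 rotation matrix
        solutions.emplace_back(R, tvecs[i]);
    }
    return solutions;
}

P3P can return up to four candidate poses, which is why the tracker keeps per-solution arrays (iRotations, iAngles) and an iBestSolutionIndex, as shown in the header diff below.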
diff --git a/tracker-points/ftnoir_tracker_pt.cpp b/tracker-points/ftnoir_tracker_pt.cpp
index 61900045..947970c3 100644
--- a/tracker-points/ftnoir_tracker_pt.cpp
+++ b/tracker-points/ftnoir_tracker_pt.cpp
@@ -1,5 +1,6 @@
/* Copyright (c) 2012 Patrick Ruoff
* Copyright (c) 2014-2016 Stanislaw Halik <sthalik@misaki.pl>
+ * Copyright (c) 2019 Stephane Lenclud
*
* Permission to use, copy, modify, and/or distribute this software for any
* purpose with or without fee is hereby granted, provided that the above
@@ -26,7 +27,7 @@ using namespace options;
namespace pt_impl {
-Tracker_PT::Tracker_PT(pointer<pt_runtime_traits> const& traits) :
+EasyTracker::EasyTracker(pointer<pt_runtime_traits> const& traits) :
traits { traits },
s { traits->get_module_name() },
point_extractor { traits->make_point_extractor() },
@@ -37,14 +38,14 @@ Tracker_PT::Tracker_PT(pointer<pt_runtime_traits> const& traits) :
cv::setBreakOnError(true);
cv::setNumThreads(1);
- connect(s.b.get(), &bundle_::saving, this, &Tracker_PT::maybe_reopen_camera, Qt::DirectConnection);
- connect(s.b.get(), &bundle_::reloading, this, &Tracker_PT::maybe_reopen_camera, Qt::DirectConnection);
+ connect(s.b.get(), &bundle_::saving, this, &EasyTracker::maybe_reopen_camera, Qt::DirectConnection);
+ connect(s.b.get(), &bundle_::reloading, this, &EasyTracker::maybe_reopen_camera, Qt::DirectConnection);
- connect(&s.fov, value_::value_changed<int>(), this, &Tracker_PT::set_fov, Qt::DirectConnection);
+ connect(&s.fov, value_::value_changed<int>(), this, &EasyTracker::set_fov, Qt::DirectConnection);
set_fov(s.fov);
}
-Tracker_PT::~Tracker_PT()
+EasyTracker::~EasyTracker()
{
requestInterruption();
wait();
@@ -54,7 +55,7 @@ Tracker_PT::~Tracker_PT()
}
-// Calculates rotation matrix to euler angles
+// Compute Euler angles from the rotation matrix
cv::Vec3f EulerAngles(cv::Mat &R)
{
@@ -101,7 +102,7 @@ void getEulerAngles(cv::Mat &rotCamerMatrix, cv::Vec3d &eulerAngles)
}
-void Tracker_PT::run()
+void EasyTracker::run()
{
maybe_reopen_camera();
@@ -128,30 +129,24 @@ void Tracker_PT::run()
point_extractor->extract_points(*frame, *preview_frame, points, iImagePoints);
point_count.store(points.size(), std::memory_order_relaxed);
- const bool success = points.size() >= PointModel::N_POINTS;
+ const bool success = points.size() >= KPointCount;
- Affine X_CM;
+ int topPointIndex = -1;
{
QMutexLocker l(&center_lock);
if (success)
{
- int dynamic_pose_ms = s.dynamic_pose && s.active_model_panel != PointModel::Clip
- ? s.init_phase_timeout
- : 0;
-
- point_tracker.track(points,
- PointModel(s),
- info,
- dynamic_pose_ms);
ever_success.store(true, std::memory_order_relaxed);
// Solve P3P problem with OpenCV
// Construct the points defining the object we want to detect based on settings.
- // We are converting them from millimeters to meters.
- // TODO: Need to support clip too. That's cap only for now.
+ // We are converting them from millimeters to centimeters.
+ // TODO: Need to support clip too. That's cap only for now.
+ // s.active_model_panel != PointModel::Clip
+
std::vector<cv::Point3f> objectPoints;
objectPoints.push_back(cv::Point3f(s.cap_x/10.0, s.cap_z / 10.0, -s.cap_y / 10.0)); // Right
objectPoints.push_back(cv::Point3f(-s.cap_x/10.0, s.cap_z / 10.0, -s.cap_y / 10.0)); // Left
@@ -161,7 +156,7 @@ void Tracker_PT::run()
std::vector<cv::Point2f> trackedPoints;
// Stuff bitmap point in there making sure they match the order of the object point
// Find top most point, that's the one with min Y as we assume our guy's head is not up side down
- int topPointIndex = -1;
+
int minY = std::numeric_limits<int>::max();
for (int i = 0; i < 3; i++)
{
@@ -275,15 +270,10 @@ void Tracker_PT::run()
}
- // TODO: Work out rotation angles
- // TODO: Choose the one solution that makes sense for us
-
-
-
}
+ // Send solution data back to main thread
QMutexLocker l2(&data_lock);
- X_CM = point_tracker.pose();
if (iBestSolutionIndex != -1)
{
iBestAngles = iAngles[iBestSolutionIndex];
@@ -294,14 +284,12 @@ void Tracker_PT::run()
if (preview_visible)
{
- const f fx = pt_camera_info::get_focal_length(info.fov, info.res_x, info.res_y);
- Affine X_MH(mat33::eye(), vec3(s.t_MH_x, s.t_MH_y, s.t_MH_z));
- Affine X_GH = X_CM * X_MH;
- vec3 p = X_GH.t; // head (center?) position in global space
-
- if (p[2] > f(.1))
- preview_frame->draw_head_center((p[0] * fx) / p[2], (p[1] * fx) / p[2]);
-
+ if (topPointIndex != -1)
+ {
+ // Render a cross to indicate which point is the head
+ preview_frame->draw_head_center(points[topPointIndex][0], points[topPointIndex][1]);
+ }
+
widget->update_image(preview_frame->get_bitmap());
auto [ w, h ] = widget->preview_size();
@@ -315,7 +303,7 @@ void Tracker_PT::run()
}
}
-bool Tracker_PT::maybe_reopen_camera()
+bool EasyTracker::maybe_reopen_camera()
{
QMutexLocker l(&camera_mtx);
@@ -323,13 +311,13 @@ bool Tracker_PT::maybe_reopen_camera()
s.cam_fps, s.cam_res_x, s.cam_res_y);
}
-void Tracker_PT::set_fov(int value)
+void EasyTracker::set_fov(int value)
{
QMutexLocker l(&camera_mtx);
camera->set_fov(value);
}
-module_status Tracker_PT::start_tracker(QFrame* video_frame)
+module_status EasyTracker::start_tracker(QFrame* video_frame)
{
//video_frame->setAttribute(Qt::WA_NativeWindow);
@@ -346,50 +334,11 @@ module_status Tracker_PT::start_tracker(QFrame* video_frame)
return {};
}
-void Tracker_PT::data(double *data)
+void EasyTracker::data(double *data)
{
if (ever_success.load(std::memory_order_relaxed))
{
- Affine X_CM;
- {
- QMutexLocker l(&data_lock);
- X_CM = point_tracker.pose();
- }
-
- Affine X_MH(mat33::eye(), vec3(s.t_MH_x, s.t_MH_y, s.t_MH_z));
- Affine X_GH(X_CM * X_MH);
-
- // translate rotation matrix from opengl (G) to roll-pitch-yaw (E) frame
- // -z -> x, y -> z, x -> -y
- mat33 R_EG(0, 0,-1,
- -1, 0, 0,
- 0, 1, 0);
- mat33 R(R_EG * X_GH.R * R_EG.t());
-
- // get translation(s)
- const vec3& t = X_GH.t;
-
- // extract rotation angles
- auto r00 = (double)R(0, 0);
- auto r10 = (double)R(1,0), r20 = (double)R(2,0);
- auto r21 = (double)R(2,1), r22 = (double)R(2,2);
-
- double beta = atan2(-r20, sqrt(r21*r21 + r22*r22));
- double alpha = atan2(r10, r00);
- double gamma = atan2(r21, r22);
-
- constexpr double rad2deg = 180/M_PI;
-
- data[Yaw] = rad2deg * alpha;
- data[Pitch] = -rad2deg * beta;
- data[Roll] = rad2deg * gamma;
-
- // convert to cm
- data[TX] = (double)t[0] / 10;
- data[TY] = (double)t[1] / 10;
- data[TZ] = (double)t[2] / 10;
-
-
+ // Get data back from tracker thread
QMutexLocker l(&data_lock);
data[Yaw] = iBestAngles[1];
data[Pitch] = iBestAngles[0];
@@ -397,24 +346,22 @@ void Tracker_PT::data(double *data)
data[TX] = iBestTranslation[0];
data[TY] = iBestTranslation[1];
data[TZ] = iBestTranslation[2];
-
}
}
-bool Tracker_PT::center()
+bool EasyTracker::center()
{
QMutexLocker l(&center_lock);
-
- point_tracker.reset_state();
+ // TODO: Do we need to do anything here?
return false;
}
-int Tracker_PT::get_n_points()
+int EasyTracker::get_n_points()
{
return (int)point_count.load(std::memory_order_relaxed);
}
-bool Tracker_PT::get_cam_info(pt_camera_info& info)
+bool EasyTracker::get_cam_info(pt_camera_info& info)
{
QMutexLocker l(&camera_mtx);
bool ret;
@@ -423,10 +370,5 @@ bool Tracker_PT::get_cam_info(pt_camera_info& info)
return ret;
}
-Affine Tracker_PT::pose() const
-{
- QMutexLocker l(&data_lock);
- return point_tracker.pose();
-}
} // ns pt_impl
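The removed data() code above converted the old POSIT pose to yaw/pitch/roll with explicit atan2 formulas. The same decomposition, written as a standalone helper, looks roughly like this; axis conventions and signs are an assumption here and may differ from what EasyTracker's EulerAngles()/getEulerAngles() actually compute:

#include <opencv2/core.hpp>
#include <cmath>

// Decompose a 3x3 rotation matrix (CV_64F) into Euler angles in degrees,
// mirroring the atan2 formulas in the deleted Tracker_PT::data() code.
static cv::Vec3d euler_from_rotation(const cv::Mat& R)
{
    const double r00 = R.at<double>(0, 0), r10 = R.at<double>(1, 0);
    const double r20 = R.at<double>(2, 0), r21 = R.at<double>(2, 1);
    const double r22 = R.at<double>(2, 2);

    const double pitch = std::atan2(-r20, std::sqrt(r21 * r21 + r22 * r22));
    const double yaw   = std::atan2(r10, r00);
    const double roll  = std::atan2(r21, r22);

    constexpr double rad2deg = 180.0 / 3.14159265358979323846;
    return { pitch * rad2deg, yaw * rad2deg, roll * rad2deg };
}

The pitch-yaw-roll ordering matches the iAngles comment in the header ("pitch, yaw, roll, in this order"), which is why data() maps iBestAngles[1] to Yaw and iBestAngles[0] to Pitch.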
diff --git a/tracker-points/ftnoir_tracker_pt.h b/tracker-points/ftnoir_tracker_pt.h
index 9388bd03..0aba736c 100644
--- a/tracker-points/ftnoir_tracker_pt.h
+++ b/tracker-points/ftnoir_tracker_pt.h
@@ -10,7 +10,6 @@
#include "api/plugin-api.hpp"
#include "pt-api.hpp"
-#include "point_tracker.h"
#include "cv/numeric.hpp"
#include "video/video-widget.hpp"
@@ -26,25 +25,24 @@
namespace pt_impl {
-class TrackerDialog_PT;
+class EasyTrackerDialog;
using namespace numeric_types;
-struct Tracker_PT : QThread, ITracker
+struct EasyTracker : QThread, ITracker
{
- friend class TrackerDialog_PT;
+ friend class EasyTrackerDialog;
template<typename t> using pointer = pt_pointer<t>;
- explicit Tracker_PT(pointer<pt_runtime_traits> const& pt_runtime_traits);
- ~Tracker_PT() override;
+ explicit EasyTracker(pointer<pt_runtime_traits> const& pt_runtime_traits);
+ ~EasyTracker() override;
module_status start_tracker(QFrame* parent_window) override;
void data(double* data) override;
bool center() override;
int get_n_points();
[[nodiscard]] bool get_cam_info(pt_camera_info& info);
- Affine pose() const;
private:
void run() override;
@@ -56,7 +54,6 @@ private:
QMutex camera_mtx;
- PointTracker point_tracker;
pt_settings s;
@@ -82,7 +79,7 @@ private:
std::vector<cv::Mat> iRotations;
// Angle solutions, pitch, yaw, roll, in this order
std::vector<cv::Vec3d> iAngles;
- // The index of our best solution
+ // The index of our best solution in the above arrays
int iBestSolutionIndex = -1;
// Best translation
cv::Vec3d iBestTranslation;
@@ -92,4 +89,4 @@ private:
} // ns pt_impl
-using Tracker_PT = pt_impl::Tracker_PT;
+using Tracker_PT = pt_impl::EasyTracker;
diff --git a/tracker-points/ftnoir_tracker_pt_dialog.cpp b/tracker-points/ftnoir_tracker_pt_dialog.cpp
index edf689a9..f56d3014 100644
--- a/tracker-points/ftnoir_tracker_pt_dialog.cpp
+++ b/tracker-points/ftnoir_tracker_pt_dialog.cpp
@@ -22,7 +22,7 @@ static void init_resources() { Q_INIT_RESOURCE(tracker_pt_base); }
namespace pt_impl {
-TrackerDialog_PT::TrackerDialog_PT(const QString& module_name) :
+EasyTrackerDialog::EasyTrackerDialog(const QString& module_name) :
s(module_name),
tracker(nullptr),
timer(this),
@@ -80,19 +80,19 @@ TrackerDialog_PT::TrackerDialog_PT(const QString& module_name) :
connect(ui.buttonBox, SIGNAL(accepted()), this, SLOT(doOK()));
connect(ui.buttonBox, SIGNAL(rejected()), this, SLOT(doCancel()));
- connect(ui.camdevice_combo, &QComboBox::currentTextChanged, this, &TrackerDialog_PT::set_camera_settings_available);
+ connect(ui.camdevice_combo, &QComboBox::currentTextChanged, this, &EasyTrackerDialog::set_camera_settings_available);
set_camera_settings_available(ui.camdevice_combo->currentText());
- connect(ui.camera_settings, &QPushButton::clicked, this, &TrackerDialog_PT::show_camera_settings);
+ connect(ui.camera_settings, &QPushButton::clicked, this, &EasyTrackerDialog::show_camera_settings);
- connect(&timer, &QTimer::timeout, this, &TrackerDialog_PT::poll_tracker_info_impl);
+ connect(&timer, &QTimer::timeout, this, &EasyTrackerDialog::poll_tracker_info_impl);
timer.setInterval(250);
- connect(&calib_timer, &QTimer::timeout, this, &TrackerDialog_PT::trans_calib_step);
+ connect(&calib_timer, &QTimer::timeout, this, &EasyTrackerDialog::trans_calib_step);
calib_timer.setInterval(35);
poll_tracker_info_impl();
- connect(this, &TrackerDialog_PT::poll_tracker_info, this, &TrackerDialog_PT::poll_tracker_info_impl, Qt::DirectConnection);
+ connect(this, &EasyTrackerDialog::poll_tracker_info, this, &EasyTrackerDialog::poll_tracker_info_impl, Qt::DirectConnection);
constexpr pt_color_type color_types[] = {
pt_color_average,
@@ -117,7 +117,7 @@ TrackerDialog_PT::TrackerDialog_PT(const QString& module_name) :
[this](bool) { s.threshold_slider.notify(); });
}
-QString TrackerDialog_PT::threshold_display_text(int threshold_value)
+QString EasyTrackerDialog::threshold_display_text(int threshold_value)
{
if (!s.auto_threshold)
return tr("Brightness %1/255").arg(threshold_value);
@@ -144,7 +144,7 @@ QString TrackerDialog_PT::threshold_display_text(int threshold_value)
}
}
-void TrackerDialog_PT::startstop_trans_calib(bool start)
+void EasyTrackerDialog::startstop_trans_calib(bool start)
{
QMutexLocker l(&calibrator_mutex);
@@ -200,7 +200,7 @@ void TrackerDialog_PT::startstop_trans_calib(bool start)
ui.tcalib_button->setText(tr("Start calibration"));
}
-void TrackerDialog_PT::poll_tracker_info_impl()
+void EasyTrackerDialog::poll_tracker_info_impl()
{
pt_camera_info info;
if (tracker && tracker->get_cam_info(info))
@@ -218,12 +218,12 @@ void TrackerDialog_PT::poll_tracker_info_impl()
}
}
-void TrackerDialog_PT::set_camera_settings_available(const QString& /* camera_name */)
+void EasyTrackerDialog::set_camera_settings_available(const QString& /* camera_name */)
{
ui.camera_settings->setEnabled(true);
}
-void TrackerDialog_PT::show_camera_settings()
+void EasyTrackerDialog::show_camera_settings()
{
if (tracker)
{
@@ -234,44 +234,37 @@ void TrackerDialog_PT::show_camera_settings()
(void)video::show_dialog(s.camera_name);
}
-void TrackerDialog_PT::trans_calib_step()
+void EasyTrackerDialog::trans_calib_step()
{
QMutexLocker l(&calibrator_mutex);
-
- if (tracker)
- {
- Affine X_CM = tracker->pose();
- trans_calib.update(X_CM.R, X_CM.t);
- }
- else
- startstop_trans_calib(false);
+ // TODO: Do we still need this function?
}
-void TrackerDialog_PT::save()
+void EasyTrackerDialog::save()
{
s.b->save();
}
-void TrackerDialog_PT::doOK()
+void EasyTrackerDialog::doOK()
{
save();
close();
}
-void TrackerDialog_PT::doCancel()
+void EasyTrackerDialog::doCancel()
{
close();
}
-void TrackerDialog_PT::register_tracker(ITracker *t)
+void EasyTrackerDialog::register_tracker(ITracker *t)
{
- tracker = static_cast<Tracker_PT*>(t);
+ tracker = static_cast<EasyTracker*>(t);
ui.tcalib_button->setEnabled(true);
poll_tracker_info();
timer.start();
}
-void TrackerDialog_PT::unregister_tracker()
+void EasyTrackerDialog::unregister_tracker()
{
tracker = nullptr;
ui.tcalib_button->setEnabled(false);
diff --git a/tracker-points/ftnoir_tracker_pt_dialog.h b/tracker-points/ftnoir_tracker_pt_dialog.h
index 66981ee6..4f6d956f 100644
--- a/tracker-points/ftnoir_tracker_pt_dialog.h
+++ b/tracker-points/ftnoir_tracker_pt_dialog.h
@@ -19,11 +19,11 @@
namespace pt_impl {
-class TrackerDialog_PT : public ITrackerDialog
+class EasyTrackerDialog : public ITrackerDialog
{
Q_OBJECT
public:
- TrackerDialog_PT(const QString& module_name);
+ EasyTrackerDialog(const QString& module_name);
void register_tracker(ITracker *tracker) override;
void unregister_tracker() override;
void save();
@@ -42,7 +42,7 @@ protected:
QString threshold_display_text(int threshold_value);
pt_settings s;
- Tracker_PT* tracker;
+ EasyTracker* tracker;
QTimer timer, calib_timer;
TranslationCalibrator trans_calib;
QMutex calibrator_mutex;
@@ -52,4 +52,4 @@ protected:
} // ns pt_impl
-using TrackerDialog_PT = pt_impl::TrackerDialog_PT;
+using TrackerDialog_PT = pt_impl::EasyTrackerDialog;
diff --git a/tracker-points/lang/nl_NL.ts b/tracker-points/lang/nl_NL.ts
index 5897a731..f252bf6f 100644
--- a/tracker-points/lang/nl_NL.ts
+++ b/tracker-points/lang/nl_NL.ts
@@ -234,7 +234,7 @@ Don&apos;t roll or change position.</source>
</message>
</context>
<context>
- <name>pt_impl::TrackerDialog_PT</name>
+ <name>pt_impl::EasyTrackerDialog</name>
<message>
<source>Brightness %1/255</source>
<translation type="unfinished"></translation>
diff --git a/tracker-points/lang/ru_RU.ts b/tracker-points/lang/ru_RU.ts
index 8a06ebac..f4c91b04 100644
--- a/tracker-points/lang/ru_RU.ts
+++ b/tracker-points/lang/ru_RU.ts
@@ -239,7 +239,7 @@ ROLL или X/Y-смещения.</translation>
</message>
</context>
<context>
- <name>pt_impl::TrackerDialog_PT</name>
+ <name>pt_impl::EasyTrackerDialog</name>
<message>
<source>Brightness %1/255</source>
<translation type="unfinished"></translation>
diff --git a/tracker-points/lang/stub.ts b/tracker-points/lang/stub.ts
index 6c493c14..327bc2cf 100644
--- a/tracker-points/lang/stub.ts
+++ b/tracker-points/lang/stub.ts
@@ -234,7 +234,7 @@ Don&apos;t roll or change position.</source>
</message>
</context>
<context>
- <name>pt_impl::TrackerDialog_PT</name>
+ <name>pt_impl::EasyTrackerDialog</name>
<message>
<source>Brightness %1/255</source>
<translation type="unfinished"></translation>
diff --git a/tracker-points/lang/zh_CN.ts b/tracker-points/lang/zh_CN.ts
index 8212de68..65af75ed 100644
--- a/tracker-points/lang/zh_CN.ts
+++ b/tracker-points/lang/zh_CN.ts
@@ -234,7 +234,7 @@ Don&apos;t roll or change position.</source>
</message>
</context>
<context>
- <name>pt_impl::TrackerDialog_PT</name>
+ <name>pt_impl::EasyTrackerDialog</name>
<message>
<source>Brightness %1/255</source>
<translation type="unfinished">亮度 %1/255</translation>
diff --git a/tracker-points/module/module.cpp b/tracker-points/module/module.cpp
index 06cd003b..59e254ac 100644
--- a/tracker-points/module/module.cpp
+++ b/tracker-points/module/module.cpp
@@ -48,7 +48,7 @@ struct pt_module_traits final : pt_runtime_traits
struct tracker_pt : Tracker_PT
{
- tracker_pt() : Tracker_PT(pointer<pt_runtime_traits>(new pt_module_traits))
+ tracker_pt() : EasyTracker(pointer<pt_runtime_traits>(new pt_module_traits))
{
}
};
@@ -58,7 +58,7 @@ struct dialog_pt : TrackerDialog_PT
dialog_pt();
};
-dialog_pt::dialog_pt() : TrackerDialog_PT(module_name) {}
+dialog_pt::dialog_pt() : EasyTrackerDialog(module_name) {}
QString metadata_pt::name() { return tr("Points Tracker 0.1"); }
QIcon metadata_pt::icon() { return QIcon(":/Resources/Logo_IR.png"); }
diff --git a/tracker-points/module/point_extractor.cpp b/tracker-points/module/point_extractor.cpp
index d1975317..0d54a66b 100644
--- a/tracker-points/module/point_extractor.cpp
+++ b/tracker-points/module/point_extractor.cpp
@@ -7,7 +7,6 @@
*/
#include "point_extractor.h"
-#include "point_tracker.h"
#include "frame.hpp"
#include "cv/numeric.hpp"
@@ -214,7 +213,7 @@ static void draw_blobs(cv::Mat& preview_frame, const blob* blobs, unsigned nblob
cv::Point p(iround(b.pos[0] * cx * c_fract), iround(b.pos[1] * cy * c_fract));
- auto circle_color = k >= PointModel::N_POINTS
+ auto circle_color = k >= KPointCount
? cv::Scalar(192, 192, 192)
: cv::Scalar(255, 255, 0);
@@ -228,7 +227,7 @@ static void draw_blobs(cv::Mat& preview_frame, const blob* blobs, unsigned nblob
buf[sizeof(buf)-1] = '\0';
std::snprintf(buf, sizeof(buf) - 1, "%.2fpx", (double)b.radius);
- auto text_color = k >= PointModel::N_POINTS
+ auto text_color = k >= KPointCount
? cv::Scalar(160, 160, 160)
: cv::Scalar(0, 0, 255);
@@ -359,6 +358,7 @@ end:
b.pos[1] = pos[1] + rect.y;
}
+ // TODO: Do not do this if there is no preview. Delay blob drawing until we know where the points are?
draw_blobs(preview_frame_.as<Frame>()->mat,
blobs.data(), blobs.size(),
frame_gray.size());
diff --git a/tracker-points/point_tracker.cpp b/tracker-points/point_tracker.cpp
deleted file mode 100644
index e209938f..00000000
--- a/tracker-points/point_tracker.cpp
+++ /dev/null
@@ -1,364 +0,0 @@
-/* Copyright (c) 2012 Patrick Ruoff
- *
- * Permission to use, copy, modify, and/or distribute this software for any
- * purpose with or without fee is hereby granted, provided that the above
- * copyright notice and this permission notice appear in all copies.
- */
-
-#include "point_tracker.h"
-#include "compat/math-imports.hpp"
-
-#include <vector>
-#include <algorithm>
-#include <cmath>
-
-#include <QDebug>
-
-namespace pt_impl {
-
-using namespace numeric_types;
-
-static void get_row(const mat33& m, int i, vec3& v)
-{
- v[0] = m(i,0);
- v[1] = m(i,1);
- v[2] = m(i,2);
-}
-
-static void set_row(mat33& m, int i, const vec3& v)
-{
- m(i,0) = v[0];
- m(i,1) = v[1];
- m(i,2) = v[2];
-}
-
-PointModel::PointModel(const pt_settings& s)
-{
- set_model(s);
- // calculate u
- u = M01.cross(M02);
- u = cv::normalize(u);
-
- // calculate projection matrix on M01,M02 plane
- f s11 = M01.dot(M01);
- f s12 = M01.dot(M02);
- f s22 = M02.dot(M02);
- P = 1/(s11*s22-s12*s12) * mat22(s22, -s12, -s12, s11);
-}
-
-void PointModel::set_model(const pt_settings& s)
-{
- switch (s.active_model_panel)
- {
- default:
- eval_once(qDebug() << "pt: wrong model type selected");
- [[fallthrough]];
- case Clip:
- M01 = vec3(0, s.clip_ty, -s.clip_tz);
- M02 = vec3(0, -s.clip_by, -s.clip_bz);
- break;
- case Cap:
- M01 = vec3(-s.cap_x, -s.cap_y, -s.cap_z);
- M02 = vec3(s.cap_x, -s.cap_y, -s.cap_z);
- break;
- case Custom:
- M01 = vec3(s.m01_x, s.m01_y, s.m01_z);
- M02 = vec3(s.m02_x, s.m02_y, s.m02_z);
- break;
- }
-}
-
-void PointModel::get_d_order(const vec2* points, unsigned* d_order, const vec2& d) const
-{
- constexpr unsigned cnt = PointModel::N_POINTS;
- // fit line to orthographically projected points
- using t = std::pair<f,unsigned>;
- t d_vals[cnt];
- // get sort indices with respect to d scalar product
- for (unsigned i = 0; i < cnt; ++i)
- d_vals[i] = t(d.dot(points[i]), i);
-
- std::sort(d_vals,
- d_vals + 3,
- [](const t& a, const t& b) { return a.first < b.first; });
-
- for (unsigned i = 0; i < cnt; ++i)
- d_order[i] = d_vals[i].second;
-}
-
-
-PointTracker::PointTracker() = default;
-
-PointTracker::PointOrder PointTracker::find_correspondences_previous(const vec2* points,
- const PointModel& model,
- const pt_camera_info& info)
-{
- const f fx = pt_camera_info::get_focal_length(info.fov, info.res_x, info.res_y);
- PointTracker::PointOrder p;
- p[0] = project(vec3(0,0,0), fx);
- p[1] = project(model.M01, fx);
- p[2] = project(model.M02, fx);
-
- constexpr unsigned sz = PointModel::N_POINTS;
-
- // set correspondences by minimum distance to projected model point
- bool point_taken[sz] {};
-
- for (unsigned i=0; i < sz; ++i)
- {
- f min_sdist = 0;
- unsigned min_idx = 0;
- // find closest point to projected model point i
- for (unsigned j=0; j < sz; ++j)
- {
- vec2 d = p[i]-points[j];
- f sdist = d.dot(d);
- if (sdist < min_sdist || j == 0)
- {
- min_idx = j;
- min_sdist = sdist;
- }
- }
-
- // if one point is closest to more than one model point, fallback
- if (point_taken[min_idx])
- {
- reset_state();
- return find_correspondences(points, model);
- }
- point_taken[min_idx] = true;
- p[i] = points[min_idx];
- }
-
- return p;
-}
-
-void PointTracker::track(const std::vector<vec2>& points,
- const PointModel& model,
- const pt_camera_info& info,
- int init_phase_timeout)
-{
- const f fx = pt_camera_info::get_focal_length(info.fov, info.res_x, info.res_y);
- PointOrder order;
-
- if (init_phase || init_phase_timeout <= 0 || t.elapsed_ms() > init_phase_timeout)
- {
- reset_state();
- order = find_correspondences(points.data(), model);
- }
- else
- order = find_correspondences_previous(points.data(), model, info);
-
- if (POSIT(model, order, fx) != -1)
- {
- init_phase = false;
- t.start();
- }
- else
- reset_state();
-}
-
-PointTracker::PointOrder PointTracker::find_correspondences(const vec2* points, const PointModel& model)
-{
- constexpr unsigned cnt = PointModel::N_POINTS;
- // We do a simple freetrack-like sorting in the init phase...
- unsigned point_d_order[cnt];
- unsigned model_d_order[cnt];
- // calculate d and d_order for simple freetrack-like point correspondence
- vec2 d(model.M01[0]-model.M02[0], model.M01[1]-model.M02[1]);
- // sort points
- model.get_d_order(points, point_d_order, d);
- vec2 pts[cnt] {
- { 0, 0 },
- { model.M01[0], model.M01[1] },
- { model.M02[0], model.M02[1] },
- };
- model.get_d_order(pts, model_d_order, d);
-
- // set correspondences
- PointOrder p;
- for (unsigned i = 0; i < cnt; ++i)
- p[model_d_order[i]] = points[point_d_order[i]];
-
- return p;
-}
-
-#ifdef __clang__
-# pragma clang diagnostic push
-# pragma clang diagnostic ignored "-Wfloat-equal"
-#endif
-
-int PointTracker::POSIT(const PointModel& model, const PointOrder& order, f focal_length)
-{
- // POSIT algorithm for coplanar points as presented in
- // [Denis Oberkampf, Daniel F. DeMenthon, Larry S. Davis: "Iterative Pose Estimation Using Coplanar Feature Points"]
- // we use the same notation as in the paper here
-
- // The expected rotation used for resolving the ambiguity in POSIT:
- // In every iteration step the rotation closer to R_expected is taken
- const mat33& R_expected{X_CM_expected.R};
-
- // initial pose = last (predicted) pose
- vec3 k;
- get_row(R_expected, 2, k);
- f Z0 = X_CM.t[2] < f(1e-4) ? f(1000) : X_CM.t[2];
-
- f old_epsilon_1 = 0;
- f old_epsilon_2 = 0;
- f epsilon_1, epsilon_2;
-
- vec3 I0, J0;
- vec2 I0_coeff, J0_coeff;
-
- vec3 I_1, J_1, I_2, J_2;
- mat33 R_1, R_2;
- mat33* R_current = &R_1;
-
- constexpr int max_iter = 100;
-
- int i;
- for (i = 1; i < max_iter; ++i)
- {
- epsilon_1 = k.dot(model.M01)/Z0;
- epsilon_2 = k.dot(model.M02)/Z0;
-
- // vector of scalar products <I0, M0i> and <J0, M0i>
- vec2 I0_M0i(order[1][0]*(1 + epsilon_1) - order[0][0],
- order[2][0]*(1 + epsilon_2) - order[0][0]);
- vec2 J0_M0i(order[1][1]*(1 + epsilon_1) - order[0][1],
- order[2][1]*(1 + epsilon_2) - order[0][1]);
-
- // construct projection of I, J onto M0i plane: I0 and J0
- I0_coeff = model.P * I0_M0i;
- J0_coeff = model.P * J0_M0i;
- I0 = I0_coeff[0]*model.M01 + I0_coeff[1]*model.M02;
- J0 = J0_coeff[0]*model.M01 + J0_coeff[1]*model.M02;
-
- // calculate u component of I, J
- f II0 = I0.dot(I0);
- f IJ0 = I0.dot(J0);
- f JJ0 = J0.dot(J0);
- f rho, theta;
- // CAVEAT don't change to comparison with an epsilon -sh 20160423
- if (JJ0 == II0) {
- rho = sqrt(fabs(2*IJ0));
- theta = -pi/4;
- if (IJ0<0) theta *= -1;
- }
- else {
- rho = sqrt(sqrt( (JJ0-II0)*(JJ0-II0) + 4*IJ0*IJ0 ));
- theta = atan( -2*IJ0 / (JJ0-II0) );
- // avoid branch misprediction
- theta += (JJ0 - II0 < 0) * pi;
- theta *= f(.5);
- }
-
- // construct the two solutions
- I_1 = I0 + rho*cos(theta)*model.u;
- I_2 = I0 - rho*cos(theta)*model.u;
-
- J_1 = J0 + rho*sin(theta)*model.u;
- J_2 = J0 - rho*sin(theta)*model.u;
-
- f norm_const = (f)(1/cv::norm(I_1)); // all have the same norm
-
- // create rotation matrices
- I_1 *= norm_const; J_1 *= norm_const;
- I_2 *= norm_const; J_2 *= norm_const;
-
- set_row(R_1, 0, I_1);
- set_row(R_1, 1, J_1);
- set_row(R_1, 2, I_1.cross(J_1));
-
- set_row(R_2, 0, I_2);
- set_row(R_2, 1, J_2);
- set_row(R_2, 2, I_2.cross(J_2));
-
- // the single translation solution
- Z0 = norm_const * focal_length;
-
- // pick the rotation solution closer to the expected one
- // in simple metric d(A,B) = || I - A * B^T ||
- f R_1_deviation = (f)(cv::norm(mat33::eye() - R_expected * R_1.t()));
- f R_2_deviation = (f)(cv::norm(mat33::eye() - R_expected * R_2.t()));
-
- if (R_1_deviation < R_2_deviation)
- R_current = &R_1;
- else
- R_current = &R_2;
-
- get_row(*R_current, 2, k);
-
- // check for convergence condition
- const f delta = fabs(epsilon_1 - old_epsilon_1) + fabs(epsilon_2 - old_epsilon_2);
-
- if (delta < eps)
- break;
-
- old_epsilon_1 = epsilon_1;
- old_epsilon_2 = epsilon_2;
- }
-
- const f t[3] = {
- order[0][0] * Z0/focal_length,
- order[0][1] * Z0/focal_length,
- Z0
- };
- const mat33& r = *R_current;
-
- for (int i = 0; i < 3; i++)
- for (int j = 0; j < 3; j++)
- {
- int ret = std::fpclassify(r(i, j));
- if (ret == FP_NAN || ret == FP_INFINITE)
- {
- qDebug() << "posit nan R";
- return -1;
- }
- }
-
- for (unsigned i = 0; i < 3; i++) // NOLINT(modernize-loop-convert)
- {
- int ret = std::fpclassify(t[i]);
- if (ret == FP_NAN || ret == FP_INFINITE)
- {
- qDebug() << "posit nan T";
- return -1;
- }
- }
-
- // apply results
- X_CM.R = r;
- X_CM.t[0] = t[0];
- X_CM.t[1] = t[1];
- X_CM.t[2] = t[2];
-
- X_CM_expected = X_CM;
-
- //qDebug() << "iter:" << i;
-
- return i;
-}
-
-#ifdef __clang__
-# pragma clang diagnostic pop
-#endif
-
-vec2 PointTracker::project(const vec3& v_M, f focal_length)
-{
- return project(v_M, focal_length, X_CM);
-}
-
-vec2 PointTracker::project(const vec3& v_M, f focal_length, const Affine& X_CM)
-{
- vec3 v_C = X_CM * v_M;
- return vec2(focal_length*v_C[0]/v_C[2], focal_length*v_C[1]/v_C[2]);
-}
-
-void PointTracker::reset_state()
-{
- init_phase = true;
- X_CM_expected = {};
-}
-
-} // ns pt_impl
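The point_tracker.cpp file deleted above implemented the coplanar POSIT solver from the Oberkampf / DeMenthon / Davis paper cited in its header. As a summary of what that removed loop computed (not new behaviour), each iteration solved the scaled-orthographic relations

    \varepsilon_i = \frac{\mathbf{k} \cdot \mathbf{M_0 M_i}}{Z_0}, \qquad
    \mathbf{M_0 M_i} \cdot \mathbf{I} = x_i (1 + \varepsilon_i) - x_0, \qquad
    \mathbf{M_0 M_i} \cdot \mathbf{J} = y_i (1 + \varepsilon_i) - y_0

then recovered the in-plane part of I and J through the precomputed projection matrix P, resolved the out-of-plane component with the rho/theta terms (hence the two candidate rotations R_1 and R_2 and the comparison against the expected pose), and took Z0 = focal_length / ||I|| as the translation depth. The OpenCV P3P solve in ftnoir_tracker_pt.cpp replaces all of this.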
diff --git a/tracker-points/point_tracker.h b/tracker-points/point_tracker.h
deleted file mode 100644
index 70c7a9fc..00000000
--- a/tracker-points/point_tracker.h
+++ /dev/null
@@ -1,88 +0,0 @@
-/* Copyright (c) 2012 Patrick Ruoff
- *
- * Permission to use, copy, modify, and/or distribute this software for any
- * purpose with or without fee is hereby granted, provided that the above
- * copyright notice and this permission notice appear in all copies.
- */
-
-#pragma once
-
-#include "compat/timer.hpp"
-#include "cv/affine.hpp"
-#include "cv/numeric.hpp"
-#include "pt-api.hpp"
-
-#include <cstddef>
-#include <memory>
-#include <vector>
-#include <array>
-
-#include <opencv2/core.hpp>
-
-#include <QObject>
-
-namespace pt_impl {
-
-// ----------------------------------------------------------------------------
-// Describes a 3-point model
-// nomenclature as in
-// [Denis Oberkampf, Daniel F. DeMenthon, Larry S. Davis: "Iterative Pose Estimation Using Coplanar Feature Points"]
-
-using namespace numeric_types;
-
-struct PointModel final
-{
- static constexpr unsigned N_POINTS = 3;
-
- vec3 M01; // M01 in model frame
- vec3 M02; // M02 in model frame
-
- vec3 u; // unit vector perpendicular to M01,M02-plane
-
- mat22 P;
-
- enum Model { Clip, Cap, Custom };
-
- explicit PointModel(const pt_settings& s);
- void set_model(const pt_settings& s);
-
- void get_d_order(const vec2* points, unsigned* d_order, const vec2& d) const;
-};
-
-// ----------------------------------------------------------------------------
-// Tracks a 3-point model
-// implementing the POSIT algorithm for coplanar points as presented in
-// [Denis Oberkampf, Daniel F. DeMenthon, Larry S. Davis: "Iterative Pose Estimation Using Coplanar Feature Points"]
-class PointTracker final
-{
-public:
- PointTracker();
- // track the pose using the set of normalized point coordinates (x pos in range -0.5:0.5)
- // f : (focal length)/(sensor width)
- // dt : time since last call
- void track(const std::vector<vec2>& projected_points, const PointModel& model, const pt_camera_info& info, int init_phase_timeout);
- Affine pose() const { return X_CM; }
- vec2 project(const vec3& v_M, f focal_length);
- vec2 project(const vec3& v_M, f focal_length, const Affine& X_CM);
- void reset_state();
-
-private:
- // the points in model order
- using PointOrder = std::array<vec2, 3>;
-
- PointOrder find_correspondences(const vec2* projected_points, const PointModel &model);
- PointOrder find_correspondences_previous(const vec2* points, const PointModel &model, const pt_camera_info& info);
- // The POSIT algorithm, returns the number of iterations
- int POSIT(const PointModel& point_model, const PointOrder& order, f focal_length);
-
- Affine X_CM; // transform from model to camera
- Affine X_CM_expected;
- PointOrder prev_positions;
- Timer t;
- bool init_phase = true;
-};
-
-} // ns pt_impl
-
-using PointTracker = pt_impl::PointTracker;
-using PointModel = pt_impl::PointModel;
diff --git a/tracker-points/pt-api.hpp b/tracker-points/pt-api.hpp
index 69f74498..81a52f7f 100644
--- a/tracker-points/pt-api.hpp
+++ b/tracker-points/pt-api.hpp
@@ -20,6 +20,8 @@
# pragma clang diagnostic ignored "-Wweak-vtables"
#endif
+const int KPointCount = 3;
+
struct pt_camera_info final
{
using f = numeric_types::f;