-rw-r--r--  tracker-pt/camera.cpp              49
-rw-r--r--  tracker-pt/camera.h                75
-rw-r--r--  tracker-pt/ftnoir_tracker_pt.cpp   57
-rw-r--r--  tracker-pt/ftnoir_tracker_pt.h      2
-rw-r--r--  tracker-pt/point_extractor.cpp      8
-rw-r--r--  tracker-pt/point_extractor.h        1
-rw-r--r--  tracker-pt/point_tracker.cpp       31
-rw-r--r--  tracker-pt/point_tracker.h          5
8 files changed, 118 insertions(+), 110 deletions(-)
diff --git a/tracker-pt/camera.cpp b/tracker-pt/camera.cpp
index 60eb4bb8..220bcc8e 100644
--- a/tracker-pt/camera.cpp
+++ b/tracker-pt/camera.cpp
@@ -6,8 +6,8 @@
*/
#include "camera.h"
-#include <string>
-#include <QDebug>
+
+namespace impl {
QString Camera::get_desired_name() const
{
@@ -19,7 +19,24 @@ QString Camera::get_active_name() const
return active_name;
}
-DEFUN_WARN_UNUSED bool Camera::get_info(CamInfo& ret)
+void CamInfo::get_focal_length(f& fx) const
+{
+ using std::tan;
+ using std::atan;
+ using std::sqrt;
+
+ const double diag_len = sqrt(double(res_x*res_x + res_y*res_y));
+ const double aspect_x = res_x / diag_len;
+ //const double aspect_y = res_y / diag_len;
+ const double diag_fov = fov * M_PI/180;
+ const double fov_x = 2*atan(tan(diag_fov*.5) * aspect_x);
+ //const double fov_y = 2*atan(tan(diag_fov*.5) * aspect_y);
+ fx = .5 / tan(fov_x * .5);
+ //fy = .5 / tan(fov_y * .5);
+ //static bool once = false; if (!once) { once = true; qDebug() << "f" << ret << "fov" << (fov * 180/M_PI); }
+}
+
+DEFUN_WARN_UNUSED bool Camera::get_info(CamInfo& ret) const
{
if (cam_info.res_x == 0 || cam_info.res_y == 0)
return false;
@@ -27,20 +44,29 @@ DEFUN_WARN_UNUSED bool Camera::get_info(CamInfo& ret)
return true;
}
-bool Camera::get_frame(double dt, cv::Mat* frame)
+DEFUN_WARN_UNUSED bool Camera::get_frame(double dt, cv::Mat& frame, CamInfo& info)
{
bool new_frame = _get_frame(frame);
+
// measure fps of valid frames
static constexpr double RC = .1; // seconds
const double alpha = dt/(dt + RC);
dt_valid += dt;
+
if (new_frame)
{
if (dt_mean < 2e-3)
dt_mean = dt;
else
dt_mean = (1-alpha) * dt_mean + alpha * dt_valid;
+
cam_info.fps = dt_mean > 2e-3 ? int(1 / dt_mean + .65) : 0;
+ cam_info.res_x = frame.cols;
+ cam_info.res_y = frame.rows;
+ cam_info.fov = s.fov;
+
+ info = cam_info;
+
dt_valid = 0;
}
else
@@ -58,6 +84,8 @@ DEFUN_WARN_UNUSED bool Camera::start(int idx, int fps, int res_x, int res_y)
cam_desired.res_x != res_x ||
cam_desired.res_y != res_y)
{
+ qDebug() << "pt: opening camera";
+
cam_desired.idx = idx;
cam_desired.fps = fps;
cam_desired.res_x = res_x;
@@ -71,9 +99,8 @@ DEFUN_WARN_UNUSED bool Camera::start(int idx, int fps, int res_x, int res_y)
if (cap->isOpened())
{
+ cam_info = CamInfo();
cam_info.idx = cam_desired.idx;
- cam_info.res_x = 0;
- cam_info.res_y = 0;
active_name = desired_name;
return true;
@@ -93,18 +120,16 @@ void Camera::stop()
cam_desired = CamInfo();
}
-bool Camera::_get_frame(cv::Mat* frame)
+DEFUN_WARN_UNUSED bool Camera::_get_frame(cv::Mat& frame)
{
if (cap && cap->isOpened())
{
- for (int i = 0; i < 100 && !cap->read(*frame); i++)
+ for (int i = 0; i < 100 && !cap->read(frame); i++)
;;
- if (frame->empty())
+ if (frame.empty())
return false;
- cam_info.res_x = frame->cols;
- cam_info.res_y = frame->rows;
return true;
}
return false;
@@ -119,3 +144,5 @@ void Camera::camera_deleter::operator()(cv::VideoCapture* cap)
std::default_delete<cv::VideoCapture>()(cap);
}
}
+
+} // ns impl
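
CamInfo::get_focal_length() above converts the configured diagonal FOV into a focal length normalized by the image width, replacing the same math previously done in Tracker_PT::get_focal_length(). A self-contained sketch of that conversion (hypothetical function name, and a local pi constant standing in for M_PI):

    #include <cmath>
    #include <cstdio>

    // Diagonal FOV (degrees) + resolution -> focal length in units of
    // image width, matching CamInfo::get_focal_length() above.
    static double focal_length_from_diag_fov(double fov_deg, int res_x, int res_y)
    {
        constexpr double pi = 3.14159265358979323846;
        const double diag_len = std::sqrt(double(res_x*res_x + res_y*res_y));
        const double aspect_x = res_x / diag_len;
        const double diag_fov = fov_deg * pi/180;
        // horizontal FOV recovered from the diagonal FOV
        const double fov_x = 2*std::atan(std::tan(diag_fov*.5) * aspect_x);
        // normalized focal length: 0.5 / tan(fov_x / 2)
        return .5 / std::tan(fov_x * .5);
    }

    int main()
    {
        // e.g. a webcam with a 56-degree diagonal FOV at 640x480
        std::printf("fx = %f\n", focal_length_from_diag_fov(56, 640, 480));
    }

Note that get_frame() now also stamps res_x, res_y, fov and the smoothed fps estimate into the CamInfo it hands back, so the focal length is always derived from the resolution of the frame actually delivered.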
diff --git a/tracker-pt/camera.h b/tracker-pt/camera.h
index 3f5a8f43..1d3b332c 100644
--- a/tracker-pt/camera.h
+++ b/tracker-pt/camera.h
@@ -10,17 +10,27 @@
#undef NDEBUG
#include <cassert>
+#include "numeric.hpp"
+#include "ftnoir_tracker_pt_settings.h"
+
#include "compat/util.hpp"
#include <opencv2/core/core.hpp>
-#include <memory>
#include <opencv2/videoio.hpp>
-#include <string>
+
+#include <memory>
#include <QString>
-struct CamInfo
+namespace impl {
+
+using namespace types;
+
+struct CamInfo final
{
- CamInfo() : res_x(0), res_y(0), fps(-1), idx(-1) {}
+ CamInfo() : fov(0), res_x(0), res_y(0), fps(-1), idx(-1) {}
+ void get_focal_length(f& fx) const;
+
+ double fov;
int res_x;
int res_y;
@@ -31,40 +41,47 @@ struct CamInfo
class Camera final
{
public:
- Camera() : dt_valid(0), dt_mean(0) {}
+ Camera() : dt_valid(0), dt_mean(0) {}
- DEFUN_WARN_UNUSED bool start(int idx, int fps, int res_x, int res_y);
- void stop();
+ DEFUN_WARN_UNUSED bool start(int idx, int fps, int res_x, int res_y);
+ void stop();
- DEFUN_WARN_UNUSED bool get_frame(double dt, cv::Mat* frame);
- DEFUN_WARN_UNUSED bool get_info(CamInfo &ret);
+ DEFUN_WARN_UNUSED bool get_frame(double dt, cv::Mat& frame, CamInfo& info);
+ DEFUN_WARN_UNUSED bool get_info(CamInfo &ret) const;
- CamInfo get_desired() const { return cam_desired; }
- QString get_desired_name() const;
- QString get_active_name() const;
+ CamInfo get_desired() const { return cam_desired; }
+ QString get_desired_name() const;
+ QString get_active_name() const;
- cv::VideoCapture& operator*() { assert(cap); return *cap; }
- const cv::VideoCapture& operator*() const { assert(cap); return *cap; }
- cv::VideoCapture* operator->() { assert(cap); return cap.get(); }
- const cv::VideoCapture* operator->() const { return cap.get(); }
- operator bool() const { return cap && cap->isOpened(); }
+ cv::VideoCapture& operator*() { assert(cap); return *cap; }
+ const cv::VideoCapture& operator*() const { assert(cap); return *cap; }
+ cv::VideoCapture* operator->() { assert(cap); return cap.get(); }
+ const cv::VideoCapture* operator->() const { return cap.get(); }
+ operator bool() const { return cap && cap->isOpened(); }
private:
- DEFUN_WARN_UNUSED bool _get_frame(cv::Mat* frame);
+ DEFUN_WARN_UNUSED bool _get_frame(cv::Mat& frame);
+
+ settings_pt s;
- double dt_valid;
- double dt_mean;
+ double dt_valid;
+ double dt_mean;
- CamInfo cam_info;
- CamInfo cam_desired;
- QString desired_name, active_name;
+ CamInfo cam_info;
+ CamInfo cam_desired;
+ QString desired_name, active_name;
- struct camera_deleter final
- {
- void operator()(cv::VideoCapture* cap);
- };
+ struct camera_deleter final
+ {
+ void operator()(cv::VideoCapture* cap);
+ };
- using camera_ptr = std::unique_ptr<cv::VideoCapture, camera_deleter>;
+ using camera_ptr = std::unique_ptr<cv::VideoCapture, camera_deleter>;
- camera_ptr cap;
+ camera_ptr cap;
};
+
+} // ns impl
+
+using impl::Camera;
+using impl::CamInfo;
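
With the signature change above, a caller obtains the frame and a CamInfo consistent with it from a single call instead of querying get_info() separately (compare the Tracker_PT::run() hunk below). A minimal caller sketch against this header; the function itself and the dt bookkeeping are hypothetical:

    #include "camera.h"
    #include <opencv2/core.hpp>

    using namespace types; // for the scalar type `f`, as in the header above

    void poll_once(Camera& camera, double dt /* seconds since the last call */)
    {
        cv::Mat frame;
        CamInfo info;

        // true only when a fresh frame was read; `info` then reflects that
        // frame's resolution, the smoothed fps estimate and the configured FOV
        if (camera.get_frame(dt, frame, info))
        {
            f fx;
            info.get_focal_length(fx);
            // ... hand `frame` and `fx` to the point extractor / tracker
        }
    }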
diff --git a/tracker-pt/ftnoir_tracker_pt.cpp b/tracker-pt/ftnoir_tracker_pt.cpp
index 31d3cb14..33a40825 100644
--- a/tracker-pt/ftnoir_tracker_pt.cpp
+++ b/tracker-pt/ftnoir_tracker_pt.cpp
@@ -23,7 +23,7 @@ Tracker_PT::Tracker_PT() :
commands(0),
ever_success(false)
{
- connect(s.b.get(), SIGNAL(saving()), this, SLOT(apply_settings()));
+ connect(s.b.get(), SIGNAL(saving()), this, SLOT(apply_settings()), Qt::DirectConnection);
}
Tracker_PT::~Tracker_PT()
@@ -47,35 +47,6 @@ void Tracker_PT::reset_command(Command command)
commands &= ~command;
}
-bool Tracker_PT::get_focal_length(f& ret)
-{
- QMutexLocker l(&camera_mtx);
- CamInfo info;
- const bool res = camera.get_info(info);
- if (res)
- {
- using std::tan;
- using std::atan;
- using std::sqrt;
-
- const double w = info.res_x, h = info.res_y;
-#if 0
- const double diag = sqrt(w/h*w/h + h/w*h/w);
- const double diag_fov = static_cast<int>(s.fov) * M_PI / 180.;
- const double fov = 2.*atan(tan(diag_fov/2.)/diag);
- ret = .5 / tan(.5 * fov);
-#else
- const double diag_fov = s.fov * M_PI/180;
- const double aspect = w / sqrt(w*w + h*h);
- const double fov = 2*atan(tan(diag_fov*.5) * aspect);
- ret = .5 / tan(fov * .5);
- //static bool once = false; if (!once) { once = true; qDebug() << "f" << ret << "fov" << (fov * 180/M_PI); }
-#endif
- return true;
- }
- return false;
-}
-
void Tracker_PT::run()
{
cv::setNumThreads(0);
@@ -93,30 +64,27 @@ void Tracker_PT::run()
{
const double dt = time.elapsed_seconds();
time.start();
+ CamInfo cam_info;
bool new_frame;
{
QMutexLocker l(&camera_mtx);
- new_frame = camera.get_frame(dt, &frame);
- if (frame.rows != frame_.rows || frame.cols != frame_.cols)
- frame_ = cv::Mat(frame.rows, frame.cols, CV_8UC3);
+ new_frame = camera.get_frame(dt, frame, cam_info);
+ if (new_frame)
+ {
+ if (frame.rows != frame_.rows || frame.cols != frame_.cols)
+ frame_ = cv::Mat(frame.rows, frame.cols, CV_8UC3);
+ }
frame.copyTo(frame_);
}
if (new_frame && !frame_.empty())
{
- CamInfo cam_info;
-
- if (!camera.get_info(cam_info))
- continue;
-
point_extractor.extract_points(frame_, points);
point_count = points.size();
f fx;
-
- if (!get_focal_length(fx))
- continue;
+ cam_info.get_focal_length(fx);
const bool success = points.size() >= PointModel::N_POINTS;
@@ -124,11 +92,8 @@ void Tracker_PT::run()
{
point_tracker.track(points,
PointModel(s),
- fx,
- s.dynamic_pose,
- s.init_phase_timeout,
- cam_info.res_x,
- cam_info.res_y);
+ cam_info,
+ s.dynamic_pose ? s.init_phase_timeout : 0);
ever_success = true;
}
diff --git a/tracker-pt/ftnoir_tracker_pt.h b/tracker-pt/ftnoir_tracker_pt.h
index 66928655..fcf74bff 100644
--- a/tracker-pt/ftnoir_tracker_pt.h
+++ b/tracker-pt/ftnoir_tracker_pt.h
@@ -63,8 +63,6 @@ private:
void set_command(Command command);
void reset_command(Command command);
- bool get_focal_length(f& ret);
-
QMutex camera_mtx;
QMutex data_mtx;
Camera camera;
diff --git a/tracker-pt/point_extractor.cpp b/tracker-pt/point_extractor.cpp
index a688faad..f7f63784 100644
--- a/tracker-pt/point_extractor.cpp
+++ b/tracker-pt/point_extractor.cpp
@@ -83,9 +83,6 @@ void PointExtractor::extract_points(cv::Mat& frame, std::vector<vec2>& points)
using std::round;
using std::sort;
- const int W = frame.cols;
- const int H = frame.rows;
-
if (frame_gray.rows != frame.rows || frame_gray.cols != frame.cols)
{
frame_gray = cv::Mat(frame.rows, frame.cols, CV_8U);
@@ -219,6 +216,9 @@ end:
sort(blobs.begin(), blobs.end(), [](const blob& b1, const blob& b2) -> bool { return b2.brightness < b1.brightness; });
+ const int W = frame.cols;
+ const int H = frame.rows;
+
for (idx = 0; idx < std::min(PointModel::N_POINTS, unsigned(blobs.size())); ++idx)
{
blob &b = blobs[idx];
@@ -253,6 +253,8 @@ end:
for (auto& b : blobs)
{
+ // note: H/W is equal to fx/fy
+
vec2 p((b.pos[0] - W/2)/W, -(b.pos[1] - H/2)/W);
points.push_back(p);
}
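
The final loop above maps blob centers from pixel coordinates into the normalized coordinates the tracker consumes: centered on the image, y axis flipped, and both axes divided by the image width W (see the new "H/W is equal to fx/fy" note), so the single normalized focal length fx applies to x and y alike. A sketch with a hypothetical helper name:

    #include <opencv2/core.hpp>

    // Mirror of the normalization at the end of extract_points():
    // pixel position -> centered coordinates in units of image width,
    // with the y axis pointing up.
    static cv::Vec2d normalize_blob(const cv::Vec2d& pos, int W, int H)
    {
        return cv::Vec2d( (pos[0] - W/2)/W,
                         -(pos[1] - H/2)/W);
    }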
diff --git a/tracker-pt/point_extractor.h b/tracker-pt/point_extractor.h
index ad350344..3ad8ed52 100644
--- a/tracker-pt/point_extractor.h
+++ b/tracker-pt/point_extractor.h
@@ -9,6 +9,7 @@
#pragma once
#include "ftnoir_tracker_pt_settings.h"
+#include "camera.h"
#include "numeric.hpp"
#include <opencv2/core.hpp>
diff --git a/tracker-pt/point_tracker.cpp b/tracker-pt/point_tracker.cpp
index 7c0367d0..cae68bf3 100644
--- a/tracker-pt/point_tracker.cpp
+++ b/tracker-pt/point_tracker.cpp
@@ -89,16 +89,15 @@ PointTracker::PointTracker() : init_phase(true)
PointTracker::PointOrder PointTracker::find_correspondences_previous(const vec2* points,
const PointModel& model,
- f focal_length,
- int w,
- int h)
+ const CamInfo& info)
{
+ f fx; info.get_focal_length(fx);
PointTracker::PointOrder p;
- p[0] = project(vec3(0,0,0), focal_length);
- p[1] = project(model.M01, focal_length);
- p[2] = project(model.M02, focal_length);
+ p[0] = project(vec3(0,0,0), fx);
+ p[1] = project(model.M01, fx);
+ p[2] = project(model.M02, fx);
- const int diagonal = int(std::sqrt(w*w + h*h));
+ const int diagonal = int(std::sqrt(double(info.res_x*info.res_x + info.res_y*info.res_y)));
static constexpr int div = 100;
const int max_dist = diagonal / div; // 8 pixels for 640x480
@@ -140,26 +139,25 @@ PointTracker::PointOrder PointTracker::find_correspondences_previous(const vec2*
void PointTracker::track(const std::vector<vec2>& points,
const PointModel& model,
- f focal_length,
- bool dynamic_pose,
- int init_phase_timeout,
- int w,
- int h)
+ const CamInfo& info,
+ int init_phase_timeout)
{
+ f fx;
+ info.get_focal_length(fx);
PointOrder order;
- if (t.elapsed_ms() > init_phase_timeout)
+ if (init_phase_timeout > 0 && t.elapsed_ms() > init_phase_timeout)
{
t.start();
init_phase = true;
}
- if (!dynamic_pose || init_phase)
+ if (!(init_phase_timeout > 0 && !init_phase))
order = find_correspondences(points.data(), model);
else
- order = find_correspondences_previous(points.data(), model, focal_length, w, h);
+ order = find_correspondences_previous(points.data(), model, info);
- if (POSIT(model, order, focal_length) != -1)
+ if (POSIT(model, order, fx) != -1)
{
init_phase = false;
t.start();
@@ -351,4 +349,3 @@ vec2 PointTracker::project(const vec3& v_M, f focal_length, const Affine& X_CM)
vec3 v_C = X_CM * v_M;
return vec2(focal_length*v_C[0]/v_C[2], focal_length*v_C[1]/v_C[2]);
}
-
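
The dynamic_pose flag is now folded into init_phase_timeout: the caller passes s.dynamic_pose ? s.init_phase_timeout : 0 (see the ftnoir_tracker_pt.cpp hunk above), so !(init_phase_timeout > 0 && !init_phase) is the same predicate as the old !dynamic_pose || init_phase. A restatement with hypothetical names:

    // With dynamic pose disabled the caller passes timeout == 0, so the
    // brute-force correspondence search is always taken; otherwise the
    // previous-pose search is used once the init phase has ended.
    static bool use_bruteforce_correspondences(int init_phase_timeout, bool init_phase)
    {
        const bool dynamic_pose = init_phase_timeout > 0;
        // equivalent to !(init_phase_timeout > 0 && !init_phase)
        return !dynamic_pose || init_phase;
    }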
diff --git a/tracker-pt/point_tracker.h b/tracker-pt/point_tracker.h
index 0bac05ab..63caf0dd 100644
--- a/tracker-pt/point_tracker.h
+++ b/tracker-pt/point_tracker.h
@@ -11,6 +11,7 @@
#include "ftnoir_tracker_pt_settings.h"
#include "affine.hpp"
#include "numeric.hpp"
+#include "camera.h"
#include <opencv2/core.hpp>
#include <cstddef>
@@ -57,7 +58,7 @@ public:
// track the pose using the set of normalized point coordinates (x pos in range -0.5:0.5)
// f : (focal length)/(sensor width)
// dt : time since last call
- void track(const std::vector<vec2>& projected_points, const PointModel& model, f focal_length, bool dynamic_pose, int init_phase_timeout, int w, int h);
+ void track(const std::vector<vec2>& projected_points, const PointModel& model, const CamInfo& info, int init_phase_timeout);
Affine pose() { return X_CM; }
vec2 project(const vec3& v_M, f focal_length);
vec2 project(const vec3& v_M, f focal_length, const Affine& X_CM);
@@ -67,7 +68,7 @@ private:
using PointOrder = std::array<vec2, 3>;
PointOrder find_correspondences(const vec2* projected_points, const PointModel &model);
- PointOrder find_correspondences_previous(const vec2* points, const PointModel &model, f focal_length, int w, int h);
+ PointOrder find_correspondences_previous(const vec2* points, const PointModel &model, const CamInfo& info);
int POSIT(const PointModel& point_model, const PointOrder& order, f focal_length); // The POSIT algorithm, returns the number of iterations
Affine X_CM; // trafo from model to camera