author     Stanislaw Halik <sthalik@misaki.pl>    2019-01-07 11:01:19 +0100
committer  Stanislaw Halik <sthalik@misaki.pl>    2019-01-16 07:48:46 +0100
commit     22d3ded34963e663f289c181aa94b54f00693b34 (patch)
tree       aad6c435cc1c04609315c8a03c3aa65e09451f26 /tracker-pt
parent     442c6251669a387b1b8b75b9cb90eec6ee9a8b9b (diff)
tracker/{pt,wii}: allow float/double in cv/numeric
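The change replaces hard-coded double with the scalar alias f from cv/numeric throughout tracker-pt, so the code builds with either float or double precision. The cv/numeric header itself is not part of this diff; the sketch below only illustrates the kind of alias the changes rely on (the USE_FLOAT switch and the contents of the namespace are placeholders, not the project's actual definitions):

    // hypothetical sketch of a float/double-switchable scalar alias (not the real cv/numeric header)
    #include <opencv2/core.hpp>

    namespace types {
    #ifdef USE_FLOAT           // assumed build-time switch
        using f = float;
    #else
        using f = double;
    #endif
        using vec2  = cv::Vec<f, 2>;
        using vec3  = cv::Vec<f, 3>;
        using mat33 = cv::Matx<f, 3, 3>;
    }

With an alias like this, the literals and casts introduced below, such as f{1}/10, f{.5}, and (f)t.elapsed_seconds(), compile cleanly at either precision.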
Diffstat (limited to 'tracker-pt')
-rw-r--r--   tracker-pt/ftnoir_tracker_pt.cpp      |  2
-rw-r--r--   tracker-pt/module/camera.cpp          |  6
-rw-r--r--   tracker-pt/module/camera.h            |  6
-rw-r--r--   tracker-pt/module/frame.cpp           |  2
-rw-r--r--   tracker-pt/module/frame.hpp           |  2
-rw-r--r--   tracker-pt/module/point_extractor.cpp | 22
-rw-r--r--   tracker-pt/point_tracker.cpp          | 10
-rw-r--r--   tracker-pt/pt-api.cpp                 | 32
-rw-r--r--   tracker-pt/pt-api.hpp                 | 20
9 files changed, 53 insertions, 49 deletions
diff --git a/tracker-pt/ftnoir_tracker_pt.cpp b/tracker-pt/ftnoir_tracker_pt.cpp
index c72a7ffa..da295e39 100644
--- a/tracker-pt/ftnoir_tracker_pt.cpp
+++ b/tracker-pt/ftnoir_tracker_pt.cpp
@@ -71,7 +71,7 @@ void Tracker_PT::run()
         point_extractor->extract_points(*frame, *preview_frame, points);
         point_count = points.size();
 
-        const double fx = pt_camera_info::get_focal_length(info.fov, info.res_x, info.res_y);
+        const f fx = pt_camera_info::get_focal_length(info.fov, info.res_x, info.res_y);
         const bool success = points.size() >= PointModel::N_POINTS;
 
         Affine X_CM;
diff --git a/tracker-pt/module/camera.cpp b/tracker-pt/module/camera.cpp
index 86cffd72..1afecc92 100644
--- a/tracker-pt/module/camera.cpp
+++ b/tracker-pt/module/camera.cpp
@@ -60,12 +60,12 @@ Camera::result Camera::get_frame(pt_frame& frame_)
 
     if (new_frame)
     {
-        const double dt = t.elapsed_seconds();
+        const f dt = (f)t.elapsed_seconds();
        t.start();
 
         // measure fps of valid frames
-        constexpr double RC = .1; // seconds
-        const double alpha = dt/(dt + RC);
+        constexpr f RC = f{1}/10; // seconds
+        const f alpha = dt/(dt + RC);
 
         if (dt_mean < dt_eps)
             dt_mean = dt;
diff --git a/tracker-pt/module/camera.h b/tracker-pt/module/camera.h
index 8abe6cf0..69b2f860 100644
--- a/tracker-pt/module/camera.h
+++ b/tracker-pt/module/camera.h
@@ -33,13 +33,13 @@ struct Camera final : pt_camera
     QString get_desired_name() const override;
     QString get_active_name() const override;
 
-    void set_fov(double value) override { fov = value; }
+    void set_fov(f value) override { fov = value; }
     void show_camera_settings() override;
 
 private:
     [[nodiscard]] bool get_frame_(cv::Mat& frame);
-    double dt_mean = 0, fov = 30;
+    f dt_mean = 0, fov = 30;
 
     Timer t;
     pt_camera_info cam_info;
     pt_camera_info cam_desired;
@@ -56,7 +56,7 @@ private:
 
     pt_settings s;
 
-    static constexpr inline double dt_eps = 1./256;
+    static constexpr inline f dt_eps = f{1}/256;
 };
 
 } // ns pt_module
diff --git a/tracker-pt/module/frame.cpp b/tracker-pt/module/frame.cpp
index 7dd8c92f..c88099f1 100644
--- a/tracker-pt/module/frame.cpp
+++ b/tracker-pt/module/frame.cpp
@@ -52,7 +52,7 @@ QImage Preview::get_bitmap()
                   QImage::Format_ARGB32);
 }
 
-void Preview::draw_head_center(double x, double y)
+void Preview::draw_head_center(f x, f y)
 {
     auto [px_, py_] = to_pixel_pos(x, y, frame_copy.cols, frame_copy.rows);
 
diff --git a/tracker-pt/module/frame.hpp b/tracker-pt/module/frame.hpp
index d440bd67..89334599 100644
--- a/tracker-pt/module/frame.hpp
+++ b/tracker-pt/module/frame.hpp
@@ -26,7 +26,7 @@ struct Preview final : pt_preview
     Preview& operator=(const pt_frame& frame) override;
     QImage get_bitmap() override;
-    void draw_head_center(double x, double y) override;
+    void draw_head_center(f x, f y) override;
 
     operator cv::Mat&() { return frame_copy; }
     operator cv::Mat const&() const { return frame_copy; }
 
diff --git a/tracker-pt/module/point_extractor.cpp b/tracker-pt/module/point_extractor.cpp
index 0f7af939..1d5d4a06 100644
--- a/tracker-pt/module/point_extractor.cpp
+++ b/tracker-pt/module/point_extractor.cpp
@@ -52,7 +52,7 @@ algorithm for tracking single particles with variable size and shape." (2008).
 static cv::Vec2d MeanShiftIteration(const cv::Mat &frame_gray, const vec2 &current_center, f filter_width)
 {
     // Most amazingly this function runs faster with doubles than with floats.
-    const f s = 1.0 / filter_width;
+    const f s = 1 / filter_width;
 
     f m = 0;
     vec2 com { 0, 0 };
@@ -62,12 +62,12 @@ static cv::Vec2d MeanShiftIteration(const cv::Mat &frame_gray, const vec2 &curre
         for (int j = 0; j < frame_gray.cols; j++)
         {
             f val = frame_ptr[j];
-            val = val * val; // taking the square wights brighter parts of the image stronger.
+            val = val * val; // taking the square weighs brighter parts of the image stronger.
             {
                 f dx = (j - current_center[0])*s;
                 f dy = (i - current_center[1])*s;
-                f f = std::fmax(0, 1 - dx*dx - dy*dy);
-                val *= f;
+                f max = std::fmax(f{0}, 1 - dx*dx - dy*dy);
+                val *= max;
             }
             m += val;
             com[0] += j * val;
@@ -76,7 +76,7 @@ static cv::Vec2d MeanShiftIteration(const cv::Mat &frame_gray, const vec2 &curre
     }
     if (m > f(.1))
     {
-        com *= f(1) / m;
+        com *= 1 / m;
         return com;
     }
     else
@@ -93,7 +93,7 @@ PointExtractor::PointExtractor(const QString& module_name) : s(module_name)
 void PointExtractor::ensure_channel_buffers(const cv::Mat& orig_frame)
 {
     if (ch[0].rows != orig_frame.rows || ch[0].cols != orig_frame.cols)
-        for (unsigned k = 0; k < 3; k++)
+        for (unsigned k = 0; k < 3; k++) // NOLINT(modernize-loop-convert)
             ch[k] = cv::Mat1b(orig_frame.rows, orig_frame.cols);
 }
 
@@ -176,7 +176,7 @@ void PointExtractor::threshold_image(const cv::Mat& frame_gray, cv::Mat1b& outpu
                      cv::noArray(),
                      hist,
                      1,
-                     (int const*) &hist_size,
+                     &hist_size,
                      &ranges);
 
         const f radius = (f) threshold_radius_value(frame_gray.cols, frame_gray.rows, threshold_slider_value);
@@ -217,8 +217,8 @@ void PointExtractor::extract_points(const pt_frame& frame_, pt_preview& preview_
     threshold_image(frame_gray_unmasked, frame_bin);
     frame_gray_unmasked.copyTo(frame_gray, frame_bin);
 
-    const f region_size_min = s.min_point_size;
-    const f region_size_max = s.max_point_size;
+    const f region_size_min = (f)s.min_point_size;
+    const f region_size_max = (f)s.max_point_size;
 
     unsigned idx = 0;
 
@@ -309,7 +309,7 @@ end:
         static constexpr f radius_c = f(1.75);
 
         const f kernel_radius = b.radius * radius_c;
-        vec2 pos(rect.width/2., rect.height/2.); // position relative to ROI.
+        vec2 pos(rect.width/f{2}, rect.height/f{2}); // position relative to ROI.
 
         for (int iter = 0; iter < 10; ++iter)
         {
@@ -329,7 +329,7 @@ end:
         blob& b = blobs[k];
 
         const f dpi = preview_frame.cols / f(320);
-        const f offx = 10 * dpi, offy = 7.5 * dpi;
+        const f offx = 10 * dpi, offy = f{7.5} * dpi;
 
         const f cx = preview_frame.cols / f(frame.cols),
                 cy = preview_frame.rows / f(frame.rows),
diff --git a/tracker-pt/point_tracker.cpp b/tracker-pt/point_tracker.cpp
index c4b518a9..f1f968d0 100644
--- a/tracker-pt/point_tracker.cpp
+++ b/tracker-pt/point_tracker.cpp
@@ -93,7 +93,7 @@ PointTracker::PointOrder PointTracker::find_correspondences_previous(const vec2*
                                                                      const PointModel& model,
                                                                      const pt_camera_info& info)
 {
-    const double fx = pt_camera_info::get_focal_length(info.fov, info.res_x, info.res_y);
+    const f fx = pt_camera_info::get_focal_length(info.fov, info.res_x, info.res_y);
     PointTracker::PointOrder p;
     p[0] = project(vec3(0,0,0), fx);
     p[1] = project(model.M01, fx);
@@ -140,7 +140,7 @@ void PointTracker::track(const std::vector<vec2>& points,
                          const pt_camera_info& info,
                          int init_phase_timeout)
 {
-    const double fx = pt_camera_info::get_focal_length(info.fov, info.res_x, info.res_y);
+    const f fx = pt_camera_info::get_focal_length(info.fov, info.res_x, info.res_y);
     PointOrder order;
 
     if (init_phase_timeout <= 0 || t.elapsed_ms() > init_phase_timeout || init_phase)
@@ -262,7 +262,7 @@ int PointTracker::POSIT(const PointModel& model, const PointOrder& order, f foca
         J_1 = J0 + rho*sin(theta)*model.u;
         J_2 = J0 - rho*sin(theta)*model.u;
 
-        f norm_const = 1/cv::norm(I_1); // all have the same norm
+        f norm_const = (f)(1/cv::norm(I_1)); // all have the same norm
 
         // create rotation matrices
         I_1 *= norm_const; J_1 *= norm_const;
@@ -281,8 +281,8 @@ int PointTracker::POSIT(const PointModel& model, const PointOrder& order, f foca
 
         // pick the rotation solution closer to the expected one
         // in simple metric d(A,B) = || I - A * B^T ||
-        f R_1_deviation = cv::norm(mat33::eye() - R_expected * R_1.t());
-        f R_2_deviation = cv::norm(mat33::eye() - R_expected * R_2.t());
+        f R_1_deviation = (f)(cv::norm(mat33::eye() - R_expected * R_1.t()));
+        f R_2_deviation = (f)(cv::norm(mat33::eye() - R_expected * R_2.t()));
 
         if (R_1_deviation < R_2_deviation)
             R_current = &R_1;
diff --git a/tracker-pt/pt-api.cpp b/tracker-pt/pt-api.cpp
index 415f1c13..ba301276 100644
--- a/tracker-pt/pt-api.cpp
+++ b/tracker-pt/pt-api.cpp
@@ -5,15 +5,15 @@ using namespace types;
 pt_camera_info::pt_camera_info() = default;
 
-double pt_camera_info::get_focal_length(f fov, int res_x, int res_y)
+f pt_camera_info::get_focal_length(f fov, int res_x, int res_y)
 {
-    const double diag_len = std::sqrt(double(res_x*res_x + res_y*res_y));
-    const double aspect_x = res_x / diag_len;
+    const f diag_len = std::sqrt(f(res_x*res_x + res_y*res_y));
+    const f aspect_x = res_x / diag_len;
     //const double aspect_y = res_y / diag_len;
 
-    const double diag_fov = fov * pi/180;
-    const double fov_x = 2*std::atan(std::tan(diag_fov*.5) * aspect_x);
+    const f diag_fov = fov * pi/180;
+    const f fov_x = 2*std::atan(std::tan(diag_fov*f{.5}) * aspect_x);
     //const double fov_y = 2*atan(tan(diag_fov*.5) * aspect_y);
-    const double fx = .5 / std::tan(fov_x * .5);
+    const f fx = f{.5} / std::tan(fov_x * f{.5});
     return fx;
     //fy = .5 / tan(fov_y * .5);
     //static bool once = false; if (!once) { once = true; qDebug() << "f" << ret << "fov" << (fov * 180/M_PI); }
@@ -26,27 +26,27 @@ pt_runtime_traits::~pt_runtime_traits() = default;
 pt_point_extractor::pt_point_extractor() = default;
 pt_point_extractor::~pt_point_extractor() = default;
 
-double pt_point_extractor::threshold_radius_value(int w, int h, int threshold)
+f pt_point_extractor::threshold_radius_value(int w, int h, int threshold)
 {
-    double cx = w / 640., cy = h / 480.;
+    f cx = w / f{640}, cy = h / f{480};
 
-    const double min_radius = 1.75 * cx;
-    const double max_radius = 15 * cy;
+    const f min_radius = f{1.75} * cx;
+    const f max_radius = f{15} * cy;
 
-    const double radius = std::fmax(0., (max_radius-min_radius) * threshold / f(255) + min_radius);
+    const f radius = std::fmax(f{0}, (max_radius-min_radius) * threshold / f(255) + min_radius);
 
     return radius;
 }
 
-std::tuple<double, double> pt_pixel_pos_mixin::to_pixel_pos(double x, double y, int w, int h)
+std::tuple<f, f> pt_pixel_pos_mixin::to_pixel_pos(f x, f y, int w, int h)
 {
-    return std::make_tuple(w*(x+.5), .5*(h - 2*y*w));
+    return std::make_tuple(w*(x+f{.5}), f{.5}*(h - 2*y*w));
 }
 
-std::tuple<double, double> pt_pixel_pos_mixin::to_screen_pos(double px, double py, int w, int h)
+std::tuple<f, f> pt_pixel_pos_mixin::to_screen_pos(f px, f py, int w, int h)
 {
-    px *= w/(w-1.); py *= h/(h-1.);
-    return std::make_tuple((px - w/2.)/w, -(py - h/2.)/w);
+    px *= w/(w-f{1}); py *= h/(h-f{1});
+    return std::make_tuple((px - w/f{2})/w, -(py - h/f{2})/w);
 }
 
 pt_frame::pt_frame() = default;
diff --git a/tracker-pt/pt-api.hpp b/tracker-pt/pt-api.hpp
index 300e4558..09c09a28 100644
--- a/tracker-pt/pt-api.hpp
+++ b/tracker-pt/pt-api.hpp
@@ -23,10 +23,10 @@ struct pt_camera_info final
     using f = types::f;
 
     pt_camera_info();
-    static double get_focal_length(f fov, int res_x, int res_y);
+    static f get_focal_length(f fov, int res_x, int res_y);
 
-    double fov = 0;
-    double fps = 0;
+    f fov = 0;
+    f fps = 0;
 
     int res_x = 0;
     int res_y = 0;
@@ -35,8 +35,10 @@
 
 struct pt_pixel_pos_mixin
 {
-    static std::tuple<double, double> to_pixel_pos(double x, double y, int w, int h);
-    static std::tuple<double, double> to_screen_pos(double px, double py, int w, int h);
+    using f = types::f;
+
+    static std::tuple<f, f> to_pixel_pos(f x, f y, int w, int h);
+    static std::tuple<f, f> to_screen_pos(f px, f py, int w, int h);
 };
 
 struct pt_frame : pt_pixel_pos_mixin
@@ -61,12 +63,13 @@ struct pt_preview : pt_frame
 {
     virtual pt_preview& operator=(const pt_frame&) = 0;
     virtual QImage get_bitmap() = 0;
-    virtual void draw_head_center(double x, double y) = 0;
+    virtual void draw_head_center(f x, f y) = 0;
 };
 
 struct pt_camera
 {
     using result = std::tuple<bool, pt_camera_info>;
+    using f = types::f;
 
     pt_camera();
     virtual ~pt_camera();
@@ -81,19 +84,20 @@ struct pt_camera
     virtual QString get_desired_name() const = 0;
     virtual QString get_active_name() const = 0;
 
-    virtual void set_fov(double value) = 0;
+    virtual void set_fov(f value) = 0;
     virtual void show_camera_settings() = 0;
 };
 
 struct pt_point_extractor : pt_pixel_pos_mixin
 {
     using vec2 = types::vec2;
+    using f = types::f;
 
     pt_point_extractor();
     virtual ~pt_point_extractor();
 
     virtual void extract_points(const pt_frame& image, pt_preview& preview_frame, std::vector<vec2>& points) = 0;
-    static double threshold_radius_value(int w, int h, int threshold);
+    static f threshold_radius_value(int w, int h, int threshold);
 };
 
 struct pt_runtime_traits
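For reference, the focal-length math that get_focal_length now carries out in f can be checked on its own. This is a standalone sketch of the same formula (a generic template, not the project's actual API); it returns the focal length normalized to the image width, and the 56 degree example value is only illustrative:

    // standalone sketch of the focal-length formula from pt-api.cpp, templated on the scalar type
    #include <cmath>
    #include <cstdio>

    template<typename f>
    f focal_length_from_fov(f diag_fov_deg, int res_x, int res_y)
    {
        const f pi = f(3.14159265358979323846);
        // project the diagonal FOV onto the horizontal axis, then convert to a focal length
        const f diag_len = std::sqrt(f(res_x*res_x + res_y*res_y));
        const f aspect_x = res_x / diag_len;
        const f diag_fov = diag_fov_deg * pi/180;
        const f fov_x = 2*std::atan(std::tan(diag_fov/2) * aspect_x);
        return f(.5) / std::tan(fov_x/2);   // in units of image width
    }

    int main()
    {
        // 640x480 sensor with a 56 degree diagonal FOV gives fx of roughly 1.18
        std::printf("float:  %f\n", focal_length_from_fov<float>(56.f, 640, 480));
        std::printf("double: %f\n", focal_length_from_fov<double>(56.0, 640, 480));
    }

Templating on the scalar is just a convenient way to confirm the expressions behave the same at both precisions; the project instead fixes the type once through the f alias in cv/numeric.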