| field | value | date |
|---|---|---|
| author | Stanislaw Halik <sthalik@misaki.pl> | 2018-12-29 16:49:56 +0100 |
| committer | Stanislaw Halik <sthalik@misaki.pl> | 2019-01-16 07:48:19 +0100 |
| commit | a0bc55227ca1511a0c705e89c308ecbbdc7124c8 (patch) | |
| tree | 80ceb975f155ce386dbc0049e9b379dd6e3b63d2 | |
| parent | f1af5a5a6967cec291d0054c32f69545fd824c4f (diff) | |
tracker/pt: cleanup, declarations, lock scope
| mode | path | lines changed |
|---|---|---|
| -rw-r--r-- | tracker-pt/ftnoir_tracker_pt.cpp | 87 |
| -rw-r--r-- | tracker-pt/ftnoir_tracker_pt.h | 11 |
| -rw-r--r-- | tracker-pt/ftnoir_tracker_pt_dialog.cpp | 4 |
| -rw-r--r-- | tracker-pt/module/point_extractor.cpp | 15 |
| -rw-r--r-- | tracker-pt/module/point_extractor.h | 1 |
| -rw-r--r-- | tracker-pt/point_tracker.cpp | 25 |
| -rw-r--r-- | tracker-pt/point_tracker.h | 8 |
7 files changed, 69 insertions, 82 deletions
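
The "lock scope" part of the change drops the `QMutex data_mtx` in favour of a second `std::atomic_flag` spinlock (`data_lock`, declared `mutable` alongside `center_flag` so the new `pose() const` accessor can still take it), and narrows how long each lock is held in `Tracker_PT::run()` and `Tracker_PT::data()`. The `spinlock_guard` type used throughout the diff is not defined in this patch; the sketch below only illustrates the usual `std::atomic_flag` RAII idiom such a guard typically follows, under a hypothetical name.

```cpp
#include <atomic>

// Hypothetical sketch of an atomic_flag-based RAII spin lock, illustrating the
// idiom assumed for the spinlock_guard used in this diff; the real class is
// defined elsewhere and may differ in detail.
class spinlock_guard_sketch final
{
    std::atomic_flag& flag;

public:
    explicit spinlock_guard_sketch(std::atomic_flag& flag) : flag(flag)
    {
        // busy-wait until the flag is acquired
        while (flag.test_and_set(std::memory_order_acquire))
            ;
    }

    ~spinlock_guard_sketch()
    {
        // release the flag for the next owner
        flag.clear(std::memory_order_release);
    }

    spinlock_guard_sketch(const spinlock_guard_sketch&) = delete;
    spinlock_guard_sketch& operator=(const spinlock_guard_sketch&) = delete;
};
```

The guarded sections after the patch are a handful of assignments and a pose copy, which is the kind of short critical section a spinlock suits; anything much longer would arguably be better served by the mutex it replaces.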
diff --git a/tracker-pt/ftnoir_tracker_pt.cpp b/tracker-pt/ftnoir_tracker_pt.cpp
index cee9c2a0..f52564f7 100644
--- a/tracker-pt/ftnoir_tracker_pt.cpp
+++ b/tracker-pt/ftnoir_tracker_pt.cpp
@@ -66,41 +66,39 @@ void Tracker_PT::run()
         if (new_frame)
         {
-            spinlock_guard l(center_flag);
-
             *preview_frame = *frame;
 
             point_extractor->extract_points(*frame, *preview_frame, points);
             point_count = points.size();
 
             const double fx = pt_camera_info::get_focal_length(info.fov, info.res_x, info.res_y);
-
             const bool success = points.size() >= PointModel::N_POINTS;
 
-            if (success)
-            {
-                point_tracker.track(points,
-                                    PointModel(s),
-                                    info,
-                                    s.dynamic_pose ? s.init_phase_timeout : 0);
-                ever_success = true;
-            }
+            Affine X_CM;
 
             {
-                Affine X_CM;
+                spinlock_guard l(center_flag);
+
+                if (success)
                 {
-                    QMutexLocker l(&data_mtx);
-                    X_CM = point_tracker.pose();
+                    point_tracker.track(points,
+                                        PointModel(s),
+                                        info,
+                                        s.dynamic_pose ? s.init_phase_timeout : 0);
+                    ever_success = true;
                 }
 
-                // just copy pasted these lines from below
-                Affine X_MH(mat33::eye(), vec3(s.t_MH_x, s.t_MH_y, s.t_MH_z));
-                Affine X_GH = X_CM * X_MH;
-                vec3 p = X_GH.t; // head (center?) position in global space
-
-                preview_frame->draw_head_center((p[0] * fx) / p[2], (p[1] * fx) / p[2]);
+                spinlock_guard l2(data_lock);
+                X_CM = point_tracker.pose();
             }
 
+            // just copy pasted these lines from below
+            Affine X_MH(mat33::eye(), vec3(s.t_MH_x, s.t_MH_y, s.t_MH_z));
+            Affine X_GH = X_CM * X_MH;
+            vec3 p = X_GH.t; // head (center?) position in global space
+
+            preview_frame->draw_head_center((p[0] * fx) / p[2], (p[1] * fx) / p[2]);
+
             video_widget->update_image(preview_frame->get_bitmap());
 
             {
@@ -151,32 +149,36 @@ void Tracker_PT::data(double *data)
 {
     if (ever_success)
     {
-        Affine X_CM = pose();
+        Affine X_CM;
+        {
+            spinlock_guard l(&data_lock);
+            X_CM = point_tracker.pose();
+        }
 
         Affine X_MH(mat33::eye(), vec3(s.t_MH_x, s.t_MH_y, s.t_MH_z));
-        Affine X_GH = X_CM * X_MH;
+        Affine X_GH(X_CM * X_MH);
 
         // translate rotation matrix from opengl (G) to roll-pitch-yaw (E) frame
         // -z -> x, y -> z, x -> -y
         mat33 R_EG(0, 0,-1,
                    -1, 0, 0,
                    0, 1, 0);
-        mat33 R = R_EG *  X_GH.R * R_EG.t();
+        mat33 R(R_EG *  X_GH.R * R_EG.t());
 
         // get translation(s)
         const vec3& t = X_GH.t;
 
         // extract rotation angles
-        {
-            f alpha, beta, gamma;
-            beta  = atan2( -R(2,0), sqrt(R(2,1)*R(2,1) + R(2,2)*R(2,2)) );
-            alpha = atan2( R(1,0), R(0,0));
-            gamma = atan2( R(2,1), R(2,2));
-
-            data[Yaw] = rad2deg * alpha;
-            data[Pitch] = -rad2deg * beta;
-            data[Roll] = rad2deg * gamma;
-        }
+        f alpha, beta, gamma;
+        beta  = atan2( -R(2,0), sqrt(R(2,1)*R(2,1) + R(2,2)*R(2,2)) );
+        alpha = atan2( R(1,0), R(0,0) );
+        gamma = atan2( R(2,1), R(2,2) );
+
+        constexpr f rad2deg = f(180/M_PI);
+
+        data[Yaw] = rad2deg * alpha;
+        data[Pitch] = -rad2deg * beta;
+        data[Roll] = rad2deg * gamma;
 
         // convert to cm
         data[TX] = t[0] / 10;
@@ -193,25 +195,24 @@ bool Tracker_PT::center()
     return false;
 }
 
-Affine Tracker_PT::pose()
-{
-    QMutexLocker l(&data_mtx);
-
-    return point_tracker.pose();
-}
-
 int Tracker_PT::get_n_points()
 {
     return int(point_count);
 }
 
-bool Tracker_PT::get_cam_info(pt_camera_info* info)
+bool Tracker_PT::get_cam_info(pt_camera_info& info)
 {
-    QMutexLocker lock(&camera_mtx);
+    QMutexLocker l(&camera_mtx);
     bool ret;
 
-    std::tie(ret, *info) = camera->get_info();
+    std::tie(ret, info) = camera->get_info();
     return ret;
 }
 
+Affine Tracker_PT::pose() const
+{
+    spinlock_guard l(data_lock);
+    return point_tracker.pose();
+}
+
 } // ns pt_module
diff --git a/tracker-pt/ftnoir_tracker_pt.h b/tracker-pt/ftnoir_tracker_pt.h
index 8cf609c3..20cfaa8b 100644
--- a/tracker-pt/ftnoir_tracker_pt.h
+++ b/tracker-pt/ftnoir_tracker_pt.h
@@ -45,9 +45,9 @@ public:
     void data(double* data) override;
     bool center() override;
 
-    Affine pose();
     int  get_n_points();
-    bool get_cam_info(pt_camera_info* info);
+    [[nodiscard]] bool get_cam_info(pt_camera_info& info);
+    Affine pose() const;
 
 public slots:
     bool maybe_reopen_camera();
     void set_fov(int value);
@@ -57,7 +57,6 @@ private:
     pointer<pt_runtime_traits> traits;
 
     QMutex camera_mtx;
-    QMutex data_mtx;
 
     PointTracker point_tracker;
@@ -76,10 +75,8 @@ private:
     std::atomic<unsigned> point_count { 0 };
     std::atomic<bool> ever_success { false };
-    std::atomic_flag center_flag = ATOMIC_FLAG_INIT;
-
-    static constexpr inline f rad2deg = f(180/M_PI);
-    //static constexpr float deg2rad = float(M_PI/180);
+    mutable std::atomic_flag center_flag = ATOMIC_FLAG_INIT;
+    mutable std::atomic_flag data_lock = ATOMIC_FLAG_INIT;
 };
 
 } // ns pt_impl
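
The `data()` hunk above also flattens the angle-extraction block and turns `rad2deg` into a function-local `constexpr` instead of a class member. Below is a standalone check of those `atan2` formulas, using a plain row-major 3×3 array in place of the project's `mat33` type (that substitution is an assumption made only for this sketch).

```cpp
#include <cmath>
#include <cstdio>

int main()
{
    const double theta = 30 * M_PI / 180; // build a pure 30-degree yaw rotation
    const double R[3][3] = {
        { std::cos(theta), -std::sin(theta), 0 },
        { std::sin(theta),  std::cos(theta), 0 },
        { 0,                0,               1 },
    };

    // same extraction as Tracker_PT::data()
    const double beta  = std::atan2(-R[2][0], std::sqrt(R[2][1]*R[2][1] + R[2][2]*R[2][2]));
    const double alpha = std::atan2( R[1][0], R[0][0]);
    const double gamma = std::atan2( R[2][1], R[2][2]);

    constexpr double rad2deg = 180 / M_PI;

    // expected output: yaw 30.0, pitch 0.0, roll 0.0
    std::printf("yaw %.1f pitch %.1f roll %.1f\n",
                rad2deg * alpha, -rad2deg * beta, rad2deg * gamma);
}
```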
diff --git a/tracker-pt/ftnoir_tracker_pt_dialog.cpp b/tracker-pt/ftnoir_tracker_pt_dialog.cpp
index d900b78d..87f4069f 100644
--- a/tracker-pt/ftnoir_tracker_pt_dialog.cpp
+++ b/tracker-pt/ftnoir_tracker_pt_dialog.cpp
@@ -128,7 +128,7 @@ QString TrackerDialog_PT::threshold_display_text(int threshold_value)
             h = 480;
         }
 
-        if (tracker && tracker->get_cam_info(&info) && info.res_x * info.res_y != 0)
+        if (tracker && tracker->get_cam_info(info) && info.res_x * info.res_y != 0)
         {
             w = info.res_x;
             h = info.res_y;
@@ -199,7 +199,7 @@ void TrackerDialog_PT::startstop_trans_calib(bool start)
 void TrackerDialog_PT::poll_tracker_info_impl()
 {
     pt_camera_info info;
 
-    if (tracker && tracker->get_cam_info(&info))
+    if (tracker && tracker->get_cam_info(info))
     {
         ui.caminfo_label->setText(tr("%1x%2 @ %3 FPS").arg(info.res_x).arg(info.res_y).arg(iround(info.fps)));
diff --git a/tracker-pt/module/point_extractor.cpp b/tracker-pt/module/point_extractor.cpp
index 80d9dfc2..e6364a88 100644
--- a/tracker-pt/module/point_extractor.cpp
+++ b/tracker-pt/module/point_extractor.cpp
@@ -120,13 +120,6 @@ void PointExtractor::extract_single_channel(const cv::Mat& orig_frame, int idx,
     cv::mixChannels(&orig_frame, 1, &dest, 1, from_to, 1);
 }
 
-void PointExtractor::extract_channels(const cv::Mat& orig_frame, const int* order, int order_npairs)
-{
-    ensure_channel_buffers(orig_frame);
-
-    cv::mixChannels(&orig_frame, 1, (cv::Mat*) ch, order_npairs, order, order_npairs);
-}
-
 void PointExtractor::color_to_grayscale(const cv::Mat& frame, cv::Mat1b& output)
 {
     switch (s.blob_color)
@@ -148,10 +141,10 @@ void PointExtractor::color_to_grayscale(const cv::Mat& frame, cv::Mat1b& output)
     }
     case pt_color_average:
     {
-        const int W = frame.cols, H = frame.rows;
-        const cv::Mat tmp = frame.reshape(1, W * H);
-        cv::Mat output_ = output.reshape(1, W * H);
-        cv::reduce(tmp, output_, 1, cv::REDUCE_AVG);
+        const int W = frame.cols, H = frame.rows, sz = W*H;
+        cv::reduce(frame.reshape(1, sz),
+                   output.reshape(1, sz),
+                   1, cv::REDUCE_AVG);
         break;
     }
     default:
diff --git a/tracker-pt/module/point_extractor.h b/tracker-pt/module/point_extractor.h
index 2288f1a1..7dd82234 100644
--- a/tracker-pt/module/point_extractor.h
+++ b/tracker-pt/module/point_extractor.h
@@ -49,7 +49,6 @@ private:
     void ensure_buffers(const cv::Mat& frame);
 
     void extract_single_channel(const cv::Mat& orig_frame, int idx, cv::Mat& dest);
-    void extract_channels(const cv::Mat& orig_frame, const int* order, int order_npairs);
 
     void color_to_grayscale(const cv::Mat& frame, cv::Mat1b& output);
     void threshold_image(const cv::Mat& frame_gray, cv::Mat1b& output);
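
The `pt_color_average` hunk in point_extractor.cpp now passes reshaped views straight into `cv::reduce` rather than going through named temporaries. A small self-contained check of that reshape-and-reduce idiom follows, done on `CV_32F` data purely to keep the example minimal (the extractor itself operates on 8-bit frames).

```cpp
#include <opencv2/core.hpp>
#include <cstdio>

int main()
{
    // 2x2 three-channel image; every pixel's channels average to 20
    cv::Mat3f frame(2, 2, cv::Vec3f(10, 20, 30));
    cv::Mat1f gray(2, 2);

    // reshape to (rows*cols) x 3, then average across each row (dim = 1),
    // writing through the reshaped view of `gray` -- same trick as the diff
    const int sz = frame.rows * frame.cols;
    cv::reduce(frame.reshape(1, sz), gray.reshape(1, sz), 1, cv::REDUCE_AVG);

    std::printf("gray(0,0) = %g\n", (double)gray(0, 0)); // prints 20
}
```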
diff --git a/tracker-pt/point_tracker.cpp b/tracker-pt/point_tracker.cpp
index e295a4c9..636a253d 100644
--- a/tracker-pt/point_tracker.cpp
+++ b/tracker-pt/point_tracker.cpp
@@ -96,12 +96,12 @@ PointTracker::PointOrder PointTracker::find_correspondences_previous(const vec2*
     p[2] = project(model.M02, fx);
 
     const int diagonal = int(std::sqrt(f(info.res_x*info.res_x + info.res_y*info.res_y)));
-    constexpr int div = 100;
-    const int max_dist = diagonal / div; // 8 pixels for 640x480
+    constexpr int div = 80;
+    const int max_dist = diagonal / div; // 10 pixels for 640x480
 
     // set correspondences by minimum distance to projected model point
     bool point_taken[PointModel::N_POINTS];
-    for (unsigned i=0; i<PointModel::N_POINTS; ++i)
+    for (unsigned i=0; i<PointModel::N_POINTS; ++i) // NOLINT(modernize-loop-convert)
         point_taken[i] = false;
 
     for (unsigned i=0; i<PointModel::N_POINTS; ++i)
@@ -113,17 +113,15 @@ PointTracker::PointOrder PointTracker::find_correspondences_previous(const vec2*
         {
             vec2 d = p[i]-points[j];
             f sdist = d.dot(d);
-            if (sdist < min_sdist || j==0)
+            if (sdist < min_sdist || j == 0)
             {
                 min_idx = j;
                 min_sdist = sdist;
             }
         }
 
-        if (min_sdist > max_dist)
-            return find_correspondences(points, model);
-
         // if one point is closest to more than one model point, fallback
-        if (point_taken[min_idx])
+        if (min_sdist > max_dist || point_taken[min_idx])
         {
             init_phase = true;
             return find_correspondences(points, model);
@@ -135,7 +133,7 @@ PointTracker::PointOrder PointTracker::find_correspondences_previous(const vec2*
     return p;
 }
 
-bool PointTracker::maybe_use_old_point_order(const PointOrder& order, const pt_camera_info& info)
+bool PointTracker::maybe_use_old_pose(const PointOrder& order, const pt_camera_info& info)
 {
     constexpr f std_width = 640, std_height = 480;
@@ -226,15 +224,14 @@ void PointTracker::track(const std::vector<vec2>& points,
 PointTracker::PointOrder PointTracker::find_correspondences(const vec2* points, const PointModel& model)
 {
-    static const Affine a(mat33::eye(), vec3(0, 0, 1));
     // We do a simple freetrack-like sorting in the init phase...
     unsigned point_d_order[PointModel::N_POINTS];
     unsigned model_d_order[PointModel::N_POINTS];
 
-    // sort points
+    // calculate d and d_order for simple freetrack-like point correspondence
     vec2 d(model.M01[0]-model.M02[0], model.M01[1]-model.M02[1]);
+    // sort points
     model.get_d_order(points, point_d_order, d);
 
-    // calculate d and d_order for simple freetrack-like point correspondence
-    vec2 pts[3] = {
+    vec2 pts[PointModel::N_POINTS] {
         vec2(0, 0),
         vec2(model.M01[0], model.M01[1]),
         vec2(model.M02[0], model.M02[1])
@@ -358,7 +355,7 @@ int PointTracker::POSIT(const PointModel& model, const PointOrder& order, f foca
         // check for convergence condition
         const f delta = fabs(epsilon_1 - old_epsilon_1) + fabs(epsilon_2 - old_epsilon_2);
 
-        if (!(delta > constants::eps))
+        if (delta < constants::eps)
             break;
 
         old_epsilon_1 = epsilon_1;
@@ -383,7 +380,7 @@ int PointTracker::POSIT(const PointModel& model, const PointOrder& order, f foca
             }
         }
 
-    for (unsigned i = 0; i < 3; i++)
+    for (unsigned i = 0; i < 3; i++) // NOLINT(modernize-loop-convert)
     {
         int ret = std::fpclassify(t[i]);
         if (ret == FP_NAN || ret == FP_INFINITE)
diff --git a/tracker-pt/point_tracker.h b/tracker-pt/point_tracker.h
index 095b79d2..63d81456 100644
--- a/tracker-pt/point_tracker.h
+++ b/tracker-pt/point_tracker.h
@@ -61,7 +61,7 @@ public:
     // f : (focal length)/(sensor width)
     // dt : time since last call
     void track(const std::vector<vec2>& projected_points, const PointModel& model, const pt_camera_info& info, int init_phase_timeout);
-    Affine pose() { return X_CM; }
+    Affine pose() const { return X_CM; }
 
     vec2 project(const vec3& v_M, f focal_length);
     vec2 project(const vec3& v_M, f focal_length, const Affine& X_CM);
 
     void reset_state();
@@ -70,7 +70,7 @@ private:
     // the points in model order
     using PointOrder = std::array<vec2, 3>;
 
-    bool maybe_use_old_point_order(const PointOrder& order, const pt_camera_info& info);
+    bool maybe_use_old_pose(const PointOrder& order, const pt_camera_info& info);
 
     PointOrder find_correspondences(const vec2* projected_points, const PointModel &model);
     PointOrder find_correspondences_previous(const vec2* points, const PointModel &model, const pt_camera_info& info);
@@ -78,9 +78,9 @@ private:
     int POSIT(const PointModel& point_model, const PointOrder& order, f focal_length);
 
     Affine X_CM; // transform from model to camera
-    PointOrder prev_order, prev_scaled_order;
+    PointOrder prev_positions;
     Timer t;
-    bool init_phase = true, prev_order_valid = false;
+    bool init_phase = true, prev_positions_valid = false;
 };
 
 } // ns pt_module
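
For the relaxed `max_dist` gate in `find_correspondences_previous()`: the image diagonal at 640x480 is exactly 800 px, so `div = 80` allows a 10 px deviation from the previously projected model point (up from 8 px with `div = 100`) before the tracker gives up on the old ordering and falls back to the init-phase correspondence search. The arithmetic, spelled out:

```cpp
#include <cmath>
#include <cstdio>

int main()
{
    const int res_x = 640, res_y = 480;
    // sqrt(640^2 + 480^2) = sqrt(640000) = 800
    const int diagonal = int(std::sqrt(double(res_x*res_x + res_y*res_y)));

    std::printf("diagonal: %d px\n", diagonal);                      // 800
    std::printf("old max_dist (div 100): %d px\n", diagonal / 100);  // 8
    std::printf("new max_dist (div 80):  %d px\n", diagonal / 80);   // 10
}
```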
