From bef7aff31e5ea073f0f160ca6a2f1e56b7dd881a Mon Sep 17 00:00:00 2001 From: Stanislaw Halik Date: Sun, 15 Sep 2013 12:39:32 +0200 Subject: Initial PT 1.1 import Codebase broken at this stage --- FTNoIR_Tracker_PT/point_tracker.cpp | 359 ++++++++++++++++++++++++++++++++++++ 1 file changed, 359 insertions(+) create mode 100644 FTNoIR_Tracker_PT/point_tracker.cpp (limited to 'FTNoIR_Tracker_PT/point_tracker.cpp') diff --git a/FTNoIR_Tracker_PT/point_tracker.cpp b/FTNoIR_Tracker_PT/point_tracker.cpp new file mode 100644 index 00000000..acde9154 --- /dev/null +++ b/FTNoIR_Tracker_PT/point_tracker.cpp @@ -0,0 +1,359 @@ +/* Copyright (c) 2012 Patrick Ruoff + * + * Permission to use, copy, modify, and/or distribute this software for any + * purpose with or without fee is hereby granted, provided that the above + * copyright notice and this permission notice appear in all copies. + */ + +#include "point_tracker.h" + +#include +#include +#include + +#include + +using namespace cv; +using namespace boost; +using namespace std; + +const float PI = 3.14159265358979323846f; + +// ---------------------------------------------------------------------------- +static void get_row(const Matx33f& m, int i, Vec3f& v) +{ + v[0] = m(i,0); + v[1] = m(i,1); + v[2] = m(i,2); +} + +static void set_row(Matx33f& m, int i, const Vec3f& v) +{ + m(i,0) = v[0]; + m(i,1) = v[1]; + m(i,2) = v[2]; +} + +// ---------------------------------------------------------------------------- +PointModel::PointModel(Vec3f M01, Vec3f M02) + : M01(M01), + M02(M02) +{ + // calculate u + u = M01.cross(M02); + u /= norm(u); + + // calculate projection matrix on M01,M02 plane + float s11 = M01.dot(M01); + float s12 = M01.dot(M02); + float s22 = M02.dot(M02); + P = 1.0/(s11*s22-s12*s12) * Matx22f(s22, -s12, + -s12, s11); + + // calculate d and d_order for simple freetrack-like point correspondence + vector points; + points.push_back(Vec2f(0,0)); + points.push_back(Vec2f(M01[0], M01[1])); + points.push_back(Vec2f(M02[0], M02[1])); + // fit line to orthographically projected points + // ERROR: yields wrong results with colinear points?! + /* + Vec4f line; + fitLine(points, line, CV_DIST_L2, 0, 0.01, 0.01); + d[0] = line[0]; d[1] = line[1]; + */ + // TODO: fix this + d = Vec2f(M01[0]-M02[0], M01[1]-M02[1]); + + // sort model points + get_d_order(points, d_order); +} + +void PointModel::get_d_order(const std::vector& points, int d_order[]) const +{ + // get sort indices with respect to d scalar product + vector< pair > d_vals; + for (int i = 0; i(d.dot(points[i]), i)); + + struct + { + bool operator()(const pair& a, const pair& b) { return a.first < b.first; } + } comp; + sort(d_vals.begin(), d_vals.end(), comp); + + for (int i = 0; i& points, float f, float dt) +{ + if (!dynamic_pose_resolution) init_phase = true; + + dt_valid += dt; + // if there was no valid tracking result for too long, do a reset + if (dt_valid > dt_reset) + { + //qDebug()<<"dt_valid "< dt_reset "<& points, float f) +{ + if (init_phase) { + // We do a simple freetrack-like sorting in the init phase... + // sort points + int point_d_order[PointModel::N_POINTS]; + point_model->get_d_order(points, point_d_order); + + // set correspondences + for (int i=0; id_order[i]] = points[point_d_order[i]]; + } + } + else { + // ... 
otherwise we look at the distance to the projection of the expected model points + // project model points under current pose + p_exp[0] = project(Vec3f(0,0,0), f); + p_exp[1] = project(point_model->M01, f); + p_exp[2] = project(point_model->M02, f); + + // set correspondences by minimum distance to projected model point + bool point_taken[PointModel::N_POINTS]; + for (int i=0; iM01)/Z0; + epsilon_2 = k.dot(point_model->M02)/Z0; + + // vector of scalar products and + Vec2f I0_M0i(p[1][0]*(1.0 + epsilon_1) - p[0][0], + p[2][0]*(1.0 + epsilon_2) - p[0][0]); + Vec2f J0_M0i(p[1][1]*(1.0 + epsilon_1) - p[0][1], + p[2][1]*(1.0 + epsilon_2) - p[0][1]); + + // construct projection of I, J onto M0i plane: I0 and J0 + I0_coeff = point_model->P * I0_M0i; + J0_coeff = point_model->P * J0_M0i; + I0 = I0_coeff[0]*point_model->M01 + I0_coeff[1]*point_model->M02; + J0 = J0_coeff[0]*point_model->M01 + J0_coeff[1]*point_model->M02; + + // calculate u component of I, J + float II0 = I0.dot(I0); + float IJ0 = I0.dot(J0); + float JJ0 = J0.dot(J0); + float rho, theta; + if (JJ0 == II0) { + rho = sqrt(abs(2*IJ0)); + theta = -PI/4; + if (IJ0<0) theta *= -1; + } + else { + rho = sqrt(sqrt( (JJ0-II0)*(JJ0-II0) + 4*IJ0*IJ0 )); + theta = atan( -2*IJ0 / (JJ0-II0) ); + if (JJ0 - II0 < 0) theta += PI; + theta /= 2; + } + + // construct the two solutions + I_1 = I0 + rho*cos(theta)*point_model->u; + I_2 = I0 - rho*cos(theta)*point_model->u; + + J_1 = J0 + rho*sin(theta)*point_model->u; + J_2 = J0 - rho*sin(theta)*point_model->u; + + float norm_const = 1.0/norm(I_1); // all have the same norm + + // create rotation matrices + I_1 *= norm_const; J_1 *= norm_const; + I_2 *= norm_const; J_2 *= norm_const; + + set_row(R_1, 0, I_1); + set_row(R_1, 1, J_1); + set_row(R_1, 2, I_1.cross(J_1)); + + set_row(R_2, 0, I_2); + set_row(R_2, 1, J_2); + set_row(R_2, 2, I_2.cross(J_2)); + + // the single translation solution + Z0 = norm_const * f; + + // pick the rotation solution closer to the expected one + // in simple metric d(A,B) = || I - A * B^T || + float R_1_deviation = norm(Matx33f::eye() - R_expected * R_1.t()); + float R_2_deviation = norm(Matx33f::eye() - R_expected * R_2.t()); + + if (R_1_deviation < R_2_deviation) + R_current = &R_1; + else + R_current = &R_2; + + get_row(*R_current, 2, k); + + // check for convergence condition + if (abs(epsilon_1 - old_epsilon_1) + abs(epsilon_2 - old_epsilon_2) < EPS_THRESHOLD) + break; + old_epsilon_1 = epsilon_1; + old_epsilon_2 = epsilon_2; + } + + // apply results + X_CM.R = *R_current; + X_CM.t[0] = p[0][0] * Z0/f; + X_CM.t[1] = p[0][1] * Z0/f; + X_CM.t[2] = Z0; + + return i; + + //Rodrigues(X_CM.R, r); + //qDebug()<<"iter: "< Date: Sun, 15 Sep 2013 15:00:56 +0200 Subject: New PT from Patrick Ruoff adapted to Linux --- CMakeLists.txt | 2 + FTNoIR_Tracker_PT/FTNoIR_PT_Controls.ui | 4 +- FTNoIR_Tracker_PT/boost-compat.h | 6 ++ FTNoIR_Tracker_PT/camera.cpp | 135 ++++++++++++++++++++----- FTNoIR_Tracker_PT/camera.h | 96 +++++++++--------- FTNoIR_Tracker_PT/frame_observer.cpp | 4 +- FTNoIR_Tracker_PT/frame_observer.h | 8 +- FTNoIR_Tracker_PT/ftnoir_tracker_pt.cpp | 56 ++++++++-- FTNoIR_Tracker_PT/ftnoir_tracker_pt.h | 33 +++++- FTNoIR_Tracker_PT/ftnoir_tracker_pt_dialog.cpp | 15 ++- FTNoIR_Tracker_PT/ftnoir_tracker_pt_dialog.h | 6 +- FTNoIR_Tracker_PT/ftnoir_tracker_pt_dll.cpp | 10 +- FTNoIR_Tracker_PT/ftnoir_tracker_pt_dll.h | 18 +++- FTNoIR_Tracker_PT/point_extractor.cpp | 2 +- FTNoIR_Tracker_PT/point_extractor.h | 1 + FTNoIR_Tracker_PT/point_tracker.cpp | 31 +++++- 
FTNoIR_Tracker_PT/point_tracker.h | 6 +- FTNoIR_Tracker_PT/timer.cpp | 4 +- FTNoIR_Tracker_PT/video_widget.cpp | 58 ++++++++--- FTNoIR_Tracker_PT/video_widget.h | 47 ++++++++- facetracknoir/rotation.h | 18 ++-- facetracknoir/tracker_types.cpp | 12 +-- 22 files changed, 422 insertions(+), 150 deletions(-) create mode 100644 FTNoIR_Tracker_PT/boost-compat.h (limited to 'FTNoIR_Tracker_PT/point_tracker.cpp') diff --git a/CMakeLists.txt b/CMakeLists.txt index 0669fb74..6c5bed0f 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -7,6 +7,8 @@ SET(CMAKE_SKIP_INSTALL_RPATH FALSE) SET(CMAKE_SKIP_RPATH FALSE) SET(CMAKE_INSTALL_RPATH "${CMAKE_INSTALL_PREFIX}") +add_definitions(-DOPENTRACK_API) + if(WIN32) SET(SDK_FACEAPI_ONLY FALSE CACHE BOOL "FaceAPI only (MSVC 2005)") endif() diff --git a/FTNoIR_Tracker_PT/FTNoIR_PT_Controls.ui b/FTNoIR_Tracker_PT/FTNoIR_PT_Controls.ui index 7bb7eb50..1495249a 100644 --- a/FTNoIR_Tracker_PT/FTNoIR_PT_Controls.ui +++ b/FTNoIR_Tracker_PT/FTNoIR_PT_Controls.ui @@ -9,8 +9,8 @@ 0 0 - 395 - 552 + 458 + 590 diff --git a/FTNoIR_Tracker_PT/boost-compat.h b/FTNoIR_Tracker_PT/boost-compat.h new file mode 100644 index 00000000..aea5e1a2 --- /dev/null +++ b/FTNoIR_Tracker_PT/boost-compat.h @@ -0,0 +1,6 @@ +#pragma once +#include +#define shared_ptr auto_ptr +namespace boost { + using std::auto_ptr; +}; diff --git a/FTNoIR_Tracker_PT/camera.cpp b/FTNoIR_Tracker_PT/camera.cpp index 21a910c1..a372373d 100644 --- a/FTNoIR_Tracker_PT/camera.cpp +++ b/FTNoIR_Tracker_PT/camera.cpp @@ -10,6 +10,69 @@ using namespace cv; +#if defined(OPENTRACK_API) && (defined(__unix) || defined(__linux)) +#include +#endif + +#ifdef OPENTRACK_API +void get_camera_device_names(std::vector& device_names) { +# if defined(_WIN32) + // Create the System Device Enumerator. + HRESULT hr; + ICreateDevEnum *pSysDevEnum = NULL; + hr = CoCreateInstance(CLSID_SystemDeviceEnum, NULL, CLSCTX_INPROC_SERVER, IID_ICreateDevEnum, (void **)&pSysDevEnum); + if (FAILED(hr)) + { + return ret; + } + // Obtain a class enumerator for the video compressor category. + IEnumMoniker *pEnumCat = NULL; + hr = pSysDevEnum->CreateClassEnumerator(CLSID_VideoInputDeviceCategory, &pEnumCat, 0); + + if (hr == S_OK) { + // Enumerate the monikers. + IMoniker *pMoniker = NULL; + ULONG cFetched; + while (pEnumCat->Next(1, &pMoniker, &cFetched) == S_OK) { + IPropertyBag *pPropBag; + hr = pMoniker->BindToStorage(0, 0, IID_IPropertyBag, (void **)&pPropBag); + if (SUCCEEDED(hr)) { + // To retrieve the filter's friendly name, do the following: + VARIANT varName; + VariantInit(&varName); + hr = pPropBag->Read(L"FriendlyName", &varName, 0); + if (SUCCEEDED(hr)) + { + device_names.push_back(std::string(reinterpret_cast(varName.bstrVal))); + } + VariantClear(&varName); + + ////// To create an instance of the filter, do the following: + ////IBaseFilter *pFilter; + ////hr = pMoniker->BindToObject(NULL, NULL, IID_IBaseFilter, + //// (void**)&pFilter); + // Now add the filter to the graph. + //Remember to release pFilter later. 
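// (editor's addition, not part of the original patch) varName.bstrVal is a
// UTF-16 BSTR, so constructing std::string from reinterpret_cast<char*>(bstrVal)
// stops at the first zero byte and an ASCII friendly name survives as a single
// character. A hypothetical narrowing step would look roughly like the sketch
// below; CP_ACP/WideCharToMultiByte are shown for illustration only:
//   char name[256] = {0};
//   WideCharToMultiByte(CP_ACP, 0, varName.bstrVal, -1, name, sizeof(name) - 1, NULL, NULL);
//   device_names.push_back(std::string(name));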
+ pPropBag->Release(); + } + pMoniker->Release(); + } + pEnumCat->Release(); + } + pSysDevEnum->Release(); +# else + for (int i = 0; i < 16; i++) { + char buf[128]; + sprintf(buf, "/dev/video%d", i); + if (access(buf, R_OK | W_OK) == 0) { + device_names.push_back(std::string(buf)); + } else { + continue; + } + } +# endif +} +#else // ---------------------------------------------------------------------------- void get_camera_device_names(std::vector& device_names) { @@ -22,6 +85,7 @@ void get_camera_device_names(std::vector& device_names) device_names.push_back(device_name); } } +#endif // ---------------------------------------------------------------------------- void Camera::set_device_index(int index) @@ -81,43 +145,49 @@ bool Camera::get_frame(float dt, cv::Mat* frame) } // ---------------------------------------------------------------------------- -/* +#ifdef OPENTRACK_API void CVCamera::start() { - cap = cvCreateCameraCapture(desired_index); + cap = new VideoCapture(desired_index); // extract camera info - if (cap) + if (cap->isOpened()) { active = true; active_index = desired_index; - cam_info.res_x = cvGetCaptureProperty(cap, CV_CAP_PROP_FRAME_WIDTH); - cam_info.res_y = cvGetCaptureProperty(cap, CV_CAP_PROP_FRAME_HEIGHT); + cam_info.res_x = cap->get(CV_CAP_PROP_FRAME_WIDTH); + cam_info.res_y = cap->get(CV_CAP_PROP_FRAME_HEIGHT); } + else { + delete cap; + cap = NULL; + } } void CVCamera::stop() { - if (cap) cvReleaseCapture(&cap); + if (cap) + { + cap->release(); + delete cap; + } active = false; } bool CVCamera::_get_frame(Mat* frame) { - if (cap && cvGrabFrame(cap) != 0) + if (cap) { - // retrieve frame - IplImage* _img = cvRetrieveFrame(cap, 0); - if(_img) - { - if(_img->origin == IPL_ORIGIN_TL) - *frame = Mat(_img); - else - { - Mat temp(_img); - flip(temp, *frame, 0); - } - return true; - } + Mat img; + /* + * XXX some Windows webcams fail to decode first + * frames and then some every once in a while + * -sh + */ + while (!cap->read(img))\ + ;; + + *frame = img; + return true; } return false; } @@ -134,21 +204,30 @@ void CVCamera::_set_f() void CVCamera::_set_fps() { - if (cap) cvSetCaptureProperty(cap, CV_CAP_PROP_FPS, cam_desired.fps); + if (cap) cap->set(CV_CAP_PROP_FPS, cam_desired.fps); } void CVCamera::_set_res() { if (cap) { - cvSetCaptureProperty(cap, CV_CAP_PROP_FRAME_WIDTH, cam_desired.res_x); - cvSetCaptureProperty(cap, CV_CAP_PROP_FRAME_HEIGHT, cam_desired.res_y); - cam_info.res_x = cvGetCaptureProperty(cap, CV_CAP_PROP_FRAME_WIDTH); - cam_info.res_y = cvGetCaptureProperty(cap, CV_CAP_PROP_FRAME_HEIGHT); + cap->set(CV_CAP_PROP_FRAME_WIDTH, cam_desired.res_x); + cap->set(CV_CAP_PROP_FRAME_HEIGHT, cam_desired.res_y); + cam_info.res_x = cap->get(CV_CAP_PROP_FRAME_WIDTH); + cam_info.res_y = cap->get(CV_CAP_PROP_FRAME_HEIGHT); } } -*/ +void CVCamera::_set_device_index() +{ + if (cap) + { + cap->release(); + delete cap; + } + cap = new VideoCapture(desired_index); +} +#else // ---------------------------------------------------------------------------- VICamera::VICamera() : frame_buffer(NULL) { @@ -234,7 +313,7 @@ void VICamera::_set_res() { if (active) restart(); } - +#endif // ---------------------------------------------------------------------------- Mat FrameRotation::rotate_frame(Mat frame) @@ -260,4 +339,4 @@ Mat FrameRotation::rotate_frame(Mat frame) default: return frame; } -} \ No newline at end of file +} diff --git a/FTNoIR_Tracker_PT/camera.h b/FTNoIR_Tracker_PT/camera.h index c0876d0a..d1a8104c 100644 --- a/FTNoIR_Tracker_PT/camera.h +++ 
b/FTNoIR_Tracker_PT/camera.h @@ -9,8 +9,13 @@ #define CAMERA_H #include -#include "videoInput/videoInput.h" -#include +#ifndef OPENTRACK_API +# include +#else +# include "FTNoIR_Tracker_PT/boost-compat.h" +# include +# include +#endif #include // ---------------------------------------------------------------------------- @@ -33,50 +38,50 @@ struct CamInfo class Camera { public: - Camera() : dt_valid(0), dt_mean(0), desired_index(0), active_index(-1), active(false) {} - virtual ~Camera() {} + Camera() : dt_valid(0), dt_mean(0), desired_index(0), active_index(-1), active(false) {} + virtual ~Camera() {} - // start/stop capturing - virtual void start() = 0; - virtual void stop() = 0; - void restart() { stop(); start(); } + // start/stop capturing + virtual void start() = 0; + virtual void stop() = 0; + void restart() { stop(); start(); } - // calls corresponding template methods and reinitializes frame rate calculation - void set_device_index(int index); - void set_f(float f); - void set_fps(int fps); - void set_res(int x_res, int y_res); + // calls corresponding template methods and reinitializes frame rate calculation + void set_device_index(int index); + void set_f(float f); + void set_fps(int fps); + void set_res(int x_res, int y_res); - // gets a frame from the camera, dt: time since last call in seconds - bool get_frame(float dt, cv::Mat* frame); + // gets a frame from the camera, dt: time since last call in seconds + bool get_frame(float dt, cv::Mat* frame); - // WARNING: returned references are valid as long as object - const CamInfo& get_info() const { return cam_info; } - const CamInfo& get_desired() const { return cam_desired; } + // WARNING: returned references are valid as long as object + const CamInfo& get_info() const { return cam_info; } + const CamInfo& get_desired() const { return cam_desired; } protected: - // get a frame from the camera - virtual bool _get_frame(cv::Mat* frame) = 0; - - // update the camera using cam_desired, write res and f to cam_info if successful - virtual void _set_device_index() = 0; - virtual void _set_f() = 0; - virtual void _set_fps() = 0; - virtual void _set_res() = 0; - - bool active; - int desired_index; - int active_index; - CamInfo cam_info; - CamInfo cam_desired; - float dt_valid; - float dt_mean; + // get a frame from the camera + virtual bool _get_frame(cv::Mat* frame) = 0; + + // update the camera using cam_desired, write res and f to cam_info if successful + virtual void _set_device_index() = 0; + virtual void _set_f() = 0; + virtual void _set_fps() = 0; + virtual void _set_res() = 0; + + bool active; + int desired_index; + int active_index; + CamInfo cam_info; + CamInfo cam_desired; + float dt_valid; + float dt_mean; }; // ---------------------------------------------------------------------------- // camera based on OpenCV's videoCapture -/* +#ifdef OPENTRACK_API class CVCamera : public Camera { public: @@ -92,11 +97,11 @@ protected: virtual void _set_f(); virtual void _set_fps(); virtual void _set_res(); + virtual void _set_device_index(); - CvCapture* cap; + cv::VideoCapture* cap; }; -*/ - +#else // ---------------------------------------------------------------------------- // Camera based on the videoInput library class VICamera : public Camera @@ -119,19 +124,20 @@ protected: cv::Mat new_frame; unsigned char* frame_buffer; }; +#endif +enum RotationType +{ + CLOCKWISE = -1, + ZERO = 0, + COUNTER_CLOCKWISE = 1 +}; // ---------------------------------------------------------------------------- class FrameRotation { public: - 
typedef enum Rotation - { - CLOCKWISE = -1, - ZERO = 0, - COUNTER_CLOCKWISE = 1 - }; - Rotation rotation; + RotationType rotation; cv::Mat rotate_frame(cv::Mat frame); }; diff --git a/FTNoIR_Tracker_PT/frame_observer.cpp b/FTNoIR_Tracker_PT/frame_observer.cpp index 7e4bb3e3..281f3d57 100644 --- a/FTNoIR_Tracker_PT/frame_observer.cpp +++ b/FTNoIR_Tracker_PT/frame_observer.cpp @@ -11,8 +11,8 @@ FrameProvider::~FrameProvider() { QMutexLocker lock(&observer_mutex); - for (auto iter=frame_observers.begin(); iter!=frame_observers.end(); ++iter) + for (std::set::iterator iter=frame_observers.begin(); iter!=frame_observers.end(); ++iter) { (*iter)->on_frame_provider_destroy(); } -} \ No newline at end of file +} diff --git a/FTNoIR_Tracker_PT/frame_observer.h b/FTNoIR_Tracker_PT/frame_observer.h index 4afbd72c..585a6ee7 100644 --- a/FTNoIR_Tracker_PT/frame_observer.h +++ b/FTNoIR_Tracker_PT/frame_observer.h @@ -10,7 +10,11 @@ #include #include -#include +#ifndef OPENTRACK_API +# include +#else +# include "FTNoIR_Tracker_PT/boost-compat.h" +#endif #include //----------------------------------------------------------------------------- @@ -69,4 +73,4 @@ private: FrameObserver(const FrameObserver&); }; -#endif //FRAME_OBSERVER_H \ No newline at end of file +#endif //FRAME_OBSERVER_H diff --git a/FTNoIR_Tracker_PT/ftnoir_tracker_pt.cpp b/FTNoIR_Tracker_PT/ftnoir_tracker_pt.cpp index 88a3fc8d..d4741101 100644 --- a/FTNoIR_Tracker_PT/ftnoir_tracker_pt.cpp +++ b/FTNoIR_Tracker_PT/ftnoir_tracker_pt.cpp @@ -12,6 +12,10 @@ #include #include +#ifdef OPENTRACK_API +# define VideoWidget VideoWidget2 +#endif + using namespace std; using namespace cv; using namespace boost; @@ -92,6 +96,9 @@ void Tracker::run() const std::vector& points = point_extractor.extract_points(frame, dt, has_observers()); tracking_valid = point_tracker.track(points, camera.get_info().f, dt); frame_count++; +#ifdef OPENTRACK_API + video_widget->update_image(frame); +#endif } #ifdef PT_PERF_LOG log_stream<<"dt: "<(settings.cam_roll); + frame_rotation.rotation = static_cast(settings.cam_roll); point_extractor.threshold_val = settings.threshold; point_extractor.min_size = settings.min_point_size; - point_extractor.max_size = settings.max_point_size; - point_tracker.point_model = boost::shared_ptr(new PointModel(settings.M01, settings.M02)); - point_tracker.dynamic_pose_resolution = settings.dyn_pose_res; + point_extractor.max_size = settings.max_point_size; +#ifdef OPENTRACK_API + point_tracker.point_model.reset(new PointModel(settings.M01, settings.M02)); +#else + point_tracker.point_model = boost::shared_ptr(new PointModel(settings.M01, settings.M02)); +#endif + point_tracker.dynamic_pose_resolution = settings.dyn_pose_res; sleep_time = settings.sleep_time; point_tracker.dt_reset = settings.reset_time / 1000.0; show_video_widget = settings.video_widget; @@ -181,7 +192,6 @@ void Tracker::update_show_video_widget() { const int VIDEO_FRAME_WIDTH = 252; const int VIDEO_FRAME_HEIGHT = 189; - video_widget = new VideoWidget(video_frame, this); QHBoxLayout* video_layout = new QHBoxLayout(); video_layout->setContentsMargins(0, 0, 0, 0); @@ -208,15 +218,28 @@ void Tracker::refreshVideo() if (video_widget) video_widget->update_frame_and_points(); } +#ifdef OPENTRACK_API +void Tracker::StartTracker(QFrame *parent_window) +#else void Tracker::StartTracker(HWND parent_window) +#endif { +#ifdef OPENTRACK_API + Initialize(parent_window); +#endif reset_command(PAUSE); } +#ifndef OPENTRACK_API void Tracker::StopTracker(bool exit) { set_command(PAUSE); } 
+#endif + +#ifdef OPENTRACK_API +#define THeadPoseData double +#endif bool Tracker::GiveHeadPoseData(THeadPoseData *data) { @@ -228,14 +251,20 @@ bool Tracker::GiveHeadPoseData(THeadPoseData *data) FrameTrafo X_CM = point_tracker.get_pose(); FrameTrafo X_MH(Matx33f::eye(), t_MH); FrameTrafo X_GH = R_GC * X_CM * X_MH; - Matx33f R = X_GH.R * X_GH_0.R.t(); + Matx33f R = X_GH.R * X_GH_0.R.t(); Vec3f t = X_GH.t - X_GH_0.t; +#ifndef OPENTRACK_API // get translation(s) if (bEnableX) data->x = t[0] / 10.0; // convert to cm if (bEnableY) data->y = t[1] / 10.0; if (bEnableZ) data->z = t[2] / 10.0; - +#else + // get translation(s) + if (bEnableX) data[TX] = t[0] / 10.0; // convert to cm + if (bEnableY) data[TY] = t[1] / 10.0; + if (bEnableZ) data[TZ] = t[2] / 10.0; +#endif // translate rotation matrix from opengl (G) to roll-pitch-yaw (E) frame // -z -> x, y -> z, x -> -y Matx33f R_EG( 0, 0,-1, @@ -249,17 +278,26 @@ bool Tracker::GiveHeadPoseData(THeadPoseData *data) alpha = atan2( R(1,0), R(0,0)); gamma = atan2( R(2,1), R(2,2)); +#ifndef OPENTRACK_API if (bEnableYaw) data->yaw = rad2deg * alpha; if (bEnablePitch) data->pitch = - rad2deg * beta; // FTNoIR expects a minus here if (bEnableRoll) data->roll = rad2deg * gamma; +#else + if (bEnableYaw) data[Yaw] = rad2deg * alpha; + if (bEnablePitch) data[Pitch] = - rad2deg * beta; // FTNoIR expects a minus here + if (bEnableRoll) data[Roll] = rad2deg * gamma; +#endif } return true; } //----------------------------------------------------------------------------- +#ifdef OPENTRACK_API +extern "C" FTNOIR_TRACKER_BASE_EXPORT ITracker* CALLING_CONVENTION GetConstructor() +#else #pragma comment(linker, "/export:GetTracker=_GetTracker@0") - FTNOIR_TRACKER_BASE_EXPORT ITrackerPtr __stdcall GetTracker() +#endif { return new Tracker; -} \ No newline at end of file +} diff --git a/FTNoIR_Tracker_PT/ftnoir_tracker_pt.h b/FTNoIR_Tracker_PT/ftnoir_tracker_pt.h index 6eef945a..867378d1 100644 --- a/FTNoIR_Tracker_PT/ftnoir_tracker_pt.h +++ b/FTNoIR_Tracker_PT/ftnoir_tracker_pt.h @@ -8,7 +8,12 @@ #ifndef FTNOIR_TRACKER_PT_H #define FTNOIR_TRACKER_PT_H -#include "..\ftnoir_tracker_base\ftnoir_tracker_base.h" +#ifdef OPENTRACK_API +# include "ftnoir_tracker_base/ftnoir_tracker_base.h" +# include "facetracknoir/global-settings.h" +#else +# include "..\ftnoir_tracker_base\ftnoir_tracker_base.h" +#endif #include "ftnoir_tracker_pt_settings.h" #include "frame_observer.h" #include "camera.h" @@ -19,9 +24,14 @@ #include #include +#include #include #include -#include +#ifndef OPENTRACK_API +# include +#else +# include "FTNoIR_Tracker_PT/boost-compat.h" +#endif #include //----------------------------------------------------------------------------- @@ -34,10 +44,16 @@ public: // --- ITracker interface --- virtual void Initialize(QFrame *videoframe); +#ifdef OPENTRACK_API + virtual void StartTracker(QFrame* parent_window); + virtual void WaitForExit() {} + virtual bool GiveHeadPoseData(double* data); +#else virtual void StartTracker(HWND parent_window); - virtual void StopTracker(bool exit); - virtual bool GiveHeadPoseData(THeadPoseData *data); - virtual void refreshVideo(); + virtual void StopTracker(bool exit); + virtual bool GiveHeadPoseData(THeadPoseData *data); +#endif + virtual void refreshVideo(); void apply(const TrackerSettings& settings); void center(); @@ -66,7 +82,12 @@ protected: int sleep_time; // --- tracking chain --- +#ifdef OPENTRACK_API +#define VideoWidget VideoWidget2 + CVCamera camera; +#else VICamera camera; +#endif FrameRotation frame_rotation; 
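// (editor's addition, not part of the original patch) processing order used by
// Tracker::run() for these members: camera.get_frame() ->
// frame_rotation.rotate_frame() -> point_extractor.extract_points() ->
// point_tracker.track()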
PointExtractor point_extractor; PointTracker point_tracker; @@ -96,4 +117,6 @@ protected: Timer time; }; +#undef VideoWidget + #endif // FTNOIR_TRACKER_PT_H diff --git a/FTNoIR_Tracker_PT/ftnoir_tracker_pt_dialog.cpp b/FTNoIR_Tracker_PT/ftnoir_tracker_pt_dialog.cpp index c99f4d67..fe995163 100644 --- a/FTNoIR_Tracker_PT/ftnoir_tracker_pt_dialog.cpp +++ b/FTNoIR_Tracker_PT/ftnoir_tracker_pt_dialog.cpp @@ -10,7 +10,11 @@ #include #include #include -#include +#ifndef OPENTRACK_API +# include +#else +# include "FTNoIR_Tracker_PT/boost-compat.h" +#endif #include using namespace std; @@ -39,7 +43,7 @@ TrackerDialog::TrackerDialog() vector device_names; get_camera_device_names(device_names); - for (auto iter = device_names.begin(); iter != device_names.end(); ++iter) + for (vector::iterator iter = device_names.begin(); iter != device_names.end(); ++iter) { ui.camdevice_combo->addItem(iter->c_str()); } @@ -398,9 +402,12 @@ void TrackerDialog::unRegisterTracker() } //----------------------------------------------------------------------------- +#ifdef OPENTRACK_API +extern "C" FTNOIR_TRACKER_BASE_EXPORT ITrackerDialog* CALLING_CONVENTION GetDialog( ) +#else #pragma comment(linker, "/export:GetTrackerDialog=_GetTrackerDialog@0") - FTNOIR_TRACKER_BASE_EXPORT ITrackerDialogPtr __stdcall GetTrackerDialog( ) +#endif { return new TrackerDialog; -} \ No newline at end of file +} diff --git a/FTNoIR_Tracker_PT/ftnoir_tracker_pt_dialog.h b/FTNoIR_Tracker_PT/ftnoir_tracker_pt_dialog.h index bf0a90f2..14df7ede 100644 --- a/FTNoIR_Tracker_PT/ftnoir_tracker_pt_dialog.h +++ b/FTNoIR_Tracker_PT/ftnoir_tracker_pt_dialog.h @@ -8,7 +8,11 @@ #ifndef FTNOIR_TRACKER_PT_DIALOG_H #define FTNOIR_TRACKER_PT_DIALOG_H +#ifdef OPENTRACK_API +#include "ftnoir_tracker_base/ftnoir_tracker_base.h" +#else #include "..\ftnoir_tracker_base\ftnoir_tracker_base.h" +#endif #include "ftnoir_tracker_pt_settings.h" #include "ftnoir_tracker_pt.h" #include "trans_calib.h" @@ -113,4 +117,4 @@ protected: Ui::UICPTClientControls ui; }; -#endif //FTNOIR_TRACKER_PT_DIALOG_H \ No newline at end of file +#endif //FTNOIR_TRACKER_PT_DIALOG_H diff --git a/FTNoIR_Tracker_PT/ftnoir_tracker_pt_dll.cpp b/FTNoIR_Tracker_PT/ftnoir_tracker_pt_dll.cpp index 3a73f679..be1705c0 100644 --- a/FTNoIR_Tracker_PT/ftnoir_tracker_pt_dll.cpp +++ b/FTNoIR_Tracker_PT/ftnoir_tracker_pt_dll.cpp @@ -6,7 +6,6 @@ */ #include "ftnoir_tracker_pt_dll.h" - #include //----------------------------------------------------------------------------- @@ -31,10 +30,13 @@ void TrackerDll::getIcon(QIcon *icon) } -//----------------------------------------------------------------------------- -#pragma comment(linker, "/export:GetTrackerDll=_GetTrackerDll@0") - +#ifdef OPENTRACK_API +# include "facetracknoir/global-settings.h" +extern "C" FTNOIR_TRACKER_BASE_EXPORT Metadata* CALLING_CONVENTION GetMetadata() +#else +# pragma comment(linker, "/export:GetTrackerDll=_GetTrackerDll@0") FTNOIR_TRACKER_BASE_EXPORT ITrackerDllPtr __stdcall GetTrackerDll() +#endif { return new TrackerDll; } diff --git a/FTNoIR_Tracker_PT/ftnoir_tracker_pt_dll.h b/FTNoIR_Tracker_PT/ftnoir_tracker_pt_dll.h index 15ad63aa..18283837 100644 --- a/FTNoIR_Tracker_PT/ftnoir_tracker_pt_dll.h +++ b/FTNoIR_Tracker_PT/ftnoir_tracker_pt_dll.h @@ -5,15 +5,27 @@ * copyright notice and this permission notice appear in all copies. 
*/ -#include "..\ftnoir_tracker_base\ftnoir_tracker_base.h" +#if defined(OPENTRACK_API) +# include "ftnoir_tracker_base/ftnoir_tracker_base.h" +# include "facetracknoir/global-settings.h" +#else +# include "../ftnoir_tracker_base/ftnoir_tracker_base.h" +#endif //----------------------------------------------------------------------------- -class TrackerDll : public ITrackerDll +class TrackerDll : +#if defined(OPENTRACK_API) + public Metadata +#else + public ITrackerDll +#endif { // ITrackerDll interface +#ifndef OPENTRACK_API void Initialize() {} +#endif void getFullName(QString *strToBeFilled); void getShortName(QString *strToBeFilled); void getDescription(QString *strToBeFilled); void getIcon(QIcon *icon); -}; \ No newline at end of file +}; diff --git a/FTNoIR_Tracker_PT/point_extractor.cpp b/FTNoIR_Tracker_PT/point_extractor.cpp index 76a152a7..261de60f 100644 --- a/FTNoIR_Tracker_PT/point_extractor.cpp +++ b/FTNoIR_Tracker_PT/point_extractor.cpp @@ -22,7 +22,7 @@ const vector& PointExtractor::extract_points(Mat frame, float dt, bool dr // convert to grayscale Mat frame_gray; - cvtColor(frame, frame_gray, CV_RGB2GRAY); + cvtColor(frame, frame_gray, CV_RGB2GRAY); // convert to binary Mat frame_bin; diff --git a/FTNoIR_Tracker_PT/point_extractor.h b/FTNoIR_Tracker_PT/point_extractor.h index b142d2bb..c62d34f9 100644 --- a/FTNoIR_Tracker_PT/point_extractor.h +++ b/FTNoIR_Tracker_PT/point_extractor.h @@ -9,6 +9,7 @@ #define POINTEXTRACTOR_H #include +#include // ---------------------------------------------------------------------------- // Extracts points from an opencv image diff --git a/FTNoIR_Tracker_PT/point_tracker.cpp b/FTNoIR_Tracker_PT/point_tracker.cpp index acde9154..8b508897 100644 --- a/FTNoIR_Tracker_PT/point_tracker.cpp +++ b/FTNoIR_Tracker_PT/point_tracker.cpp @@ -69,6 +69,13 @@ PointModel::PointModel(Vec3f M01, Vec3f M02) get_d_order(points, d_order); } +#ifdef OPENTRACK_API +static bool d_vals_sort(const pair a, const pair b) +{ + return a.first < b.first; +} +#endif + void PointModel::get_d_order(const std::vector& points, int d_order[]) const { // get sort indices with respect to d scalar product @@ -77,10 +84,17 @@ void PointModel::get_d_order(const std::vector& points, int d_order[] d_vals.push_back(pair(d.dot(points[i]), i)); struct - { - bool operator()(const pair& a, const pair& b) { return a.first < b.first; } - } comp; - sort(d_vals.begin(), d_vals.end(), comp); + { + bool operator()(const pair& a, const pair& b) { return a.first < b.first; } + } comp; + std::sort(d_vals.begin(), + d_vals.end(), +#ifdef OPENTRACK_API + d_vals_sort +#else + comp +#endif + ); for (int i = 0; i& points, float f, float dt) reset(); } + bool no_model = +#ifdef OPENTRACK_API + point_model.get() == NULL; +#else + !point_model; +#endif + // if there is a pointtracking problem, reset the velocities - if (!point_model || points.size() != PointModel::N_POINTS) + if (no_model || points.size() != PointModel::N_POINTS) { //qDebug()<<"Wrong number of points!"; reset_velocities(); diff --git a/FTNoIR_Tracker_PT/point_tracker.h b/FTNoIR_Tracker_PT/point_tracker.h index f52e9dd7..11034100 100644 --- a/FTNoIR_Tracker_PT/point_tracker.h +++ b/FTNoIR_Tracker_PT/point_tracker.h @@ -9,7 +9,11 @@ #define POINTTRACKER_H #include -#include +#ifndef OPENTRACK_API +# include +#else +# include "FTNoIR_Tracker_PT/boost-compat.h" +#endif #include // ---------------------------------------------------------------------------- diff --git a/FTNoIR_Tracker_PT/timer.cpp b/FTNoIR_Tracker_PT/timer.cpp index 
363b5b09..ed7606e5 100644 --- a/FTNoIR_Tracker_PT/timer.cpp +++ b/FTNoIR_Tracker_PT/timer.cpp @@ -55,7 +55,7 @@ double Timer::elapsed() startTime = startCount.QuadPart * (1e3 / frequency.QuadPart); endTime = endCount.QuadPart * (1e3 / frequency.QuadPart); #else - if(!stopped) + if(running) gettimeofday(&endCount, NULL); startTime = (startCount.tv_sec * 1e3) + startCount.tv_usec; @@ -63,4 +63,4 @@ double Timer::elapsed() #endif return endTime - startTime; -} \ No newline at end of file +} diff --git a/FTNoIR_Tracker_PT/video_widget.cpp b/FTNoIR_Tracker_PT/video_widget.cpp index 236faaf7..0d31620f 100644 --- a/FTNoIR_Tracker_PT/video_widget.cpp +++ b/FTNoIR_Tracker_PT/video_widget.cpp @@ -14,8 +14,10 @@ using namespace cv; using namespace std; +#ifndef OPENTRACK_API using namespace boost; - +#endif +#ifndef OPENTRACK_API // ---------------------------------------------------------------------------- void VideoWidget::initializeGL() { @@ -100,20 +102,44 @@ void VideoWidget::update_frame_and_points() updateGL(); } +#else +void VideoWidget2::update_image(const cv::Mat& frame) +{ + QMutexLocker foo(&mtx); + QImage qframe = QImage(frame.cols, frame.rows, QImage::Format_RGB888); + uchar* data = qframe.bits(); + const int pitch = qframe.bytesPerLine(); + for (int y = 0; y < frame.rows; y++) + for (int x = 0; x < frame.cols; x++) + { + const int pos = 3 * (y*frame.cols + x); + data[y * pitch + x * 3 + 0] = frame.data[pos + 2]; + data[y * pitch + x * 3 + 1] = frame.data[pos + 1]; + data[y * pitch + x * 3 + 2] = frame.data[pos + 0]; + } + qframe = qframe.scaled(size(), Qt::IgnoreAspectRatio, Qt::FastTransformation); + pixmap = QPixmap::fromImage(qframe); +} +#endif + // ---------------------------------------------------------------------------- -VideoWidgetDialog::VideoWidgetDialog(QWidget *parent, FrameProvider* provider) - : QDialog(parent), - video_widget(NULL) +VideoWidgetDialog::VideoWidgetDialog(QWidget *parent, FrameProvider* provider) + : QDialog(parent), + video_widget(NULL) { - const int VIDEO_FRAME_WIDTH = 640; - const int VIDEO_FRAME_HEIGHT = 480; - - video_widget = new VideoWidget(this, provider); - - QHBoxLayout* layout = new QHBoxLayout(); - layout->setContentsMargins(0, 0, 0, 0); - layout->addWidget(video_widget); - if (this->layout()) delete this->layout(); - setLayout(layout); - resize(VIDEO_FRAME_WIDTH, VIDEO_FRAME_HEIGHT); -} \ No newline at end of file + const int VIDEO_FRAME_WIDTH = 640; + const int VIDEO_FRAME_HEIGHT = 480; + +#ifdef OPENTRACK_API + video_widget = new VideoWidget2(this, provider); +#else + video_widget = new VideoWidget(this, provider); +#endif + + QHBoxLayout* layout = new QHBoxLayout(); + layout->setContentsMargins(0, 0, 0, 0); + layout->addWidget(video_widget); + if (this->layout()) delete this->layout(); + setLayout(layout); + resize(VIDEO_FRAME_WIDTH, VIDEO_FRAME_HEIGHT); +} diff --git a/FTNoIR_Tracker_PT/video_widget.h b/FTNoIR_Tracker_PT/video_widget.h index dd5fb642..3164dacc 100644 --- a/FTNoIR_Tracker_PT/video_widget.h +++ b/FTNoIR_Tracker_PT/video_widget.h @@ -10,12 +10,23 @@ #include "frame_observer.h" -#include #include #include #include -#include +#ifndef OPENTRACK_API +# include +# include +#else +# include "FTNoIR_Tracker_PT/boost-compat.h" +# if defined(_WIN32) +# include +# endif +#endif +#include +#include +#include +#ifndef OPENTRACK_API // ---------------------------------------------------------------------------- // OpenGL based widget to display an OpenCV image with some points on top class VideoWidget : public QGLWidget, public 
FrameObserver @@ -23,7 +34,7 @@ class VideoWidget : public QGLWidget, public FrameObserver Q_OBJECT public: - VideoWidget(QWidget *parent, FrameProvider* provider) : QGLWidget(parent), FrameObserver(provider) {} + VideoWidget(QWidget *parent, FrameProvider* provider) : QGLWidget(parent), FrameObserver(provider) {} virtual void initializeGL(); virtual void resizeGL(int w, int h); @@ -40,19 +51,45 @@ private: boost::shared_ptr< std::vector > points; }; +#else +/* Qt moc likes to skip over preprocessor directives -sh */ +class VideoWidget2 : public QWidget, public FrameObserver +{ + Q_OBJECT + +public: + VideoWidget2(QWidget *parent, FrameProvider* provider) : QWidget(parent), /* to avoid linker errors */ FrameObserver(provider) { + connect(&timer, SIGNAL(timeout()), this, SLOT(update())); + timer.start(45); + } + void update_image(const cv::Mat &frame); + void update_frame_and_points() {} +protected slots: + void paintEvent( QPaintEvent* e ) { + QMutexLocker foo(&mtx); + QPainter painter(this); + painter.drawPixmap(e->rect(), pixmap, e->rect()); + } +private: + QMutex mtx; + QPixmap pixmap; + QTimer timer; +}; +#endif // ---------------------------------------------------------------------------- // A VideoWidget embedded in a dialog frame class VideoWidgetDialog : public QDialog { + Q_OBJECT public: VideoWidgetDialog(QWidget *parent, FrameProvider* provider); virtual ~VideoWidgetDialog() {} - VideoWidget* get_video_widget() { return video_widget; } + VideoWidget2* get_video_widget() { return video_widget; } private: - VideoWidget* video_widget; + VideoWidget2* video_widget; }; #endif // VIDEOWIDGET_H diff --git a/facetracknoir/rotation.h b/facetracknoir/rotation.h index e97ec0f0..747a23f9 100644 --- a/facetracknoir/rotation.h +++ b/facetracknoir/rotation.h @@ -9,15 +9,15 @@ #define ROTATION_H #include // ---------------------------------------------------------------------------- -class Rotation { +class RotationType { public: - Rotation() : a(1.0),b(0.0),c(0.0),d(0.0) {} - Rotation(double yaw, double pitch, double roll) { fromEuler(yaw, pitch, roll); } - Rotation(double a, double b, double c, double d) : a(a),b(b),c(c),d(d) {} + RotationType() : a(1.0),b(0.0),c(0.0),d(0.0) {} + RotationType(double yaw, double pitch, double roll) { fromEuler(yaw, pitch, roll); } + RotationType(double a, double b, double c, double d) : a(a),b(b),c(c),d(d) {} - Rotation inv(){ // inverse - return Rotation(a,-b,-c, -d); + RotationType inv(){ // inverse + return RotationType(a,-b,-c, -d); } @@ -46,10 +46,10 @@ public: yaw = atan2(2.0*(a*d + b*c), 1.0 - 2.0*(c*c + d*d)); } - const Rotation operator*(const Rotation& B) + const RotationType operator*(const RotationType& B) { - const Rotation& A = *this; - return Rotation(A.a*B.a - A.b*B.b - A.c*B.c - A.d*B.d, // quaternion multiplication + const RotationType& A = *this; + return RotationType(A.a*B.a - A.b*B.b - A.c*B.c - A.d*B.d, // quaternion multiplication A.a*B.b + A.b*B.a + A.c*B.d - A.d*B.c, A.a*B.c - A.b*B.d + A.c*B.a + A.d*B.b, A.a*B.d + A.b*B.c - A.c*B.b + A.d*B.a); diff --git a/facetracknoir/tracker_types.cpp b/facetracknoir/tracker_types.cpp index da246722..dec4ff81 100644 --- a/facetracknoir/tracker_types.cpp +++ b/facetracknoir/tracker_types.cpp @@ -7,9 +7,9 @@ T6DOF operator-(const T6DOF& A, const T6DOF& B) { - Rotation R_A(A.axes[Yaw]*D2R, A.axes[Pitch]*D2R, A.axes[Roll]*D2R); - Rotation R_B(B.axes[Yaw]*D2R, B.axes[Pitch]*D2R, B.axes[Roll]*D2R); - Rotation R_C = R_A * R_B.inv(); + RotationType R_A(A.axes[Yaw]*D2R, A.axes[Pitch]*D2R, 
A.axes[Roll]*D2R); + RotationType R_B(B.axes[Yaw]*D2R, B.axes[Pitch]*D2R, B.axes[Roll]*D2R); + RotationType R_C = R_A * R_B.inv(); T6DOF C; R_C.toEuler(C.axes[Yaw], C.axes[Pitch], C.axes[Roll]); @@ -26,9 +26,9 @@ T6DOF operator-(const T6DOF& A, const T6DOF& B) T6DOF operator+(const T6DOF& A, const T6DOF& B) { - Rotation R_A(A.axes[Yaw]*D2R, A.axes[Pitch]*D2R, A.axes[Roll]*D2R); - Rotation R_B(B.axes[Yaw]*D2R, B.axes[Pitch]*D2R, B.axes[Roll]*D2R); - Rotation R_C = R_A * R_B; + RotationType R_A(A.axes[Yaw]*D2R, A.axes[Pitch]*D2R, A.axes[Roll]*D2R); + RotationType R_B(B.axes[Yaw]*D2R, B.axes[Pitch]*D2R, B.axes[Roll]*D2R); + RotationType R_C = R_A * R_B; T6DOF C; R_C.toEuler(C.axes[Yaw], C.axes[Pitch], C.axes[Roll]); -- cgit v1.2.3 From cc4e7e331ba24b260edc44db8b09de274cd658a7 Mon Sep 17 00:00:00 2001 From: Stanislaw Halik Date: Tue, 7 Jan 2014 09:00:57 +0100 Subject: pose breaks with negative pitch --- FTNoIR_Tracker_PT/point_tracker.cpp | 4 ++-- ftnoir_tracker_aruco/ftnoir_tracker_aruco.cpp | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) (limited to 'FTNoIR_Tracker_PT/point_tracker.cpp') diff --git a/FTNoIR_Tracker_PT/point_tracker.cpp b/FTNoIR_Tracker_PT/point_tracker.cpp index 8b508897..dfefdaf8 100644 --- a/FTNoIR_Tracker_PT/point_tracker.cpp +++ b/FTNoIR_Tracker_PT/point_tracker.cpp @@ -267,8 +267,8 @@ int PointTracker::POSIT(float f) // initial pose = last (predicted) pose Vec3f k; - get_row(X_CM.R, 2, k); - float Z0 = X_CM.t[2]; + get_row(R_expected, 2, k); + float Z0 = init_phase ? 1000 : X_CM.t[2]; float old_epsilon_1 = 0; float old_epsilon_2 = 0; diff --git a/ftnoir_tracker_aruco/ftnoir_tracker_aruco.cpp b/ftnoir_tracker_aruco/ftnoir_tracker_aruco.cpp index 0719faac..93b43de7 100644 --- a/ftnoir_tracker_aruco/ftnoir_tracker_aruco.cpp +++ b/ftnoir_tracker_aruco/ftnoir_tracker_aruco.cpp @@ -335,7 +335,7 @@ void Tracker::run() { cv::Vec3d euler = cv::RQDecomp3x3(rotation_matrix, junk1, junk2); - if (fabs(euler[0] - last_pitch) > pitch_eps) + if (fabs(euler[0] - last_pitch) > pitch_eps || euler[0] < 0) { first = true; last_pitch = euler[0]; -- cgit v1.2.3 From 267010ba42b00cfd1ecc73c86d99c647ff191175 Mon Sep 17 00:00:00 2001 From: Stanislaw Halik Date: Wed, 8 Jan 2014 19:40:06 +0100 Subject: use levmarq instead of coplanar POSIT implemented in numerically unstable fashion Signed-off-by: Stanislaw Halik --- FTNoIR_Tracker_PT/FTNoIR_PT_Controls.ui | 13 +- FTNoIR_Tracker_PT/camera.cpp | 8 +- FTNoIR_Tracker_PT/camera.h | 6 +- FTNoIR_Tracker_PT/ftnoir_tracker_pt.cpp | 4 +- FTNoIR_Tracker_PT/ftnoir_tracker_pt_dialog.cpp | 2 +- FTNoIR_Tracker_PT/ftnoir_tracker_pt_settings.h | 4 +- FTNoIR_Tracker_PT/point_tracker.cpp | 191 ++++++------------------- FTNoIR_Tracker_PT/point_tracker.h | 33 ++++- 8 files changed, 92 insertions(+), 169 deletions(-) (limited to 'FTNoIR_Tracker_PT/point_tracker.cpp') diff --git a/FTNoIR_Tracker_PT/FTNoIR_PT_Controls.ui b/FTNoIR_Tracker_PT/FTNoIR_PT_Controls.ui index bdbed955..461253cf 100644 --- a/FTNoIR_Tracker_PT/FTNoIR_PT_Controls.ui +++ b/FTNoIR_Tracker_PT/FTNoIR_PT_Controls.ui @@ -515,23 +515,26 @@ - F/W + FOV - f_dspin + fov_dspin - + The camera's focal length devided by its sensor width 2 + + 1.000000000000000 + - 0.100000000000000 + 1.000000000000000 @@ -1733,7 +1736,7 @@ res_x_spin res_y_spin fps_spin - f_dspin + fov_dspin camroll_combo campitch_spin camyaw_spin diff --git a/FTNoIR_Tracker_PT/camera.cpp b/FTNoIR_Tracker_PT/camera.cpp index 754533c5..8986be60 100644 --- a/FTNoIR_Tracker_PT/camera.cpp +++ b/FTNoIR_Tracker_PT/camera.cpp @@ -108,11 
+108,11 @@ void Camera::set_device_index(int index) } } -void Camera::set_f(float f) +void Camera::set_fov(float f) { - if (cam_desired.f != f) + if (cam_desired.fov != f) { - cam_desired.f = f; + cam_desired.fov = f; _set_f(); } } @@ -208,7 +208,7 @@ void CVCamera::_set_index() void CVCamera::_set_f() { - cam_info.f = cam_desired.f; + cam_info.fov = cam_desired.fov; } void CVCamera::_set_fps() diff --git a/FTNoIR_Tracker_PT/camera.h b/FTNoIR_Tracker_PT/camera.h index ea68c387..6768e419 100644 --- a/FTNoIR_Tracker_PT/camera.h +++ b/FTNoIR_Tracker_PT/camera.h @@ -25,12 +25,12 @@ void get_camera_device_names(std::vector& device_names); // ---------------------------------------------------------------------------- struct CamInfo { - CamInfo() : res_x(0), res_y(0), fps(0), f(1) {} + CamInfo() : res_x(0), res_y(0), fps(0), fov(56) {} int res_x; int res_y; int fps; - float f; // (focal length) / (sensor width) + float fov; }; // ---------------------------------------------------------------------------- @@ -48,7 +48,7 @@ public: // calls corresponding template methods and reinitializes frame rate calculation void set_device_index(int index); - void set_f(float f); + void set_fov(float f); void set_fps(int fps); void set_res(int x_res, int y_res); diff --git a/FTNoIR_Tracker_PT/ftnoir_tracker_pt.cpp b/FTNoIR_Tracker_PT/ftnoir_tracker_pt.cpp index 6bcad861..a3e8919b 100644 --- a/FTNoIR_Tracker_PT/ftnoir_tracker_pt.cpp +++ b/FTNoIR_Tracker_PT/ftnoir_tracker_pt.cpp @@ -84,7 +84,7 @@ void Tracker::run() { frame = frame_rotation.rotate_frame(frame); const std::vector& points = point_extractor.extract_points(frame, dt, false); - tracking_valid = point_tracker.track(points, camera.get_info().f, dt); + tracking_valid = point_tracker.track(points, camera.get_info().fov, dt, frame.cols, frame.rows, t_MH); video_widget->update_image(frame); } #ifdef PT_PERF_LOG @@ -104,7 +104,7 @@ void Tracker::apply(settings& s) camera.set_device_index(s.cam_index); camera.set_res(s.cam_res_x, s.cam_res_y); camera.set_fps(s.cam_fps); - camera.set_f(s.cam_f); + camera.set_fov(s.cam_fov); frame_rotation.rotation = static_cast(static_cast(s.cam_roll)); point_extractor.threshold_val = s.threshold; point_extractor.threshold_secondary_val = s.threshold_secondary; diff --git a/FTNoIR_Tracker_PT/ftnoir_tracker_pt_dialog.cpp b/FTNoIR_Tracker_PT/ftnoir_tracker_pt_dialog.cpp index c103b78c..4ae20f48 100644 --- a/FTNoIR_Tracker_PT/ftnoir_tracker_pt_dialog.cpp +++ b/FTNoIR_Tracker_PT/ftnoir_tracker_pt_dialog.cpp @@ -46,7 +46,7 @@ TrackerDialog::TrackerDialog() tie_setting(s.reset_time, ui.reset_spin); tie_setting(s.cam_index, ui.camdevice_combo); - tie_setting(s.cam_f, ui.f_dspin); + tie_setting(s.cam_fov, ui.fov_dspin); tie_setting(s.cam_res_x, ui.res_x_spin); tie_setting(s.cam_res_y, ui.res_y_spin); tie_setting(s.cam_fps, ui.fps_spin); diff --git a/FTNoIR_Tracker_PT/ftnoir_tracker_pt_settings.h b/FTNoIR_Tracker_PT/ftnoir_tracker_pt_settings.h index 564e1264..e0dfa2e6 100644 --- a/FTNoIR_Tracker_PT/ftnoir_tracker_pt_settings.h +++ b/FTNoIR_Tracker_PT/ftnoir_tracker_pt_settings.h @@ -28,7 +28,7 @@ struct settings threshold_secondary, min_point_size, max_point_size; - value cam_f; + value cam_fov; value m01_x, m01_y, m01_z; value m02_x, m02_y, m02_z; @@ -57,7 +57,7 @@ struct settings threshold_secondary(b, "threshold-secondary", 128), min_point_size(b, "min-point-size", 10), max_point_size(b, "max-point-size", 50), - cam_f(b, "camera-focal-length", 1), + cam_fov(b, "camera-fov", 56), m01_x(b, "m_01-x", 0), m01_y(b, "m_01-y", 0), 
m01_z(b, "m_01-z", 0), diff --git a/FTNoIR_Tracker_PT/point_tracker.cpp b/FTNoIR_Tracker_PT/point_tracker.cpp index dfefdaf8..1df70b17 100644 --- a/FTNoIR_Tracker_PT/point_tracker.cpp +++ b/FTNoIR_Tracker_PT/point_tracker.cpp @@ -17,23 +17,6 @@ using namespace cv; using namespace boost; using namespace std; -const float PI = 3.14159265358979323846f; - -// ---------------------------------------------------------------------------- -static void get_row(const Matx33f& m, int i, Vec3f& v) -{ - v[0] = m(i,0); - v[1] = m(i,1); - v[2] = m(i,2); -} - -static void set_row(Matx33f& m, int i, const Vec3f& v) -{ - m(i,0) = v[0]; - m(i,1) = v[1]; - m(i,2) = v[2]; -} - // ---------------------------------------------------------------------------- PointModel::PointModel(Vec3f M01, Vec3f M02) : M01(M01), @@ -108,9 +91,11 @@ PointTracker::PointTracker() dt_reset(1), v_t(0,0,0), v_r(0,0,0), - dynamic_pose_resolution(true) + dynamic_pose_resolution(true), + fov(0), + _w(0), + _h(0) { - X_CM.t[2] = 1000; // default position: 1 m away from cam; } void PointTracker::reset() @@ -128,7 +113,7 @@ void PointTracker::reset_velocities() } -bool PointTracker::track(const vector& points, float f, float dt) +bool PointTracker::track(const vector& points, float fov, float dt, int w, int h, const cv::Vec3f& headpos) { if (!dynamic_pose_resolution) init_phase = true; @@ -140,12 +125,7 @@ bool PointTracker::track(const vector& points, float f, float dt) reset(); } - bool no_model = -#ifdef OPENTRACK_API - point_model.get() == NULL; -#else - !point_model; -#endif + bool no_model = !point_model; // if there is a pointtracking problem, reset the velocities if (no_model || points.size() != PointModel::N_POINTS) @@ -161,7 +141,7 @@ bool PointTracker::track(const vector& points, float f, float dt) predict(dt_valid); // if there is a point correspondence problem something has gone wrong, do a reset - if (!find_correspondences(points, f)) + if (!find_correspondences(points)) { //qDebug()<<"Error in finding point correspondences!"; X_CM = X_CM_old; // undo prediction @@ -169,7 +149,8 @@ bool PointTracker::track(const vector& points, float f, float dt) return false; } - int n_iter = POSIT(f); + // XXX TODO fov + POSIT(fov, w, h, headpos); //qDebug()<<"Number of POSIT iterations: "<& points, float f) +bool PointTracker::find_correspondences(const vector& points) { if (init_phase) { // We do a simple freetrack-like sorting in the init phase... @@ -215,9 +196,9 @@ bool PointTracker::find_correspondences(const vector& points, float f) else { // ... otherwise we look at the distance to the projection of the expected model points // project model points under current pose - p_exp[0] = project(Vec3f(0,0,0), f); - p_exp[1] = project(point_model->M01, f); - p_exp[2] = project(point_model->M02, f); + p_exp[0] = project(Vec3f(0,0,0)); + p_exp[1] = project(point_model->M01); + p_exp[2] = project(point_model->M02); // set correspondences by minimum distance to projected model point bool point_taken[PointModel::N_POINTS]; @@ -251,130 +232,48 @@ bool PointTracker::find_correspondences(const vector& points, float f) -int PointTracker::POSIT(float f) +void PointTracker::POSIT(float fov, int w, int h, const cv::Vec3f& headpos) { - // POSIT algorithm for coplanar points as presented in - // [Denis Oberkampf, Daniel F. DeMenthon, Larry S. 
Davis: "Iterative Pose Estimation Using Coplanar Feature Points"] - // we use the same notation as in the paper here - - // The expected rotation used for resolving the ambiguity in POSIT: - // In every iteration step the rotation closer to R_expected is taken - Matx33f R_expected; - if (init_phase) - R_expected = Matx33f::eye(); // in the init phase, we want to be close to the default pose = no rotation - else - R_expected = X_CM.R; // later we want to be close to the last (predicted) rotation - - // initial pose = last (predicted) pose - Vec3f k; - get_row(R_expected, 2, k); - float Z0 = init_phase ? 1000 : X_CM.t[2]; - - float old_epsilon_1 = 0; - float old_epsilon_2 = 0; - float epsilon_1 = 1; - float epsilon_2 = 1; - - Vec3f I0, J0; - Vec2f I0_coeff, J0_coeff; - - Vec3f I_1, J_1, I_2, J_2; - Matx33f R_1, R_2; - Matx33f* R_current; - - const int MAX_ITER = 100; - const float EPS_THRESHOLD = 1e-4; - - int i=1; - for (; iM01)/Z0; - epsilon_2 = k.dot(point_model->M02)/Z0; - - // vector of scalar products and - Vec2f I0_M0i(p[1][0]*(1.0 + epsilon_1) - p[0][0], - p[2][0]*(1.0 + epsilon_2) - p[0][0]); - Vec2f J0_M0i(p[1][1]*(1.0 + epsilon_1) - p[0][1], - p[2][1]*(1.0 + epsilon_2) - p[0][1]); - - // construct projection of I, J onto M0i plane: I0 and J0 - I0_coeff = point_model->P * I0_M0i; - J0_coeff = point_model->P * J0_M0i; - I0 = I0_coeff[0]*point_model->M01 + I0_coeff[1]*point_model->M02; - J0 = J0_coeff[0]*point_model->M01 + J0_coeff[1]*point_model->M02; - - // calculate u component of I, J - float II0 = I0.dot(I0); - float IJ0 = I0.dot(J0); - float JJ0 = J0.dot(J0); - float rho, theta; - if (JJ0 == II0) { - rho = sqrt(abs(2*IJ0)); - theta = -PI/4; - if (IJ0<0) theta *= -1; - } - else { - rho = sqrt(sqrt( (JJ0-II0)*(JJ0-II0) + 4*IJ0*IJ0 )); - theta = atan( -2*IJ0 / (JJ0-II0) ); - if (JJ0 - II0 < 0) theta += PI; - theta /= 2; - } + // XXX hack + this->fov = fov; + _w = w; + _h = h; + std::vector obj_points; + std::vector img_points; - // construct the two solutions - I_1 = I0 + rho*cos(theta)*point_model->u; - I_2 = I0 - rho*cos(theta)*point_model->u; + obj_points.push_back(headpos); + obj_points.push_back(point_model->M01 + headpos); + obj_points.push_back(point_model->M02 + headpos); - J_1 = J0 + rho*sin(theta)*point_model->u; - J_2 = J0 - rho*sin(theta)*point_model->u; + img_points.push_back(p[0]); + img_points.push_back(p[1]); + img_points.push_back(p[2]); - float norm_const = 1.0/norm(I_1); // all have the same norm - - // create rotation matrices - I_1 *= norm_const; J_1 *= norm_const; - I_2 *= norm_const; J_2 *= norm_const; + const float HT_PI = 3.1415926535; - set_row(R_1, 0, I_1); - set_row(R_1, 1, J_1); - set_row(R_1, 2, I_1.cross(J_1)); - - set_row(R_2, 0, I_2); - set_row(R_2, 1, J_2); - set_row(R_2, 2, I_2.cross(J_2)); + const float focal_length_w = 0.5 * w / tan(fov * HT_PI / 180); + const float focal_length_h = 0.5 * h / tan(fov * h / w * HT_PI / 180.0); - // the single translation solution - Z0 = norm_const * f; + cv::Mat intrinsics = cv::Mat::eye(3, 3, CV_32FC1); + intrinsics.at (0, 0) = focal_length_w; + intrinsics.at (1, 1) = focal_length_h; + intrinsics.at (0, 2) = w/2; + intrinsics.at (1, 2) = h/2; - // pick the rotation solution closer to the expected one - // in simple metric d(A,B) = || I - A * B^T || - float R_1_deviation = norm(Matx33f::eye() - R_expected * R_1.t()); - float R_2_deviation = norm(Matx33f::eye() - R_expected * R_2.t()); + cv::Mat dist_coeffs = cv::Mat::zeros(5, 1, CV_32FC1); - if (R_1_deviation < R_2_deviation) - R_current = &R_1; - else - 
R_current = &R_2; + bool lastp = !rvec.empty() && !tvec.empty() && !init_phase; - get_row(*R_current, 2, k); + cv::solvePnP(obj_points, img_points, intrinsics, dist_coeffs, rvec, tvec, lastp, cv::ITERATIVE); - // check for convergence condition - if (abs(epsilon_1 - old_epsilon_1) + abs(epsilon_2 - old_epsilon_2) < EPS_THRESHOLD) - break; - old_epsilon_1 = epsilon_1; - old_epsilon_2 = epsilon_2; - } + cv::Mat rmat; + cv::Rodrigues(rvec, rmat); // apply results - X_CM.R = *R_current; - X_CM.t[0] = p[0][0] * Z0/f; - X_CM.t[1] = p[0][1] * Z0/f; - X_CM.t[2] = Z0; - - return i; - - //Rodrigues(X_CM.R, r); - //qDebug()<<"iter: "<(i); + for (int j = 0; j < 3; j++) + X_CM.R(i, j) = rmat.at(i, j); + } } diff --git a/FTNoIR_Tracker_PT/point_tracker.h b/FTNoIR_Tracker_PT/point_tracker.h index 11034100..e05e8f98 100644 --- a/FTNoIR_Tracker_PT/point_tracker.h +++ b/FTNoIR_Tracker_PT/point_tracker.h @@ -90,7 +90,7 @@ public: // track the pose using the set of normalized point coordinates (x pos in range -0.5:0.5) // f : (focal length)/(sensor width) // dt : time since last call - bool track(const std::vector& points, float f, float dt); + bool track(const std::vector& points, float fov, float dt, int w, int h, const cv::Vec3f &headpos); boost::shared_ptr point_model; bool dynamic_pose_resolution; @@ -98,15 +98,35 @@ public: FrameTrafo get_pose() const { return X_CM; } void reset(); + float fov; + int _w, _h; protected: - inline cv::Vec2f project(const cv::Vec3f& v_M, float f) + cv::Vec2f project(const cv::Vec3f& v_M) { - cv::Vec3f v_C = X_CM * v_M; - return cv::Vec2f(f*v_C[0]/v_C[2], f*v_C[1]/v_C[2]); + if (!rvec.empty() && !tvec.empty() && fov > 0) + { + const float HT_PI = 3.1415926535; + const int w = _w, h = _h; + const float focal_length_w = 0.5 * w / tan(fov * HT_PI / 180); + const float focal_length_h = 0.5 * h / tan(fov * h / w * HT_PI / 180.0); + + cv::Mat intrinsics = cv::Mat::eye(3, 3, CV_32FC1); + intrinsics.at (0, 0) = focal_length_w; + intrinsics.at (1, 1) = focal_length_h; + intrinsics.at (0, 2) = w/2; + intrinsics.at (1, 2) = h/2; + std::vector xs; + xs.push_back(v_M); + cv::Mat dist_coeffs = cv::Mat::zeros(5, 1, CV_32FC1); + std::vector rets(1); + cv::projectPoints(xs, rvec, tvec, intrinsics, dist_coeffs, rets); + return rets[0]; + } + return cv::Vec2f(); } - bool find_correspondences(const std::vector& points, float f); + bool find_correspondences(const std::vector& points); cv::Vec2f p[PointModel::N_POINTS]; // the points in model order cv::Vec2f p_exp[PointModel::N_POINTS]; // the expected point positions @@ -116,7 +136,7 @@ protected: void reset_velocities(); - int POSIT(float f); // The POSIT algorithm, returns the number of iterations + void POSIT(float fov, int w, int h, const cv::Vec3f &headpos); // The POSIT algorithm, returns the number of iterations bool init_phase; float dt_valid; // time since last valid tracking result @@ -124,6 +144,7 @@ protected: cv::Vec3f v_r; FrameTrafo X_CM; // trafo from model to camera FrameTrafo X_CM_old; + cv::Mat rvec, tvec; }; #endif //POINTTRACKER_H -- cgit v1.2.3 From 0a32ae32452b02fae0036f9c5099b043d0c9052e Mon Sep 17 00:00:00 2001 From: Stanislaw Halik Date: Wed, 8 Jan 2014 21:45:34 +0100 Subject: buffer flush --- FTNoIR_Tracker_PT/ftnoir_tracker_pt.cpp | 30 ++++++++++++++---------------- FTNoIR_Tracker_PT/point_tracker.cpp | 33 ++++++++++++++++++++++++--------- FTNoIR_Tracker_PT/point_tracker.h | 18 +++++++++--------- 3 files changed, 47 insertions(+), 34 deletions(-) (limited to 'FTNoIR_Tracker_PT/point_tracker.cpp') diff --git 
a/FTNoIR_Tracker_PT/ftnoir_tracker_pt.cpp b/FTNoIR_Tracker_PT/ftnoir_tracker_pt.cpp index a3e8919b..7bd447cb 100644 --- a/FTNoIR_Tracker_PT/ftnoir_tracker_pt.cpp +++ b/FTNoIR_Tracker_PT/ftnoir_tracker_pt.cpp @@ -83,8 +83,8 @@ void Tracker::run() if (new_frame && !frame.empty()) { frame = frame_rotation.rotate_frame(frame); - const std::vector& points = point_extractor.extract_points(frame, dt, false); - tracking_valid = point_tracker.track(points, camera.get_info().fov, dt, frame.cols, frame.rows, t_MH); + const std::vector& points = point_extractor.extract_points(frame, dt, true); + tracking_valid = point_tracker.track(points, camera.get_info().fov, dt, frame.cols, frame.rows); video_widget->update_image(frame); } #ifdef PT_PERF_LOG @@ -119,15 +119,13 @@ void Tracker::apply(settings& s) point_tracker.dt_reset = s.reset_time / 1000.0; t_MH = cv::Vec3f(s.t_MH_x, s.t_MH_y, s.t_MH_z); R_GC = Matx33f( cos(deg2rad*s.cam_yaw), 0, sin(deg2rad*s.cam_yaw), - 0, 1, 0, - -sin(deg2rad*s.cam_yaw), 0, cos(deg2rad*s.cam_yaw)); - R_GC = R_GC * Matx33f( 1, 0, 0, + 0, 1, 0, + -sin(deg2rad*s.cam_yaw), 0, cos(deg2rad*s.cam_yaw)); + R_GC = R_GC * Matx33f( 1, 0, 0, 0, cos(deg2rad*s.cam_pitch), sin(deg2rad*s.cam_pitch), 0, -sin(deg2rad*s.cam_pitch), cos(deg2rad*s.cam_pitch)); - - FrameTrafo X_MH(Matx33f::eye(), t_MH); - X_GH_0 = R_GC * X_MH; - + FrameTrafo X_MH(Matx33f::eye(), t_MH); + X_GH_0 = R_GC * X_MH; qDebug()<<"Tracker::apply ends"; } @@ -140,10 +138,10 @@ void Tracker::reset() void Tracker::center() { point_tracker.reset(); // we also do a reset here since there is no reset shortkey yet - QMutexLocker lock(&mutex); - FrameTrafo X_CM_0 = point_tracker.get_pose(); - FrameTrafo X_MH(Matx33f::eye(), t_MH); - X_GH_0 = R_GC * X_CM_0 * X_MH; + QMutexLocker lock(&mutex); + FrameTrafo X_CM_0 = point_tracker.get_pose(); + FrameTrafo X_MH(Matx33f::eye(), t_MH); + X_GH_0 = R_GC * X_CM_0 * X_MH; } bool Tracker::get_frame_and_points(cv::Mat& frame_copy, boost::shared_ptr< std::vector >& points) @@ -198,10 +196,10 @@ void Tracker::GetHeadPoseData(THeadPoseData *data) if (!tracking_valid) return; FrameTrafo X_CM = point_tracker.get_pose(); - FrameTrafo X_MH(Matx33f::eye(), t_MH); - FrameTrafo X_GH = R_GC * X_CM * X_MH; + FrameTrafo X_MH(Matx33f::eye(), t_MH); + FrameTrafo X_GH = R_GC * X_CM * X_MH; Matx33f R = X_GH.R * X_GH_0.R.t(); - Vec3f t = X_GH.t - X_GH_0.t; + Vec3f t = X_GH.t - X_GH_0.t; // get translation(s) if (s.bEnableX) data[TX] = t[0] / 10.0; // convert to cm diff --git a/FTNoIR_Tracker_PT/point_tracker.cpp b/FTNoIR_Tracker_PT/point_tracker.cpp index 1df70b17..263be43a 100644 --- a/FTNoIR_Tracker_PT/point_tracker.cpp +++ b/FTNoIR_Tracker_PT/point_tracker.cpp @@ -104,6 +104,9 @@ void PointTracker::reset() init_phase = true; dt_valid = 0; reset_velocities(); + // assume identity rotation again + X_CM.R = cv::Matx33d::eye(); + X_CM.t = cv::Vec3f(); } void PointTracker::reset_velocities() @@ -113,7 +116,7 @@ void PointTracker::reset_velocities() } -bool PointTracker::track(const vector& points, float fov, float dt, int w, int h, const cv::Vec3f& headpos) +bool PointTracker::track(const vector& points, float fov, float dt, int w, int h) { if (!dynamic_pose_resolution) init_phase = true; @@ -150,7 +153,7 @@ bool PointTracker::track(const vector& points, float fov, float dt, int w } // XXX TODO fov - POSIT(fov, w, h, headpos); + POSIT(fov, w, h); //qDebug()<<"Number of POSIT iterations: "<& points, float fov, float dt, int w void PointTracker::predict(float dt) { // predict with constant velocity - Matx33f R; + 
Matx33d R; Rodrigues(dt*v_r, R); X_CM.R = R*X_CM.R; X_CM.t += dt * v_t; @@ -232,7 +235,7 @@ bool PointTracker::find_correspondences(const vector& points) -void PointTracker::POSIT(float fov, int w, int h, const cv::Vec3f& headpos) +void PointTracker::POSIT(float fov, int w, int h) { // XXX hack this->fov = fov; @@ -241,9 +244,9 @@ void PointTracker::POSIT(float fov, int w, int h, const cv::Vec3f& headpos) std::vector obj_points; std::vector img_points; - obj_points.push_back(headpos); - obj_points.push_back(point_model->M01 + headpos); - obj_points.push_back(point_model->M02 + headpos); + obj_points.push_back(cv::Vec3f(0, 0, 0)); + obj_points.push_back(point_model->M01); + obj_points.push_back(point_model->M02); img_points.push_back(p[0]); img_points.push_back(p[1]); @@ -262,17 +265,29 @@ void PointTracker::POSIT(float fov, int w, int h, const cv::Vec3f& headpos) cv::Mat dist_coeffs = cv::Mat::zeros(5, 1, CV_32FC1); - bool lastp = !rvec.empty() && !tvec.empty() && !init_phase; + bool lastp = !rvec.empty() && !tvec.empty(); cv::solvePnP(obj_points, img_points, intrinsics, dist_coeffs, rvec, tvec, lastp, cv::ITERATIVE); cv::Mat rmat; cv::Rodrigues(rvec, rmat); + // finally, find the closer solution + cv::Mat expected = cv::Mat(X_CM.R); + cv::Mat eye = cv::Mat::eye(3, 3, CV_64FC1); + double dev1 = norm(eye - expected * rmat.t()); + double dev2 = norm(eye - expected * rmat); + + if (dev1 > dev2) + { + rmat = rmat.t(); + cv::Rodrigues(rmat, rvec); + } + // apply results for (int i = 0; i < 3; i++) { - X_CM.t[i] = tvec.at(i); + X_CM.t[i] = tvec.at(i) * 1e-2; for (int j = 0; j < 3; j++) X_CM.R(i, j) = rmat.at(i, j); } diff --git a/FTNoIR_Tracker_PT/point_tracker.h b/FTNoIR_Tracker_PT/point_tracker.h index e05e8f98..823d75c0 100644 --- a/FTNoIR_Tracker_PT/point_tracker.h +++ b/FTNoIR_Tracker_PT/point_tracker.h @@ -21,11 +21,11 @@ class FrameTrafo { public: - FrameTrafo() : R(cv::Matx33f::eye()), t(0,0,0) {} - FrameTrafo(const cv::Matx33f& R, const cv::Vec3f& t) : R(R),t(t) {} + FrameTrafo() : R(cv::Matx33d::eye()), t(0,0,0) {} + FrameTrafo(const cv::Matx33d& R, const cv::Vec3d& t) : R(R),t(t) {} - cv::Matx33f R; - cv::Vec3f t; + cv::Matx33d R; + cv::Vec3d t; }; inline FrameTrafo operator*(const FrameTrafo& X, const FrameTrafo& Y) @@ -33,17 +33,17 @@ inline FrameTrafo operator*(const FrameTrafo& X, const FrameTrafo& Y) return FrameTrafo(X.R*Y.R, X.R*Y.t + X.t); } -inline FrameTrafo operator*(const cv::Matx33f& X, const FrameTrafo& Y) +inline FrameTrafo operator*(const cv::Matx33d& X, const FrameTrafo& Y) { return FrameTrafo(X*Y.R, X*Y.t); } -inline FrameTrafo operator*(const FrameTrafo& X, const cv::Matx33f& Y) +inline FrameTrafo operator*(const FrameTrafo& X, const cv::Matx33d& Y) { return FrameTrafo(X.R*Y, X.t); } -inline cv::Vec3f operator*(const FrameTrafo& X, const cv::Vec3f& v) +inline cv::Vec3f operator*(const FrameTrafo& X, const cv::Vec3d& v) { return X.R*v + X.t; } @@ -90,7 +90,7 @@ public: // track the pose using the set of normalized point coordinates (x pos in range -0.5:0.5) // f : (focal length)/(sensor width) // dt : time since last call - bool track(const std::vector& points, float fov, float dt, int w, int h, const cv::Vec3f &headpos); + bool track(const std::vector& points, float fov, float dt, int w, int h); boost::shared_ptr point_model; bool dynamic_pose_resolution; @@ -136,7 +136,7 @@ protected: void reset_velocities(); - void POSIT(float fov, int w, int h, const cv::Vec3f &headpos); // The POSIT algorithm, returns the number of iterations + void POSIT(float fov, int w, int 
h); // The POSIT algorithm, returns the number of iterations bool init_phase; float dt_valid; // time since last valid tracking result -- cgit v1.2.3 From d6855de1a83fd54c43384de218d61252de7a7a1b Mon Sep 17 00:00:00 2001 From: Stanislaw Halik Date: Wed, 8 Jan 2014 23:53:45 +0100 Subject: fix crash due to mismatched float/double --- FTNoIR_Tracker_PT/point_tracker.cpp | 11 +++++++---- FTNoIR_Tracker_PT/point_tracker.h | 14 +++++++------- FTNoIR_Tracker_PT/trans_calib.cpp | 4 ++-- 3 files changed, 16 insertions(+), 13 deletions(-) (limited to 'FTNoIR_Tracker_PT/point_tracker.cpp') diff --git a/FTNoIR_Tracker_PT/point_tracker.cpp b/FTNoIR_Tracker_PT/point_tracker.cpp index 263be43a..9f0fb59e 100644 --- a/FTNoIR_Tracker_PT/point_tracker.cpp +++ b/FTNoIR_Tracker_PT/point_tracker.cpp @@ -105,8 +105,8 @@ void PointTracker::reset() dt_valid = 0; reset_velocities(); // assume identity rotation again - X_CM.R = cv::Matx33d::eye(); - X_CM.t = cv::Vec3f(); + X_CM.R = cv::Matx33f::eye(); + X_CM.t = cv::Vec3f(0, 0, 1); } void PointTracker::reset_velocities() @@ -168,7 +168,7 @@ bool PointTracker::track(const vector& points, float fov, float dt, int w void PointTracker::predict(float dt) { // predict with constant velocity - Matx33d R; + Matx33f R; Rodrigues(dt*v_r, R); X_CM.R = R*X_CM.R; X_CM.t += dt * v_t; @@ -273,7 +273,10 @@ void PointTracker::POSIT(float fov, int w, int h) cv::Rodrigues(rvec, rmat); // finally, find the closer solution - cv::Mat expected = cv::Mat(X_CM.R); + cv::Mat expected(3, 3, CV_64FC1); + for (int i = 0; i < 3; i++) + for (int j = 0; j < 3; j++) + expected.at(i, j) = X_CM.R(i, j); cv::Mat eye = cv::Mat::eye(3, 3, CV_64FC1); double dev1 = norm(eye - expected * rmat.t()); double dev2 = norm(eye - expected * rmat); diff --git a/FTNoIR_Tracker_PT/point_tracker.h b/FTNoIR_Tracker_PT/point_tracker.h index 823d75c0..ac43489e 100644 --- a/FTNoIR_Tracker_PT/point_tracker.h +++ b/FTNoIR_Tracker_PT/point_tracker.h @@ -21,11 +21,11 @@ class FrameTrafo { public: - FrameTrafo() : R(cv::Matx33d::eye()), t(0,0,0) {} - FrameTrafo(const cv::Matx33d& R, const cv::Vec3d& t) : R(R),t(t) {} + FrameTrafo() : R(cv::Matx33f::eye()), t(0,0,0) {} + FrameTrafo(const cv::Matx33f& R, const cv::Vec3f& t) : R(R),t(t) {} - cv::Matx33d R; - cv::Vec3d t; + cv::Matx33f R; + cv::Vec3f t; }; inline FrameTrafo operator*(const FrameTrafo& X, const FrameTrafo& Y) @@ -33,17 +33,17 @@ inline FrameTrafo operator*(const FrameTrafo& X, const FrameTrafo& Y) return FrameTrafo(X.R*Y.R, X.R*Y.t + X.t); } -inline FrameTrafo operator*(const cv::Matx33d& X, const FrameTrafo& Y) +inline FrameTrafo operator*(const cv::Matx33f& X, const FrameTrafo& Y) { return FrameTrafo(X*Y.R, X*Y.t); } -inline FrameTrafo operator*(const FrameTrafo& X, const cv::Matx33d& Y) +inline FrameTrafo operator*(const FrameTrafo& X, const cv::Matx33f& Y) { return FrameTrafo(X.R*Y, X.t); } -inline cv::Vec3f operator*(const FrameTrafo& X, const cv::Vec3d& v) +inline cv::Vec3f operator*(const FrameTrafo& X, const cv::Vec3f& v) { return X.R*v + X.t; } diff --git a/FTNoIR_Tracker_PT/trans_calib.cpp b/FTNoIR_Tracker_PT/trans_calib.cpp index 9b75a1b6..b2e0ead0 100644 --- a/FTNoIR_Tracker_PT/trans_calib.cpp +++ b/FTNoIR_Tracker_PT/trans_calib.cpp @@ -40,5 +40,5 @@ void TranslationCalibrator::update(const Matx33f& R_CM_k, const Vec3f& t_CM_k) Vec3f TranslationCalibrator::get_estimate() { Vec6f x = P.inv() * y; - return Vec3f(-x[0], -x[1], -x[2]); -} \ No newline at end of file + return Vec3f(-x[0], -x[1], -x[2]); +} -- cgit v1.2.3 From 
ff04681a70ad5f3f8984aee465b4c20b648779be Mon Sep 17 00:00:00 2001 From: Stanislaw Halik Date: Thu, 9 Jan 2014 07:21:23 +0100 Subject: don't touch tvec scale --- FTNoIR_Tracker_PT/point_tracker.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) (limited to 'FTNoIR_Tracker_PT/point_tracker.cpp') diff --git a/FTNoIR_Tracker_PT/point_tracker.cpp b/FTNoIR_Tracker_PT/point_tracker.cpp index 9f0fb59e..c0221909 100644 --- a/FTNoIR_Tracker_PT/point_tracker.cpp +++ b/FTNoIR_Tracker_PT/point_tracker.cpp @@ -290,7 +290,7 @@ void PointTracker::POSIT(float fov, int w, int h) // apply results for (int i = 0; i < 3; i++) { - X_CM.t[i] = tvec.at(i) * 1e-2; + X_CM.t[i] = tvec.at(i); for (int j = 0; j < 3; j++) X_CM.R(i, j) = rmat.at(i, j); } -- cgit v1.2.3 From 24287f1c4c8308678b1f7320b303548027dd8954 Mon Sep 17 00:00:00 2001 From: Stanislaw Halik Date: Sat, 11 Jan 2014 15:27:11 +0100 Subject: remove an outdated comment --- FTNoIR_Tracker_PT/point_tracker.cpp | 1 - 1 file changed, 1 deletion(-) (limited to 'FTNoIR_Tracker_PT/point_tracker.cpp') diff --git a/FTNoIR_Tracker_PT/point_tracker.cpp b/FTNoIR_Tracker_PT/point_tracker.cpp index c0221909..cce327e9 100644 --- a/FTNoIR_Tracker_PT/point_tracker.cpp +++ b/FTNoIR_Tracker_PT/point_tracker.cpp @@ -152,7 +152,6 @@ bool PointTracker::track(const vector& points, float fov, float dt, int w return false; } - // XXX TODO fov POSIT(fov, w, h); //qDebug()<<"Number of POSIT iterations: "< Date: Sat, 11 Jan 2014 15:36:14 +0100 Subject: remove hack of passing fov/w/h as member vars --- FTNoIR_Tracker_PT/point_tracker.cpp | 19 ++++++------------- FTNoIR_Tracker_PT/point_tracker.h | 8 ++------ 2 files changed, 8 insertions(+), 19 deletions(-) (limited to 'FTNoIR_Tracker_PT/point_tracker.cpp') diff --git a/FTNoIR_Tracker_PT/point_tracker.cpp b/FTNoIR_Tracker_PT/point_tracker.cpp index cce327e9..a881da62 100644 --- a/FTNoIR_Tracker_PT/point_tracker.cpp +++ b/FTNoIR_Tracker_PT/point_tracker.cpp @@ -91,10 +91,7 @@ PointTracker::PointTracker() dt_reset(1), v_t(0,0,0), v_r(0,0,0), - dynamic_pose_resolution(true), - fov(0), - _w(0), - _h(0) + dynamic_pose_resolution(true) { } @@ -144,7 +141,7 @@ bool PointTracker::track(const vector& points, float fov, float dt, int w predict(dt_valid); // if there is a point correspondence problem something has gone wrong, do a reset - if (!find_correspondences(points)) + if (!find_correspondences(points, fov, w, h)) { //qDebug()<<"Error in finding point correspondences!"; X_CM = X_CM_old; // undo prediction @@ -181,7 +178,7 @@ void PointTracker::update_velocities(float dt) v_t = (X_CM.t - X_CM_old.t)/dt; } -bool PointTracker::find_correspondences(const vector& points) +bool PointTracker::find_correspondences(const vector& points, float fov, int w, int h) { if (init_phase) { // We do a simple freetrack-like sorting in the init phase... @@ -198,9 +195,9 @@ bool PointTracker::find_correspondences(const vector& points) else { // ... 
otherwise we look at the distance to the projection of the expected model points // project model points under current pose - p_exp[0] = project(Vec3f(0,0,0)); - p_exp[1] = project(point_model->M01); - p_exp[2] = project(point_model->M02); + p_exp[0] = project(Vec3f(0,0,0), fov, w, h); + p_exp[1] = project(point_model->M01, fov, w, h); + p_exp[2] = project(point_model->M02, fov, w, h); // set correspondences by minimum distance to projected model point bool point_taken[PointModel::N_POINTS]; @@ -236,10 +233,6 @@ bool PointTracker::find_correspondences(const vector& points) void PointTracker::POSIT(float fov, int w, int h) { - // XXX hack - this->fov = fov; - _w = w; - _h = h; std::vector obj_points; std::vector img_points; diff --git a/FTNoIR_Tracker_PT/point_tracker.h b/FTNoIR_Tracker_PT/point_tracker.h index ac43489e..741d5af4 100644 --- a/FTNoIR_Tracker_PT/point_tracker.h +++ b/FTNoIR_Tracker_PT/point_tracker.h @@ -98,16 +98,12 @@ public: FrameTrafo get_pose() const { return X_CM; } void reset(); - float fov; - int _w, _h; - protected: - cv::Vec2f project(const cv::Vec3f& v_M) + cv::Vec2f project(const cv::Vec3f& v_M, float fov, int w, int h) { if (!rvec.empty() && !tvec.empty() && fov > 0) { const float HT_PI = 3.1415926535; - const int w = _w, h = _h; const float focal_length_w = 0.5 * w / tan(fov * HT_PI / 180); const float focal_length_h = 0.5 * h / tan(fov * h / w * HT_PI / 180.0); @@ -126,7 +122,7 @@ protected: return cv::Vec2f(); } - bool find_correspondences(const std::vector& points); + bool find_correspondences(const std::vector& points, float fov, int w, int h); cv::Vec2f p[PointModel::N_POINTS]; // the points in model order cv::Vec2f p_exp[PointModel::N_POINTS]; // the expected point positions -- cgit v1.2.3 From 7217cf5ac2191c92c956c88f012282882593bfc4 Mon Sep 17 00:00:00 2001 From: Stanislaw Halik Date: Sat, 11 Jan 2014 16:27:09 +0100 Subject: fix translation estimation --- FTNoIR_Tracker_PT/point_tracker.cpp | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) (limited to 'FTNoIR_Tracker_PT/point_tracker.cpp') diff --git a/FTNoIR_Tracker_PT/point_tracker.cpp b/FTNoIR_Tracker_PT/point_tracker.cpp index a881da62..2dd89aed 100644 --- a/FTNoIR_Tracker_PT/point_tracker.cpp +++ b/FTNoIR_Tracker_PT/point_tracker.cpp @@ -240,9 +240,11 @@ void PointTracker::POSIT(float fov, int w, int h) obj_points.push_back(point_model->M01); obj_points.push_back(point_model->M02); - img_points.push_back(p[0]); - img_points.push_back(p[1]); - img_points.push_back(p[2]); + for (int i = 0; i < 3; i++) + { + auto p2 = cv::Point(p[i][0] * w + w/2, p[i][1] * h + h/2); + img_points.push_back(p2); + } const float HT_PI = 3.1415926535; -- cgit v1.2.3 From cb2afb510c98f168e312007cbb92f70b7b4ffa4b Mon Sep 17 00:00:00 2001 From: Stanislaw Halik Date: Sat, 11 Jan 2014 16:41:55 +0100 Subject: reset levmarq internal state on reset (duh) --- FTNoIR_Tracker_PT/point_tracker.cpp | 2 ++ 1 file changed, 2 insertions(+) (limited to 'FTNoIR_Tracker_PT/point_tracker.cpp') diff --git a/FTNoIR_Tracker_PT/point_tracker.cpp b/FTNoIR_Tracker_PT/point_tracker.cpp index 2dd89aed..69d46645 100644 --- a/FTNoIR_Tracker_PT/point_tracker.cpp +++ b/FTNoIR_Tracker_PT/point_tracker.cpp @@ -104,6 +104,8 @@ void PointTracker::reset() // assume identity rotation again X_CM.R = cv::Matx33f::eye(); X_CM.t = cv::Vec3f(0, 0, 1); + rvec = Mat(); + tvec = Mat(); } void PointTracker::reset_velocities() -- cgit v1.2.3 From 7da7a26385c1d54762d6f2d11a7036d856bc5864 Mon Sep 17 00:00:00 2001 From: Stanislaw Halik Date: Sat, 11 Jan 2014 
18:07:48 +0100 Subject: use correct scale for point coords --- FTNoIR_Tracker_PT/ftnoir_tracker_pt.cpp | 2 +- FTNoIR_Tracker_PT/point_tracker.cpp | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) (limited to 'FTNoIR_Tracker_PT/point_tracker.cpp') diff --git a/FTNoIR_Tracker_PT/ftnoir_tracker_pt.cpp b/FTNoIR_Tracker_PT/ftnoir_tracker_pt.cpp index c94fa3ff..9f74127f 100644 --- a/FTNoIR_Tracker_PT/ftnoir_tracker_pt.cpp +++ b/FTNoIR_Tracker_PT/ftnoir_tracker_pt.cpp @@ -86,7 +86,7 @@ void Tracker::run() for (auto p : points) { auto p2 = cv::Point(p[0] * frame.cols + frame.cols/2, - p[1] * frame.rows + frame.rows/2); + -p[1] * frame.cols + frame.rows/2); cv::Scalar color(0, 255, 0); qDebug() << p2.x << p2.y; cv::line(frame, diff --git a/FTNoIR_Tracker_PT/point_tracker.cpp b/FTNoIR_Tracker_PT/point_tracker.cpp index 69d46645..faac9963 100644 --- a/FTNoIR_Tracker_PT/point_tracker.cpp +++ b/FTNoIR_Tracker_PT/point_tracker.cpp @@ -244,7 +244,7 @@ void PointTracker::POSIT(float fov, int w, int h) for (int i = 0; i < 3; i++) { - auto p2 = cv::Point(p[i][0] * w + w/2, p[i][1] * h + h/2); + auto p2 = cv::Point(p[i][0] * w + w/2, -p[i][1] * h + w/2); img_points.push_back(p2); } -- cgit v1.2.3 From 69dddfbab322d55d6b26336f20fcfdb2ebaecdad Mon Sep 17 00:00:00 2001 From: Stanislaw Halik Date: Sat, 11 Jan 2014 18:13:01 +0100 Subject: FINALLY fix d_order -> image pos --- FTNoIR_Tracker_PT/ftnoir_tracker_pt.cpp | 3 +-- FTNoIR_Tracker_PT/point_tracker.cpp | 2 +- 2 files changed, 2 insertions(+), 3 deletions(-) (limited to 'FTNoIR_Tracker_PT/point_tracker.cpp') diff --git a/FTNoIR_Tracker_PT/ftnoir_tracker_pt.cpp b/FTNoIR_Tracker_PT/ftnoir_tracker_pt.cpp index 102f62ee..c949b8d0 100644 --- a/FTNoIR_Tracker_PT/ftnoir_tracker_pt.cpp +++ b/FTNoIR_Tracker_PT/ftnoir_tracker_pt.cpp @@ -85,8 +85,7 @@ void Tracker::run() const std::vector& points = point_extractor.extract_points(frame, dt, true); for (auto p : points) { - auto p2 = cv::Point(p[0] * frame.cols + frame.cols/2, - -p[1] * frame.cols + frame.rows/2); + auto p2 = cv::Point(p[0] * frame.cols + frame.cols/2, -p[1] * frame.cols + frame.rows/2); cv::Scalar color(0, 255, 0); cv::line(frame, cv::Point(p2.x - 20, p2.y), diff --git a/FTNoIR_Tracker_PT/point_tracker.cpp b/FTNoIR_Tracker_PT/point_tracker.cpp index faac9963..96d9d972 100644 --- a/FTNoIR_Tracker_PT/point_tracker.cpp +++ b/FTNoIR_Tracker_PT/point_tracker.cpp @@ -244,7 +244,7 @@ void PointTracker::POSIT(float fov, int w, int h) for (int i = 0; i < 3; i++) { - auto p2 = cv::Point(p[i][0] * w + w/2, -p[i][1] * h + w/2); + auto p2 = cv::Point(p[i][0] * w + w/2, -p[i][1] * w + h/2); img_points.push_back(p2); } -- cgit v1.2.3 From d471aba741dc9e3c70c919820e36a2613590d723 Mon Sep 17 00:00:00 2001 From: Stanislaw Halik Date: Sun, 12 Jan 2014 18:23:21 +0100 Subject: correct focal length equation --- FTNoIR_Tracker_PT/point_tracker.cpp | 4 ++-- FTNoIR_Tracker_PT/point_tracker.h | 4 ++-- ftnoir_tracker_aruco/ftnoir_tracker_aruco.cpp | 6 +++--- 3 files changed, 7 insertions(+), 7 deletions(-) (limited to 'FTNoIR_Tracker_PT/point_tracker.cpp') diff --git a/FTNoIR_Tracker_PT/point_tracker.cpp b/FTNoIR_Tracker_PT/point_tracker.cpp index 96d9d972..885b327a 100644 --- a/FTNoIR_Tracker_PT/point_tracker.cpp +++ b/FTNoIR_Tracker_PT/point_tracker.cpp @@ -250,8 +250,8 @@ void PointTracker::POSIT(float fov, int w, int h) const float HT_PI = 3.1415926535; - const float focal_length_w = 0.5 * w / tan(fov * HT_PI / 180); - const float focal_length_h = 0.5 * h / tan(fov * h / w * HT_PI / 180.0); + const float focal_length_w = 
0.5 * w / tan(0.5 * fov * HT_PI / 180); + const float focal_length_h = 0.5 * h / tan(0.5 * fov * h / w * HT_PI / 180.0); cv::Mat intrinsics = cv::Mat::eye(3, 3, CV_32FC1); intrinsics.at (0, 0) = focal_length_w; diff --git a/FTNoIR_Tracker_PT/point_tracker.h b/FTNoIR_Tracker_PT/point_tracker.h index 741d5af4..69eb9bba 100644 --- a/FTNoIR_Tracker_PT/point_tracker.h +++ b/FTNoIR_Tracker_PT/point_tracker.h @@ -104,8 +104,8 @@ protected: if (!rvec.empty() && !tvec.empty() && fov > 0) { const float HT_PI = 3.1415926535; - const float focal_length_w = 0.5 * w / tan(fov * HT_PI / 180); - const float focal_length_h = 0.5 * h / tan(fov * h / w * HT_PI / 180.0); + const float focal_length_w = 0.5 * w / tan(0.5 * fov * HT_PI / 180); + const float focal_length_h = 0.5 * h / tan(0.5 * fov * h / w * HT_PI / 180.0); cv::Mat intrinsics = cv::Mat::eye(3, 3, CV_32FC1); intrinsics.at (0, 0) = focal_length_w; diff --git a/ftnoir_tracker_aruco/ftnoir_tracker_aruco.cpp b/ftnoir_tracker_aruco/ftnoir_tracker_aruco.cpp index cbd9f531..77f794b9 100644 --- a/ftnoir_tracker_aruco/ftnoir_tracker_aruco.cpp +++ b/ftnoir_tracker_aruco/ftnoir_tracker_aruco.cpp @@ -217,8 +217,8 @@ void Tracker::run() const int scale = frame.cols > 480 ? 2 : 1; detector.setThresholdParams(scale > 1 ? 11 : 7, 4); - const float focal_length_w = 0.5 * grayscale.cols / tan(s.fov * HT_PI / 180); - const float focal_length_h = 0.5 * grayscale.rows / tan(s.fov * grayscale.rows / grayscale.cols * HT_PI / 180.0); + const float focal_length_w = 0.5 * grayscale.cols / tan(0.5 * s.fov * HT_PI / 180); + const float focal_length_h = 0.5 * grayscale.rows / tan(0.5 * s.fov * grayscale.rows / grayscale.cols * HT_PI / 180.0); cv::Mat intrinsics = cv::Mat::eye(3, 3, CV_32FC1); intrinsics.at (0, 0) = focal_length_w; intrinsics.at (1, 1) = focal_length_h; @@ -286,7 +286,7 @@ void Tracker::run() cv::TermCriteria(cv::TermCriteria::MAX_ITER | cv::TermCriteria::EPS, 20, 1e-2)); - const float size = 7; + const float size = 3.5; const double p = s.marker_pitch; const double sq = sin(p * HT_PI / 180); -- cgit v1.2.3 From b83a47cb32443981ce9998b2015f7d310fb72821 Mon Sep 17 00:00:00 2001 From: Stanislaw Halik Date: Sun, 12 Jan 2014 19:49:57 +0100 Subject: Revert "correct focal length equation" This reverts commit 4312ef4098a449d38209c4f20ed573e9deb1713c. 
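For reference, the relation the reverted commit was aiming at is the standard pinhole one: half the image width subtends half the horizontal field of view, so f_px = (w/2) / tan(fov/2). A minimal sketch of that computation, not part of any commit above; fov_deg and w are illustrative parameters:

#include <cmath>

// Focal length in pixels from a horizontal field of view (pinhole model).
// tan(fov/2) = (w/2) / f   =>   f = (w/2) / tan(fov/2)
static float focal_length_px(float fov_deg, int w)
{
    const float PI = 3.14159265358979323846f;
    return 0.5f * w / std::tan(0.5f * fov_deg * PI / 180.0f);
}
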
--- FTNoIR_Tracker_PT/point_tracker.cpp | 4 ++-- FTNoIR_Tracker_PT/point_tracker.h | 4 ++-- ftnoir_tracker_aruco/ftnoir_tracker_aruco.cpp | 6 +++--- 3 files changed, 7 insertions(+), 7 deletions(-) (limited to 'FTNoIR_Tracker_PT/point_tracker.cpp') diff --git a/FTNoIR_Tracker_PT/point_tracker.cpp b/FTNoIR_Tracker_PT/point_tracker.cpp index 885b327a..96d9d972 100644 --- a/FTNoIR_Tracker_PT/point_tracker.cpp +++ b/FTNoIR_Tracker_PT/point_tracker.cpp @@ -250,8 +250,8 @@ void PointTracker::POSIT(float fov, int w, int h) const float HT_PI = 3.1415926535; - const float focal_length_w = 0.5 * w / tan(0.5 * fov * HT_PI / 180); - const float focal_length_h = 0.5 * h / tan(0.5 * fov * h / w * HT_PI / 180.0); + const float focal_length_w = 0.5 * w / tan(fov * HT_PI / 180); + const float focal_length_h = 0.5 * h / tan(fov * h / w * HT_PI / 180.0); cv::Mat intrinsics = cv::Mat::eye(3, 3, CV_32FC1); intrinsics.at (0, 0) = focal_length_w; diff --git a/FTNoIR_Tracker_PT/point_tracker.h b/FTNoIR_Tracker_PT/point_tracker.h index 69eb9bba..741d5af4 100644 --- a/FTNoIR_Tracker_PT/point_tracker.h +++ b/FTNoIR_Tracker_PT/point_tracker.h @@ -104,8 +104,8 @@ protected: if (!rvec.empty() && !tvec.empty() && fov > 0) { const float HT_PI = 3.1415926535; - const float focal_length_w = 0.5 * w / tan(0.5 * fov * HT_PI / 180); - const float focal_length_h = 0.5 * h / tan(0.5 * fov * h / w * HT_PI / 180.0); + const float focal_length_w = 0.5 * w / tan(fov * HT_PI / 180); + const float focal_length_h = 0.5 * h / tan(fov * h / w * HT_PI / 180.0); cv::Mat intrinsics = cv::Mat::eye(3, 3, CV_32FC1); intrinsics.at (0, 0) = focal_length_w; diff --git a/ftnoir_tracker_aruco/ftnoir_tracker_aruco.cpp b/ftnoir_tracker_aruco/ftnoir_tracker_aruco.cpp index 9d484775..fc641ef1 100644 --- a/ftnoir_tracker_aruco/ftnoir_tracker_aruco.cpp +++ b/ftnoir_tracker_aruco/ftnoir_tracker_aruco.cpp @@ -217,8 +217,8 @@ void Tracker::run() const int scale = frame.cols > 480 ? 2 : 1; detector.setThresholdParams(scale > 1 ? 11 : 7, 4); - const float focal_length_w = 0.5 * grayscale.cols / tan(0.5 * s.fov * HT_PI / 180); - const float focal_length_h = 0.5 * grayscale.rows / tan(0.5 * s.fov * grayscale.rows / grayscale.cols * HT_PI / 180.0); + const float focal_length_w = 0.5 * grayscale.cols / tan(s.fov * HT_PI / 180); + const float focal_length_h = 0.5 * grayscale.rows / tan(s.fov * grayscale.rows / grayscale.cols * HT_PI / 180.0); cv::Mat intrinsics = cv::Mat::eye(3, 3, CV_32FC1); intrinsics.at (0, 0) = focal_length_w; intrinsics.at (1, 1) = focal_length_h; @@ -286,7 +286,7 @@ void Tracker::run() cv::TermCriteria(cv::TermCriteria::MAX_ITER | cv::TermCriteria::EPS, 20, 1e-2)); - const float size = 3.5; + const float size = 7; const double p = s.marker_pitch; const double sq = sin(p * HT_PI / 180); -- cgit v1.2.3 From 488198a4b440936a7fb7fc90c5527a32a0f0b249 Mon Sep 17 00:00:00 2001 From: Stanislaw Halik Date: Sun, 12 Jan 2014 19:50:00 +0100 Subject: Revert "FINALLY fix d_order -> image pos" This reverts commit 4bfda0171465193476cebb03373ae165ab9e67ab. 
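The two point-coordinate commits being reverted here disagreed about how to map the extractor's normalized coordinates back to pixels. Below is a sketch of the convention they were converging on, assuming both axes are normalized by the frame width and that image rows grow downwards; it is an illustration, not the committed code:

#include <opencv2/core/core.hpp>

// Normalized point (x roughly in -0.5..0.5, both axes scaled by width)
// to pixel coordinates, flipping y because image rows grow downwards.
static cv::Point2f to_pixels(const cv::Vec2f& p, int w, int h)
{
    return cv::Point2f( p[0] * w + 0.5f * w,
                       -p[1] * w + 0.5f * h);
}
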
--- FTNoIR_Tracker_PT/ftnoir_tracker_pt.cpp | 3 ++- FTNoIR_Tracker_PT/point_tracker.cpp | 2 +- 2 files changed, 3 insertions(+), 2 deletions(-) (limited to 'FTNoIR_Tracker_PT/point_tracker.cpp') diff --git a/FTNoIR_Tracker_PT/ftnoir_tracker_pt.cpp b/FTNoIR_Tracker_PT/ftnoir_tracker_pt.cpp index c949b8d0..102f62ee 100644 --- a/FTNoIR_Tracker_PT/ftnoir_tracker_pt.cpp +++ b/FTNoIR_Tracker_PT/ftnoir_tracker_pt.cpp @@ -85,7 +85,8 @@ void Tracker::run() const std::vector& points = point_extractor.extract_points(frame, dt, true); for (auto p : points) { - auto p2 = cv::Point(p[0] * frame.cols + frame.cols/2, -p[1] * frame.cols + frame.rows/2); + auto p2 = cv::Point(p[0] * frame.cols + frame.cols/2, + -p[1] * frame.cols + frame.rows/2); cv::Scalar color(0, 255, 0); cv::line(frame, cv::Point(p2.x - 20, p2.y), diff --git a/FTNoIR_Tracker_PT/point_tracker.cpp b/FTNoIR_Tracker_PT/point_tracker.cpp index 96d9d972..faac9963 100644 --- a/FTNoIR_Tracker_PT/point_tracker.cpp +++ b/FTNoIR_Tracker_PT/point_tracker.cpp @@ -244,7 +244,7 @@ void PointTracker::POSIT(float fov, int w, int h) for (int i = 0; i < 3; i++) { - auto p2 = cv::Point(p[i][0] * w + w/2, -p[i][1] * w + h/2); + auto p2 = cv::Point(p[i][0] * w + w/2, -p[i][1] * h + w/2); img_points.push_back(p2); } -- cgit v1.2.3 From f281ba0224b31eec31469372b3cbc27706a9b381 Mon Sep 17 00:00:00 2001 From: Stanislaw Halik Date: Sun, 12 Jan 2014 19:50:03 +0100 Subject: Revert "use correct scale for point coords" This reverts commit 3ea8dbeb7574e7a61388adcc3a9fd722798504fe. --- FTNoIR_Tracker_PT/ftnoir_tracker_pt.cpp | 2 +- FTNoIR_Tracker_PT/point_tracker.cpp | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) (limited to 'FTNoIR_Tracker_PT/point_tracker.cpp') diff --git a/FTNoIR_Tracker_PT/ftnoir_tracker_pt.cpp b/FTNoIR_Tracker_PT/ftnoir_tracker_pt.cpp index 9f74127f..c94fa3ff 100644 --- a/FTNoIR_Tracker_PT/ftnoir_tracker_pt.cpp +++ b/FTNoIR_Tracker_PT/ftnoir_tracker_pt.cpp @@ -86,7 +86,7 @@ void Tracker::run() for (auto p : points) { auto p2 = cv::Point(p[0] * frame.cols + frame.cols/2, - -p[1] * frame.cols + frame.rows/2); + p[1] * frame.rows + frame.rows/2); cv::Scalar color(0, 255, 0); qDebug() << p2.x << p2.y; cv::line(frame, diff --git a/FTNoIR_Tracker_PT/point_tracker.cpp b/FTNoIR_Tracker_PT/point_tracker.cpp index faac9963..69d46645 100644 --- a/FTNoIR_Tracker_PT/point_tracker.cpp +++ b/FTNoIR_Tracker_PT/point_tracker.cpp @@ -244,7 +244,7 @@ void PointTracker::POSIT(float fov, int w, int h) for (int i = 0; i < 3; i++) { - auto p2 = cv::Point(p[i][0] * w + w/2, -p[i][1] * h + w/2); + auto p2 = cv::Point(p[i][0] * w + w/2, p[i][1] * h + h/2); img_points.push_back(p2); } -- cgit v1.2.3 From 4e4e72f6c9c2c133f050c04c4f9117aebfd1ee4b Mon Sep 17 00:00:00 2001 From: Stanislaw Halik Date: Sun, 12 Jan 2014 19:50:05 +0100 Subject: Revert "reset levmarq internal state on reset (duh)" This reverts commit 476b4fd96effe8108ad65930bb90cb2a4bed2216. 
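The point of the commit being reverted here was that cv::solvePnP with useExtrinsicGuess set refines whatever rvec/tvec it is handed, so a stale pose survives a tracker reset unless the vectors are cleared. A hedged sketch of that interaction; the names mirror the tracker's members but this is an illustration only:

#include <opencv2/calib3d/calib3d.hpp>
#include <vector>

static void solve_pose(const std::vector<cv::Point3f>& obj_points,
                       const std::vector<cv::Point2f>& img_points,
                       const cv::Mat& intrinsics, const cv::Mat& dist_coeffs,
                       cv::Mat& rvec, cv::Mat& tvec)
{
    // Reuse the previous solution as a starting guess only if one exists.
    const bool use_guess = !rvec.empty() && !tvec.empty();
    cv::solvePnP(obj_points, img_points, intrinsics, dist_coeffs,
                 rvec, tvec, use_guess, cv::ITERATIVE);
}

static void reset_pose(cv::Mat& rvec, cv::Mat& tvec)
{
    // Emptying the vectors forces the next solvePnP call to start from scratch.
    rvec = cv::Mat();
    tvec = cv::Mat();
}
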
--- FTNoIR_Tracker_PT/point_tracker.cpp | 2 -- 1 file changed, 2 deletions(-) (limited to 'FTNoIR_Tracker_PT/point_tracker.cpp') diff --git a/FTNoIR_Tracker_PT/point_tracker.cpp b/FTNoIR_Tracker_PT/point_tracker.cpp index 69d46645..2dd89aed 100644 --- a/FTNoIR_Tracker_PT/point_tracker.cpp +++ b/FTNoIR_Tracker_PT/point_tracker.cpp @@ -104,8 +104,6 @@ void PointTracker::reset() // assume identity rotation again X_CM.R = cv::Matx33f::eye(); X_CM.t = cv::Vec3f(0, 0, 1); - rvec = Mat(); - tvec = Mat(); } void PointTracker::reset_velocities() -- cgit v1.2.3 From 421f63c8ff3d434fec8a93e38e9178cdb3c593c6 Mon Sep 17 00:00:00 2001 From: Stanislaw Halik Date: Sun, 12 Jan 2014 19:50:06 +0100 Subject: Revert "fix translation estimation" This reverts commit 5cf25c6d0e524d21ea3eb9691f922b8c9b2fd73d. --- FTNoIR_Tracker_PT/point_tracker.cpp | 8 +++----- 1 file changed, 3 insertions(+), 5 deletions(-) (limited to 'FTNoIR_Tracker_PT/point_tracker.cpp') diff --git a/FTNoIR_Tracker_PT/point_tracker.cpp b/FTNoIR_Tracker_PT/point_tracker.cpp index 2dd89aed..a881da62 100644 --- a/FTNoIR_Tracker_PT/point_tracker.cpp +++ b/FTNoIR_Tracker_PT/point_tracker.cpp @@ -240,11 +240,9 @@ void PointTracker::POSIT(float fov, int w, int h) obj_points.push_back(point_model->M01); obj_points.push_back(point_model->M02); - for (int i = 0; i < 3; i++) - { - auto p2 = cv::Point(p[i][0] * w + w/2, p[i][1] * h + h/2); - img_points.push_back(p2); - } + img_points.push_back(p[0]); + img_points.push_back(p[1]); + img_points.push_back(p[2]); const float HT_PI = 3.1415926535; -- cgit v1.2.3 From 302dfd055bd368752216701a0d0e963d5bf6d3e9 Mon Sep 17 00:00:00 2001 From: Stanislaw Halik Date: Sun, 12 Jan 2014 19:53:59 +0100 Subject: Revert "remove hack of passing fov/w/h as member vars" This reverts commit 6e1610667fb33517299997e628652fd91380e762. --- FTNoIR_Tracker_PT/point_tracker.cpp | 19 +++++++++++++------ FTNoIR_Tracker_PT/point_tracker.h | 8 ++++++-- 2 files changed, 19 insertions(+), 8 deletions(-) (limited to 'FTNoIR_Tracker_PT/point_tracker.cpp') diff --git a/FTNoIR_Tracker_PT/point_tracker.cpp b/FTNoIR_Tracker_PT/point_tracker.cpp index a881da62..cce327e9 100644 --- a/FTNoIR_Tracker_PT/point_tracker.cpp +++ b/FTNoIR_Tracker_PT/point_tracker.cpp @@ -91,7 +91,10 @@ PointTracker::PointTracker() dt_reset(1), v_t(0,0,0), v_r(0,0,0), - dynamic_pose_resolution(true) + dynamic_pose_resolution(true), + fov(0), + _w(0), + _h(0) { } @@ -141,7 +144,7 @@ bool PointTracker::track(const vector& points, float fov, float dt, int w predict(dt_valid); // if there is a point correspondence problem something has gone wrong, do a reset - if (!find_correspondences(points, fov, w, h)) + if (!find_correspondences(points)) { //qDebug()<<"Error in finding point correspondences!"; X_CM = X_CM_old; // undo prediction @@ -178,7 +181,7 @@ void PointTracker::update_velocities(float dt) v_t = (X_CM.t - X_CM_old.t)/dt; } -bool PointTracker::find_correspondences(const vector& points, float fov, int w, int h) +bool PointTracker::find_correspondences(const vector& points) { if (init_phase) { // We do a simple freetrack-like sorting in the init phase... @@ -195,9 +198,9 @@ bool PointTracker::find_correspondences(const vector& points, float fov, else { // ... 
otherwise we look at the distance to the projection of the expected model points // project model points under current pose - p_exp[0] = project(Vec3f(0,0,0), fov, w, h); - p_exp[1] = project(point_model->M01, fov, w, h); - p_exp[2] = project(point_model->M02, fov, w, h); + p_exp[0] = project(Vec3f(0,0,0)); + p_exp[1] = project(point_model->M01); + p_exp[2] = project(point_model->M02); // set correspondences by minimum distance to projected model point bool point_taken[PointModel::N_POINTS]; @@ -233,6 +236,10 @@ bool PointTracker::find_correspondences(const vector& points, float fov, void PointTracker::POSIT(float fov, int w, int h) { + // XXX hack + this->fov = fov; + _w = w; + _h = h; std::vector obj_points; std::vector img_points; diff --git a/FTNoIR_Tracker_PT/point_tracker.h b/FTNoIR_Tracker_PT/point_tracker.h index 741d5af4..ac43489e 100644 --- a/FTNoIR_Tracker_PT/point_tracker.h +++ b/FTNoIR_Tracker_PT/point_tracker.h @@ -98,12 +98,16 @@ public: FrameTrafo get_pose() const { return X_CM; } void reset(); + float fov; + int _w, _h; + protected: - cv::Vec2f project(const cv::Vec3f& v_M, float fov, int w, int h) + cv::Vec2f project(const cv::Vec3f& v_M) { if (!rvec.empty() && !tvec.empty() && fov > 0) { const float HT_PI = 3.1415926535; + const int w = _w, h = _h; const float focal_length_w = 0.5 * w / tan(fov * HT_PI / 180); const float focal_length_h = 0.5 * h / tan(fov * h / w * HT_PI / 180.0); @@ -122,7 +126,7 @@ protected: return cv::Vec2f(); } - bool find_correspondences(const std::vector& points, float fov, int w, int h); + bool find_correspondences(const std::vector& points); cv::Vec2f p[PointModel::N_POINTS]; // the points in model order cv::Vec2f p_exp[PointModel::N_POINTS]; // the expected point positions -- cgit v1.2.3 From 7a7f7e70a6f104c5838b162f4d28afa6a8ae1d5f Mon Sep 17 00:00:00 2001 From: Stanislaw Halik Date: Sun, 12 Jan 2014 19:54:00 +0100 Subject: Revert "remove an outdated comment" This reverts commit 7be138a8ece3cd90e0cbca0b30e812d0a19d10c6. --- FTNoIR_Tracker_PT/point_tracker.cpp | 1 + 1 file changed, 1 insertion(+) (limited to 'FTNoIR_Tracker_PT/point_tracker.cpp') diff --git a/FTNoIR_Tracker_PT/point_tracker.cpp b/FTNoIR_Tracker_PT/point_tracker.cpp index cce327e9..c0221909 100644 --- a/FTNoIR_Tracker_PT/point_tracker.cpp +++ b/FTNoIR_Tracker_PT/point_tracker.cpp @@ -152,6 +152,7 @@ bool PointTracker::track(const vector& points, float fov, float dt, int w return false; } + // XXX TODO fov POSIT(fov, w, h); //qDebug()<<"Number of POSIT iterations: "< Date: Sun, 12 Jan 2014 19:54:01 +0100 Subject: Revert "don't touch tvec scale" This reverts commit 69bc6490911f062c7eb72ff73431bfe7381034fd. --- FTNoIR_Tracker_PT/point_tracker.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) (limited to 'FTNoIR_Tracker_PT/point_tracker.cpp') diff --git a/FTNoIR_Tracker_PT/point_tracker.cpp b/FTNoIR_Tracker_PT/point_tracker.cpp index c0221909..9f0fb59e 100644 --- a/FTNoIR_Tracker_PT/point_tracker.cpp +++ b/FTNoIR_Tracker_PT/point_tracker.cpp @@ -290,7 +290,7 @@ void PointTracker::POSIT(float fov, int w, int h) // apply results for (int i = 0; i < 3; i++) { - X_CM.t[i] = tvec.at(i); + X_CM.t[i] = tvec.at(i) * 1e-2; for (int j = 0; j < 3; j++) X_CM.R(i, j) = rmat.at(i, j); } -- cgit v1.2.3 From 80c43ba62517d1228423f9d1992999edda612616 Mon Sep 17 00:00:00 2001 From: Stanislaw Halik Date: Sun, 12 Jan 2014 19:54:02 +0100 Subject: Revert "fix crash due to mismatched float/double" This reverts commit 9fc8e2da02f2e5c2295199826e408cc026d6eae6. 
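The crash this revert reinstates came from mixing matrix depths: solvePnP returns a CV_64F rvec, so cv::Rodrigues produces a CV_64F rotation, while a cv::Mat built directly from a cv::Matx33f is CV_32F, and multiplying the two fails OpenCV's type checks. A small sketch of the element-wise conversion the reverted fix relied on; the helper name is illustrative, not from the committed code:

#include <opencv2/core/core.hpp>

// Copy a single-precision 3x3 rotation into a CV_64F cv::Mat so it can be
// multiplied with the double-precision output of cv::Rodrigues.
static cv::Mat to_double(const cv::Matx33f& R)
{
    cv::Mat out(3, 3, CV_64FC1);
    for (int i = 0; i < 3; i++)
        for (int j = 0; j < 3; j++)
            out.at<double>(i, j) = R(i, j);
    return out;
}
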
--- FTNoIR_Tracker_PT/point_tracker.cpp | 11 ++++------- FTNoIR_Tracker_PT/point_tracker.h | 14 +++++++------- FTNoIR_Tracker_PT/trans_calib.cpp | 4 ++-- 3 files changed, 13 insertions(+), 16 deletions(-) (limited to 'FTNoIR_Tracker_PT/point_tracker.cpp') diff --git a/FTNoIR_Tracker_PT/point_tracker.cpp b/FTNoIR_Tracker_PT/point_tracker.cpp index 9f0fb59e..263be43a 100644 --- a/FTNoIR_Tracker_PT/point_tracker.cpp +++ b/FTNoIR_Tracker_PT/point_tracker.cpp @@ -105,8 +105,8 @@ void PointTracker::reset() dt_valid = 0; reset_velocities(); // assume identity rotation again - X_CM.R = cv::Matx33f::eye(); - X_CM.t = cv::Vec3f(0, 0, 1); + X_CM.R = cv::Matx33d::eye(); + X_CM.t = cv::Vec3f(); } void PointTracker::reset_velocities() @@ -168,7 +168,7 @@ bool PointTracker::track(const vector& points, float fov, float dt, int w void PointTracker::predict(float dt) { // predict with constant velocity - Matx33f R; + Matx33d R; Rodrigues(dt*v_r, R); X_CM.R = R*X_CM.R; X_CM.t += dt * v_t; @@ -273,10 +273,7 @@ void PointTracker::POSIT(float fov, int w, int h) cv::Rodrigues(rvec, rmat); // finally, find the closer solution - cv::Mat expected(3, 3, CV_64FC1); - for (int i = 0; i < 3; i++) - for (int j = 0; j < 3; j++) - expected.at(i, j) = X_CM.R(i, j); + cv::Mat expected = cv::Mat(X_CM.R); cv::Mat eye = cv::Mat::eye(3, 3, CV_64FC1); double dev1 = norm(eye - expected * rmat.t()); double dev2 = norm(eye - expected * rmat); diff --git a/FTNoIR_Tracker_PT/point_tracker.h b/FTNoIR_Tracker_PT/point_tracker.h index ac43489e..823d75c0 100644 --- a/FTNoIR_Tracker_PT/point_tracker.h +++ b/FTNoIR_Tracker_PT/point_tracker.h @@ -21,11 +21,11 @@ class FrameTrafo { public: - FrameTrafo() : R(cv::Matx33f::eye()), t(0,0,0) {} - FrameTrafo(const cv::Matx33f& R, const cv::Vec3f& t) : R(R),t(t) {} + FrameTrafo() : R(cv::Matx33d::eye()), t(0,0,0) {} + FrameTrafo(const cv::Matx33d& R, const cv::Vec3d& t) : R(R),t(t) {} - cv::Matx33f R; - cv::Vec3f t; + cv::Matx33d R; + cv::Vec3d t; }; inline FrameTrafo operator*(const FrameTrafo& X, const FrameTrafo& Y) @@ -33,17 +33,17 @@ inline FrameTrafo operator*(const FrameTrafo& X, const FrameTrafo& Y) return FrameTrafo(X.R*Y.R, X.R*Y.t + X.t); } -inline FrameTrafo operator*(const cv::Matx33f& X, const FrameTrafo& Y) +inline FrameTrafo operator*(const cv::Matx33d& X, const FrameTrafo& Y) { return FrameTrafo(X*Y.R, X*Y.t); } -inline FrameTrafo operator*(const FrameTrafo& X, const cv::Matx33f& Y) +inline FrameTrafo operator*(const FrameTrafo& X, const cv::Matx33d& Y) { return FrameTrafo(X.R*Y, X.t); } -inline cv::Vec3f operator*(const FrameTrafo& X, const cv::Vec3f& v) +inline cv::Vec3f operator*(const FrameTrafo& X, const cv::Vec3d& v) { return X.R*v + X.t; } diff --git a/FTNoIR_Tracker_PT/trans_calib.cpp b/FTNoIR_Tracker_PT/trans_calib.cpp index b2e0ead0..9b75a1b6 100644 --- a/FTNoIR_Tracker_PT/trans_calib.cpp +++ b/FTNoIR_Tracker_PT/trans_calib.cpp @@ -40,5 +40,5 @@ void TranslationCalibrator::update(const Matx33f& R_CM_k, const Vec3f& t_CM_k) Vec3f TranslationCalibrator::get_estimate() { Vec6f x = P.inv() * y; - return Vec3f(-x[0], -x[1], -x[2]); -} + return Vec3f(-x[0], -x[1], -x[2]); +} \ No newline at end of file -- cgit v1.2.3 From 9d21e7d23fa17b91fb76a14d90630b027b78c2c8 Mon Sep 17 00:00:00 2001 From: Stanislaw Halik Date: Sun, 12 Jan 2014 19:54:03 +0100 Subject: Revert "buffer flush" This reverts commit 0f6a0d8b20aa1d7415f6d0596f91ea9b766ecf69. 
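Among the things this revert removes is the disambiguation step that compares a candidate rotation against the expected one with the metric d(A, B) = ||I - A*B^T||, the same metric the original POSIT code uses further down. A compact sketch of that selection, for illustration only:

#include <opencv2/core/core.hpp>

// Return whichever candidate rotation is closer to the expected one,
// measuring distance as ||I - expected * R^T||.
static cv::Matx33f pick_closer(const cv::Matx33f& expected,
                               const cv::Matx33f& R1, const cv::Matx33f& R2)
{
    const double d1 = cv::norm(cv::Matx33f::eye() - expected * R1.t());
    const double d2 = cv::norm(cv::Matx33f::eye() - expected * R2.t());
    return d1 < d2 ? R1 : R2;
}
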
--- FTNoIR_Tracker_PT/ftnoir_tracker_pt.cpp | 30 ++++++++++++++++-------------- FTNoIR_Tracker_PT/point_tracker.cpp | 33 +++++++++------------------------ FTNoIR_Tracker_PT/point_tracker.h | 18 +++++++++--------- 3 files changed, 34 insertions(+), 47 deletions(-) (limited to 'FTNoIR_Tracker_PT/point_tracker.cpp') diff --git a/FTNoIR_Tracker_PT/ftnoir_tracker_pt.cpp b/FTNoIR_Tracker_PT/ftnoir_tracker_pt.cpp index 83bf6911..740cf3da 100644 --- a/FTNoIR_Tracker_PT/ftnoir_tracker_pt.cpp +++ b/FTNoIR_Tracker_PT/ftnoir_tracker_pt.cpp @@ -82,8 +82,8 @@ void Tracker::run() if (new_frame && !frame.empty()) { frame = frame_rotation.rotate_frame(frame); - const std::vector& points = point_extractor.extract_points(frame, dt, true); - tracking_valid = point_tracker.track(points, camera.get_info().fov, dt, frame.cols, frame.rows); + const std::vector& points = point_extractor.extract_points(frame, dt, false); + tracking_valid = point_tracker.track(points, camera.get_info().fov, dt, frame.cols, frame.rows, t_MH); video_widget->update_image(frame); } #ifdef PT_PERF_LOG @@ -118,13 +118,15 @@ void Tracker::apply(settings& s) point_tracker.dt_reset = s.reset_time / 1000.0; t_MH = cv::Vec3f(s.t_MH_x, s.t_MH_y, s.t_MH_z); R_GC = Matx33f( cos(deg2rad*s.cam_yaw), 0, sin(deg2rad*s.cam_yaw), - 0, 1, 0, - -sin(deg2rad*s.cam_yaw), 0, cos(deg2rad*s.cam_yaw)); - R_GC = R_GC * Matx33f( 1, 0, 0, + 0, 1, 0, + -sin(deg2rad*s.cam_yaw), 0, cos(deg2rad*s.cam_yaw)); + R_GC = R_GC * Matx33f( 1, 0, 0, 0, cos(deg2rad*s.cam_pitch), sin(deg2rad*s.cam_pitch), 0, -sin(deg2rad*s.cam_pitch), cos(deg2rad*s.cam_pitch)); - FrameTrafo X_MH(Matx33f::eye(), t_MH); - X_GH_0 = R_GC * X_MH; + + FrameTrafo X_MH(Matx33f::eye(), t_MH); + X_GH_0 = R_GC * X_MH; + qDebug()<<"Tracker::apply ends"; } @@ -137,10 +139,10 @@ void Tracker::reset() void Tracker::center() { point_tracker.reset(); // we also do a reset here since there is no reset shortkey yet - QMutexLocker lock(&mutex); - FrameTrafo X_CM_0 = point_tracker.get_pose(); - FrameTrafo X_MH(Matx33f::eye(), t_MH); - X_GH_0 = R_GC * X_CM_0 * X_MH; + QMutexLocker lock(&mutex); + FrameTrafo X_CM_0 = point_tracker.get_pose(); + FrameTrafo X_MH(Matx33f::eye(), t_MH); + X_GH_0 = R_GC * X_CM_0 * X_MH; } bool Tracker::get_frame_and_points(cv::Mat& frame_copy, boost::shared_ptr< std::vector >& points) @@ -195,10 +197,10 @@ void Tracker::GetHeadPoseData(THeadPoseData *data) if (!tracking_valid) return; FrameTrafo X_CM = point_tracker.get_pose(); - FrameTrafo X_MH(Matx33f::eye(), t_MH); - FrameTrafo X_GH = R_GC * X_CM * X_MH; + FrameTrafo X_MH(Matx33f::eye(), t_MH); + FrameTrafo X_GH = R_GC * X_CM * X_MH; Matx33f R = X_GH.R * X_GH_0.R.t(); - Vec3f t = X_GH.t - X_GH_0.t; + Vec3f t = X_GH.t - X_GH_0.t; // get translation(s) if (s.bEnableX) data[TX] = t[0] / 10.0; // convert to cm diff --git a/FTNoIR_Tracker_PT/point_tracker.cpp b/FTNoIR_Tracker_PT/point_tracker.cpp index 263be43a..1df70b17 100644 --- a/FTNoIR_Tracker_PT/point_tracker.cpp +++ b/FTNoIR_Tracker_PT/point_tracker.cpp @@ -104,9 +104,6 @@ void PointTracker::reset() init_phase = true; dt_valid = 0; reset_velocities(); - // assume identity rotation again - X_CM.R = cv::Matx33d::eye(); - X_CM.t = cv::Vec3f(); } void PointTracker::reset_velocities() @@ -116,7 +113,7 @@ void PointTracker::reset_velocities() } -bool PointTracker::track(const vector& points, float fov, float dt, int w, int h) +bool PointTracker::track(const vector& points, float fov, float dt, int w, int h, const cv::Vec3f& headpos) { if (!dynamic_pose_resolution) init_phase = true; @@ 
-153,7 +150,7 @@ bool PointTracker::track(const vector& points, float fov, float dt, int w } // XXX TODO fov - POSIT(fov, w, h); + POSIT(fov, w, h, headpos); //qDebug()<<"Number of POSIT iterations: "<& points, float fov, float dt, int w void PointTracker::predict(float dt) { // predict with constant velocity - Matx33d R; + Matx33f R; Rodrigues(dt*v_r, R); X_CM.R = R*X_CM.R; X_CM.t += dt * v_t; @@ -235,7 +232,7 @@ bool PointTracker::find_correspondences(const vector& points) -void PointTracker::POSIT(float fov, int w, int h) +void PointTracker::POSIT(float fov, int w, int h, const cv::Vec3f& headpos) { // XXX hack this->fov = fov; @@ -244,9 +241,9 @@ void PointTracker::POSIT(float fov, int w, int h) std::vector obj_points; std::vector img_points; - obj_points.push_back(cv::Vec3f(0, 0, 0)); - obj_points.push_back(point_model->M01); - obj_points.push_back(point_model->M02); + obj_points.push_back(headpos); + obj_points.push_back(point_model->M01 + headpos); + obj_points.push_back(point_model->M02 + headpos); img_points.push_back(p[0]); img_points.push_back(p[1]); @@ -265,29 +262,17 @@ void PointTracker::POSIT(float fov, int w, int h) cv::Mat dist_coeffs = cv::Mat::zeros(5, 1, CV_32FC1); - bool lastp = !rvec.empty() && !tvec.empty(); + bool lastp = !rvec.empty() && !tvec.empty() && !init_phase; cv::solvePnP(obj_points, img_points, intrinsics, dist_coeffs, rvec, tvec, lastp, cv::ITERATIVE); cv::Mat rmat; cv::Rodrigues(rvec, rmat); - // finally, find the closer solution - cv::Mat expected = cv::Mat(X_CM.R); - cv::Mat eye = cv::Mat::eye(3, 3, CV_64FC1); - double dev1 = norm(eye - expected * rmat.t()); - double dev2 = norm(eye - expected * rmat); - - if (dev1 > dev2) - { - rmat = rmat.t(); - cv::Rodrigues(rmat, rvec); - } - // apply results for (int i = 0; i < 3; i++) { - X_CM.t[i] = tvec.at(i) * 1e-2; + X_CM.t[i] = tvec.at(i); for (int j = 0; j < 3; j++) X_CM.R(i, j) = rmat.at(i, j); } diff --git a/FTNoIR_Tracker_PT/point_tracker.h b/FTNoIR_Tracker_PT/point_tracker.h index 823d75c0..e05e8f98 100644 --- a/FTNoIR_Tracker_PT/point_tracker.h +++ b/FTNoIR_Tracker_PT/point_tracker.h @@ -21,11 +21,11 @@ class FrameTrafo { public: - FrameTrafo() : R(cv::Matx33d::eye()), t(0,0,0) {} - FrameTrafo(const cv::Matx33d& R, const cv::Vec3d& t) : R(R),t(t) {} + FrameTrafo() : R(cv::Matx33f::eye()), t(0,0,0) {} + FrameTrafo(const cv::Matx33f& R, const cv::Vec3f& t) : R(R),t(t) {} - cv::Matx33d R; - cv::Vec3d t; + cv::Matx33f R; + cv::Vec3f t; }; inline FrameTrafo operator*(const FrameTrafo& X, const FrameTrafo& Y) @@ -33,17 +33,17 @@ inline FrameTrafo operator*(const FrameTrafo& X, const FrameTrafo& Y) return FrameTrafo(X.R*Y.R, X.R*Y.t + X.t); } -inline FrameTrafo operator*(const cv::Matx33d& X, const FrameTrafo& Y) +inline FrameTrafo operator*(const cv::Matx33f& X, const FrameTrafo& Y) { return FrameTrafo(X*Y.R, X*Y.t); } -inline FrameTrafo operator*(const FrameTrafo& X, const cv::Matx33d& Y) +inline FrameTrafo operator*(const FrameTrafo& X, const cv::Matx33f& Y) { return FrameTrafo(X.R*Y, X.t); } -inline cv::Vec3f operator*(const FrameTrafo& X, const cv::Vec3d& v) +inline cv::Vec3f operator*(const FrameTrafo& X, const cv::Vec3f& v) { return X.R*v + X.t; } @@ -90,7 +90,7 @@ public: // track the pose using the set of normalized point coordinates (x pos in range -0.5:0.5) // f : (focal length)/(sensor width) // dt : time since last call - bool track(const std::vector& points, float fov, float dt, int w, int h); + bool track(const std::vector& points, float fov, float dt, int w, int h, const cv::Vec3f &headpos); 
boost::shared_ptr point_model; bool dynamic_pose_resolution; @@ -136,7 +136,7 @@ protected: void reset_velocities(); - void POSIT(float fov, int w, int h); // The POSIT algorithm, returns the number of iterations + void POSIT(float fov, int w, int h, const cv::Vec3f &headpos); // The POSIT algorithm, returns the number of iterations bool init_phase; float dt_valid; // time since last valid tracking result -- cgit v1.2.3 From 689c297be28e2f5050e234bdc69594c53166f194 Mon Sep 17 00:00:00 2001 From: Stanislaw Halik Date: Sun, 12 Jan 2014 19:54:04 +0100 Subject: Revert "use levmarq instead of coplanar POSIT implemented in numerically unstable fashion" This reverts commit 0bc59a0a7a42bcf65ebf3814e5f25fb17d735faa. --- FTNoIR_Tracker_PT/FTNoIR_PT_Controls.ui | 13 +- FTNoIR_Tracker_PT/camera.cpp | 8 +- FTNoIR_Tracker_PT/camera.h | 6 +- FTNoIR_Tracker_PT/ftnoir_tracker_pt.cpp | 4 +- FTNoIR_Tracker_PT/ftnoir_tracker_pt_dialog.cpp | 2 +- FTNoIR_Tracker_PT/ftnoir_tracker_pt_settings.h | 4 +- FTNoIR_Tracker_PT/point_tracker.cpp | 191 +++++++++++++++++++------ FTNoIR_Tracker_PT/point_tracker.h | 33 +---- 8 files changed, 169 insertions(+), 92 deletions(-) (limited to 'FTNoIR_Tracker_PT/point_tracker.cpp') diff --git a/FTNoIR_Tracker_PT/FTNoIR_PT_Controls.ui b/FTNoIR_Tracker_PT/FTNoIR_PT_Controls.ui index 461253cf..bdbed955 100644 --- a/FTNoIR_Tracker_PT/FTNoIR_PT_Controls.ui +++ b/FTNoIR_Tracker_PT/FTNoIR_PT_Controls.ui @@ -515,26 +515,23 @@ - FOV + F/W - fov_dspin + f_dspin - + The camera's focal length devided by its sensor width 2 - - 1.000000000000000 - - 1.000000000000000 + 0.100000000000000 @@ -1736,7 +1733,7 @@ res_x_spin res_y_spin fps_spin - fov_dspin + f_dspin camroll_combo campitch_spin camyaw_spin diff --git a/FTNoIR_Tracker_PT/camera.cpp b/FTNoIR_Tracker_PT/camera.cpp index 8986be60..754533c5 100644 --- a/FTNoIR_Tracker_PT/camera.cpp +++ b/FTNoIR_Tracker_PT/camera.cpp @@ -108,11 +108,11 @@ void Camera::set_device_index(int index) } } -void Camera::set_fov(float f) +void Camera::set_f(float f) { - if (cam_desired.fov != f) + if (cam_desired.f != f) { - cam_desired.fov = f; + cam_desired.f = f; _set_f(); } } @@ -208,7 +208,7 @@ void CVCamera::_set_index() void CVCamera::_set_f() { - cam_info.fov = cam_desired.fov; + cam_info.f = cam_desired.f; } void CVCamera::_set_fps() diff --git a/FTNoIR_Tracker_PT/camera.h b/FTNoIR_Tracker_PT/camera.h index 6768e419..ea68c387 100644 --- a/FTNoIR_Tracker_PT/camera.h +++ b/FTNoIR_Tracker_PT/camera.h @@ -25,12 +25,12 @@ void get_camera_device_names(std::vector& device_names); // ---------------------------------------------------------------------------- struct CamInfo { - CamInfo() : res_x(0), res_y(0), fps(0), fov(56) {} + CamInfo() : res_x(0), res_y(0), fps(0), f(1) {} int res_x; int res_y; int fps; - float fov; + float f; // (focal length) / (sensor width) }; // ---------------------------------------------------------------------------- @@ -48,7 +48,7 @@ public: // calls corresponding template methods and reinitializes frame rate calculation void set_device_index(int index); - void set_fov(float f); + void set_f(float f); void set_fps(int fps); void set_res(int x_res, int y_res); diff --git a/FTNoIR_Tracker_PT/ftnoir_tracker_pt.cpp b/FTNoIR_Tracker_PT/ftnoir_tracker_pt.cpp index 740cf3da..53d9c9e4 100644 --- a/FTNoIR_Tracker_PT/ftnoir_tracker_pt.cpp +++ b/FTNoIR_Tracker_PT/ftnoir_tracker_pt.cpp @@ -83,7 +83,7 @@ void Tracker::run() { frame = frame_rotation.rotate_frame(frame); const std::vector& points = point_extractor.extract_points(frame, dt, false); 
- tracking_valid = point_tracker.track(points, camera.get_info().fov, dt, frame.cols, frame.rows, t_MH); + tracking_valid = point_tracker.track(points, camera.get_info().f, dt); video_widget->update_image(frame); } #ifdef PT_PERF_LOG @@ -103,7 +103,7 @@ void Tracker::apply(settings& s) camera.set_device_index(s.cam_index); camera.set_res(s.cam_res_x, s.cam_res_y); camera.set_fps(s.cam_fps); - camera.set_fov(s.cam_fov); + camera.set_f(s.cam_f); frame_rotation.rotation = static_cast(static_cast(s.cam_roll)); point_extractor.threshold_val = s.threshold; point_extractor.threshold_secondary_val = s.threshold_secondary; diff --git a/FTNoIR_Tracker_PT/ftnoir_tracker_pt_dialog.cpp b/FTNoIR_Tracker_PT/ftnoir_tracker_pt_dialog.cpp index 4ae20f48..c103b78c 100644 --- a/FTNoIR_Tracker_PT/ftnoir_tracker_pt_dialog.cpp +++ b/FTNoIR_Tracker_PT/ftnoir_tracker_pt_dialog.cpp @@ -46,7 +46,7 @@ TrackerDialog::TrackerDialog() tie_setting(s.reset_time, ui.reset_spin); tie_setting(s.cam_index, ui.camdevice_combo); - tie_setting(s.cam_fov, ui.fov_dspin); + tie_setting(s.cam_f, ui.f_dspin); tie_setting(s.cam_res_x, ui.res_x_spin); tie_setting(s.cam_res_y, ui.res_y_spin); tie_setting(s.cam_fps, ui.fps_spin); diff --git a/FTNoIR_Tracker_PT/ftnoir_tracker_pt_settings.h b/FTNoIR_Tracker_PT/ftnoir_tracker_pt_settings.h index b2ab8854..109090b3 100644 --- a/FTNoIR_Tracker_PT/ftnoir_tracker_pt_settings.h +++ b/FTNoIR_Tracker_PT/ftnoir_tracker_pt_settings.h @@ -28,7 +28,7 @@ struct settings threshold_secondary, min_point_size, max_point_size; - value cam_fov; + value cam_f; value m01_x, m01_y, m01_z; value m02_x, m02_y, m02_z; @@ -57,7 +57,7 @@ struct settings threshold_secondary(b, "threshold-secondary", 128), min_point_size(b, "min-point-size", 10), max_point_size(b, "max-point-size", 50), - cam_fov(b, "camera-fov", 56), + cam_f(b, "camera-focal-length", 1), m01_x(b, "m_01-x", 0), m01_y(b, "m_01-y", 0), m01_z(b, "m_01-z", 0), diff --git a/FTNoIR_Tracker_PT/point_tracker.cpp b/FTNoIR_Tracker_PT/point_tracker.cpp index 1df70b17..dfefdaf8 100644 --- a/FTNoIR_Tracker_PT/point_tracker.cpp +++ b/FTNoIR_Tracker_PT/point_tracker.cpp @@ -17,6 +17,23 @@ using namespace cv; using namespace boost; using namespace std; +const float PI = 3.14159265358979323846f; + +// ---------------------------------------------------------------------------- +static void get_row(const Matx33f& m, int i, Vec3f& v) +{ + v[0] = m(i,0); + v[1] = m(i,1); + v[2] = m(i,2); +} + +static void set_row(Matx33f& m, int i, const Vec3f& v) +{ + m(i,0) = v[0]; + m(i,1) = v[1]; + m(i,2) = v[2]; +} + // ---------------------------------------------------------------------------- PointModel::PointModel(Vec3f M01, Vec3f M02) : M01(M01), @@ -91,11 +108,9 @@ PointTracker::PointTracker() dt_reset(1), v_t(0,0,0), v_r(0,0,0), - dynamic_pose_resolution(true), - fov(0), - _w(0), - _h(0) + dynamic_pose_resolution(true) { + X_CM.t[2] = 1000; // default position: 1 m away from cam; } void PointTracker::reset() @@ -113,7 +128,7 @@ void PointTracker::reset_velocities() } -bool PointTracker::track(const vector& points, float fov, float dt, int w, int h, const cv::Vec3f& headpos) +bool PointTracker::track(const vector& points, float f, float dt) { if (!dynamic_pose_resolution) init_phase = true; @@ -125,7 +140,12 @@ bool PointTracker::track(const vector& points, float fov, float dt, int w reset(); } - bool no_model = !point_model; + bool no_model = +#ifdef OPENTRACK_API + point_model.get() == NULL; +#else + !point_model; +#endif // if there is a pointtracking problem, reset the 
velocities if (no_model || points.size() != PointModel::N_POINTS) @@ -141,7 +161,7 @@ bool PointTracker::track(const vector& points, float fov, float dt, int w predict(dt_valid); // if there is a point correspondence problem something has gone wrong, do a reset - if (!find_correspondences(points)) + if (!find_correspondences(points, f)) { //qDebug()<<"Error in finding point correspondences!"; X_CM = X_CM_old; // undo prediction @@ -149,8 +169,7 @@ bool PointTracker::track(const vector& points, float fov, float dt, int w return false; } - // XXX TODO fov - POSIT(fov, w, h, headpos); + int n_iter = POSIT(f); //qDebug()<<"Number of POSIT iterations: "<& points) +bool PointTracker::find_correspondences(const vector& points, float f) { if (init_phase) { // We do a simple freetrack-like sorting in the init phase... @@ -196,9 +215,9 @@ bool PointTracker::find_correspondences(const vector& points) else { // ... otherwise we look at the distance to the projection of the expected model points // project model points under current pose - p_exp[0] = project(Vec3f(0,0,0)); - p_exp[1] = project(point_model->M01); - p_exp[2] = project(point_model->M02); + p_exp[0] = project(Vec3f(0,0,0), f); + p_exp[1] = project(point_model->M01, f); + p_exp[2] = project(point_model->M02, f); // set correspondences by minimum distance to projected model point bool point_taken[PointModel::N_POINTS]; @@ -232,48 +251,130 @@ bool PointTracker::find_correspondences(const vector& points) -void PointTracker::POSIT(float fov, int w, int h, const cv::Vec3f& headpos) +int PointTracker::POSIT(float f) { - // XXX hack - this->fov = fov; - _w = w; - _h = h; - std::vector obj_points; - std::vector img_points; + // POSIT algorithm for coplanar points as presented in + // [Denis Oberkampf, Daniel F. DeMenthon, Larry S. Davis: "Iterative Pose Estimation Using Coplanar Feature Points"] + // we use the same notation as in the paper here + + // The expected rotation used for resolving the ambiguity in POSIT: + // In every iteration step the rotation closer to R_expected is taken + Matx33f R_expected; + if (init_phase) + R_expected = Matx33f::eye(); // in the init phase, we want to be close to the default pose = no rotation + else + R_expected = X_CM.R; // later we want to be close to the last (predicted) rotation + + // initial pose = last (predicted) pose + Vec3f k; + get_row(R_expected, 2, k); + float Z0 = init_phase ? 
1000 : X_CM.t[2]; + + float old_epsilon_1 = 0; + float old_epsilon_2 = 0; + float epsilon_1 = 1; + float epsilon_2 = 1; + + Vec3f I0, J0; + Vec2f I0_coeff, J0_coeff; + + Vec3f I_1, J_1, I_2, J_2; + Matx33f R_1, R_2; + Matx33f* R_current; + + const int MAX_ITER = 100; + const float EPS_THRESHOLD = 1e-4; + + int i=1; + for (; iM01)/Z0; + epsilon_2 = k.dot(point_model->M02)/Z0; + + // vector of scalar products and + Vec2f I0_M0i(p[1][0]*(1.0 + epsilon_1) - p[0][0], + p[2][0]*(1.0 + epsilon_2) - p[0][0]); + Vec2f J0_M0i(p[1][1]*(1.0 + epsilon_1) - p[0][1], + p[2][1]*(1.0 + epsilon_2) - p[0][1]); + + // construct projection of I, J onto M0i plane: I0 and J0 + I0_coeff = point_model->P * I0_M0i; + J0_coeff = point_model->P * J0_M0i; + I0 = I0_coeff[0]*point_model->M01 + I0_coeff[1]*point_model->M02; + J0 = J0_coeff[0]*point_model->M01 + J0_coeff[1]*point_model->M02; + + // calculate u component of I, J + float II0 = I0.dot(I0); + float IJ0 = I0.dot(J0); + float JJ0 = J0.dot(J0); + float rho, theta; + if (JJ0 == II0) { + rho = sqrt(abs(2*IJ0)); + theta = -PI/4; + if (IJ0<0) theta *= -1; + } + else { + rho = sqrt(sqrt( (JJ0-II0)*(JJ0-II0) + 4*IJ0*IJ0 )); + theta = atan( -2*IJ0 / (JJ0-II0) ); + if (JJ0 - II0 < 0) theta += PI; + theta /= 2; + } - obj_points.push_back(headpos); - obj_points.push_back(point_model->M01 + headpos); - obj_points.push_back(point_model->M02 + headpos); + // construct the two solutions + I_1 = I0 + rho*cos(theta)*point_model->u; + I_2 = I0 - rho*cos(theta)*point_model->u; - img_points.push_back(p[0]); - img_points.push_back(p[1]); - img_points.push_back(p[2]); + J_1 = J0 + rho*sin(theta)*point_model->u; + J_2 = J0 - rho*sin(theta)*point_model->u; - const float HT_PI = 3.1415926535; + float norm_const = 1.0/norm(I_1); // all have the same norm + + // create rotation matrices + I_1 *= norm_const; J_1 *= norm_const; + I_2 *= norm_const; J_2 *= norm_const; - const float focal_length_w = 0.5 * w / tan(fov * HT_PI / 180); - const float focal_length_h = 0.5 * h / tan(fov * h / w * HT_PI / 180.0); + set_row(R_1, 0, I_1); + set_row(R_1, 1, J_1); + set_row(R_1, 2, I_1.cross(J_1)); + + set_row(R_2, 0, I_2); + set_row(R_2, 1, J_2); + set_row(R_2, 2, I_2.cross(J_2)); - cv::Mat intrinsics = cv::Mat::eye(3, 3, CV_32FC1); - intrinsics.at (0, 0) = focal_length_w; - intrinsics.at (1, 1) = focal_length_h; - intrinsics.at (0, 2) = w/2; - intrinsics.at (1, 2) = h/2; + // the single translation solution + Z0 = norm_const * f; - cv::Mat dist_coeffs = cv::Mat::zeros(5, 1, CV_32FC1); + // pick the rotation solution closer to the expected one + // in simple metric d(A,B) = || I - A * B^T || + float R_1_deviation = norm(Matx33f::eye() - R_expected * R_1.t()); + float R_2_deviation = norm(Matx33f::eye() - R_expected * R_2.t()); - bool lastp = !rvec.empty() && !tvec.empty() && !init_phase; + if (R_1_deviation < R_2_deviation) + R_current = &R_1; + else + R_current = &R_2; - cv::solvePnP(obj_points, img_points, intrinsics, dist_coeffs, rvec, tvec, lastp, cv::ITERATIVE); + get_row(*R_current, 2, k); - cv::Mat rmat; - cv::Rodrigues(rvec, rmat); + // check for convergence condition + if (abs(epsilon_1 - old_epsilon_1) + abs(epsilon_2 - old_epsilon_2) < EPS_THRESHOLD) + break; + old_epsilon_1 = epsilon_1; + old_epsilon_2 = epsilon_2; + } // apply results - for (int i = 0; i < 3; i++) - { - X_CM.t[i] = tvec.at(i); - for (int j = 0; j < 3; j++) - X_CM.R(i, j) = rmat.at(i, j); - } + X_CM.R = *R_current; + X_CM.t[0] = p[0][0] * Z0/f; + X_CM.t[1] = p[0][1] * Z0/f; + X_CM.t[2] = Z0; + + return i; + + 
//Rodrigues(X_CM.R, r); + //qDebug()<<"iter: "<& points, float fov, float dt, int w, int h, const cv::Vec3f &headpos); + bool track(const std::vector& points, float f, float dt); boost::shared_ptr point_model; bool dynamic_pose_resolution; @@ -98,35 +98,15 @@ public: FrameTrafo get_pose() const { return X_CM; } void reset(); - float fov; - int _w, _h; protected: - cv::Vec2f project(const cv::Vec3f& v_M) + inline cv::Vec2f project(const cv::Vec3f& v_M, float f) { - if (!rvec.empty() && !tvec.empty() && fov > 0) - { - const float HT_PI = 3.1415926535; - const int w = _w, h = _h; - const float focal_length_w = 0.5 * w / tan(fov * HT_PI / 180); - const float focal_length_h = 0.5 * h / tan(fov * h / w * HT_PI / 180.0); - - cv::Mat intrinsics = cv::Mat::eye(3, 3, CV_32FC1); - intrinsics.at (0, 0) = focal_length_w; - intrinsics.at (1, 1) = focal_length_h; - intrinsics.at (0, 2) = w/2; - intrinsics.at (1, 2) = h/2; - std::vector xs; - xs.push_back(v_M); - cv::Mat dist_coeffs = cv::Mat::zeros(5, 1, CV_32FC1); - std::vector rets(1); - cv::projectPoints(xs, rvec, tvec, intrinsics, dist_coeffs, rets); - return rets[0]; - } - return cv::Vec2f(); + cv::Vec3f v_C = X_CM * v_M; + return cv::Vec2f(f*v_C[0]/v_C[2], f*v_C[1]/v_C[2]); } - bool find_correspondences(const std::vector& points); + bool find_correspondences(const std::vector& points, float f); cv::Vec2f p[PointModel::N_POINTS]; // the points in model order cv::Vec2f p_exp[PointModel::N_POINTS]; // the expected point positions @@ -136,7 +116,7 @@ protected: void reset_velocities(); - void POSIT(float fov, int w, int h, const cv::Vec3f &headpos); // The POSIT algorithm, returns the number of iterations + int POSIT(float f); // The POSIT algorithm, returns the number of iterations bool init_phase; float dt_valid; // time since last valid tracking result @@ -144,7 +124,6 @@ protected: cv::Vec3f v_r; FrameTrafo X_CM; // trafo from model to camera FrameTrafo X_CM_old; - cv::Mat rvec, tvec; }; #endif //POINTTRACKER_H -- cgit v1.2.3
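With the final revert the tracker is back to the original convention: points stay in normalized image coordinates, f is (focal length)/(sensor width), and projection is the model-to-camera pose transform followed by a perspective divide. A self-contained sketch of that chain, using the same FrameTrafo semantics as the restored header; it is illustrative, assuming the behaviour of the restored project():

#include <opencv2/core/core.hpp>

// Project a model-space point into normalized image coordinates:
// apply the model-to-camera pose (R, t), then divide by depth and scale by f.
static cv::Vec2f project_normalized(const cv::Matx33f& R, const cv::Vec3f& t,
                                    const cv::Vec3f& v_M, float f)
{
    const cv::Vec3f v_C = R * v_M + t;          // model -> camera frame
    return cv::Vec2f(f * v_C[0] / v_C[2],
                     f * v_C[1] / v_C[2]);
}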