-rw-r--r--   CMakeLists.txt                                                   |   3
-rw-r--r--   facetracknoir/quat.hpp (renamed from facetracknoir/rotation.h)   |  22
-rw-r--r--   facetracknoir/tracker.h                                          |   2
-rw-r--r--   facetracknoir/tracker_types.h                                    |  14
-rw-r--r--   ftnoir_tracker_aruco/ftnoir_tracker_aruco.cpp                    | 148
-rw-r--r--   ftnoir_tracker_aruco/ftnoir_tracker_aruco.h                      |  13
-rw-r--r--   ftnoir_tracker_aruco/include/aruco.h                             |  29
-rw-r--r--   ftnoir_tracker_aruco/include/arucofidmarkers.h                   |  15
-rw-r--r--   ftnoir_tracker_aruco/include/board.h                             | 168
-rw-r--r--   ftnoir_tracker_aruco/include/boarddetector.h                     | 139
-rw-r--r--   ftnoir_tracker_aruco/include/cameraparameters.h                  |   6
-rw-r--r--   ftnoir_tracker_aruco/include/cvdrawingutils.h                    |  19
-rw-r--r--   ftnoir_tracker_aruco/include/exports.h                           |   6
-rw-r--r--   ftnoir_tracker_aruco/include/marker.h                            |  16
-rw-r--r--   ftnoir_tracker_aruco/include/markerdetector.h                    |  56
-rw-r--r--   ftnoir_tracker_aruco/trans_calib.h                               |   2
-rw-r--r--   ftnoir_tracker_pt/camera.h                                       |   2
-rw-r--r--   ftnoir_tracker_pt/ftnoir_tracker_pt.h                            |  54
-rw-r--r--   ftnoir_tracker_pt/ftnoir_tracker_pt_dialog.cpp                   |  88
-rw-r--r--   ftnoir_tracker_pt/ftnoir_tracker_pt_settings.h                   |   4
-rw-r--r--   ftnoir_tracker_pt/point_extractor.h                              |   4
-rw-r--r--   ftnoir_tracker_pt/point_tracker.h                                |  74
-rw-r--r--   ftnoir_tracker_pt/pt_video_widget.h                              |   2
-rw-r--r--   ftnoir_tracker_pt/trans_calib.h                                  |   2

24 files changed, 287 insertions, 601 deletions
diff --git a/CMakeLists.txt b/CMakeLists.txt
index 655d70c3..2de8856d 100644
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
@@ -344,7 +344,6 @@ opentrack_library(opentrack-tracker-ht)
target_link_libraries(opentrack-tracker-ht opentrack-compat)
if(SDK_ARUCO_LIBPATH)
- include_directories(${CMAKE_SOURCE_DIR}/ftnoir_tracker_aruco/include)
opentrack_library(opentrack-tracker-aruco)
target_link_libraries(opentrack-tracker-aruco ${SDK_ARUCO_LIBPATH} ${OpenCV_LIBS})
endif()
@@ -477,7 +476,7 @@ install(DIRECTORY "${CMAKE_SOURCE_DIR}/3rdparty-notices" DESTINATION .)
install(FILES "${CMAKE_SOURCE_DIR}/bin/NPClient.dll" "${CMAKE_SOURCE_DIR}/bin/NPClient64.dll" "${CMAKE_SOURCE_DIR}/bin/TrackIR.exe" DESTINATION .)
install(DIRECTORY "${CMAKE_SOURCE_DIR}/bin/settings" "${CMAKE_SOURCE_DIR}/facetracknoir/clientfiles" DESTINATION .)
-if(NOT WIN32 AND SDK_WINE_PREFIX)
+if(NOT WIN32 AND SDK_WINE_PREFIX AND NOT SDK_WINE_NO_WRAPPER)
install(FILES "${CMAKE_BINARY_DIR}/opentrack-wrapper-wine.exe.so"
DESTINATION .)
endif()
diff --git a/facetracknoir/rotation.h b/facetracknoir/quat.hpp
index b3bb891e..1e268963 100644
--- a/facetracknoir/rotation.h
+++ b/facetracknoir/quat.hpp
@@ -9,10 +9,13 @@
#include <cmath>
class Quat {
-
+private:
+ static constexpr double pi = 3.141592653;
+ static constexpr double r2d = 180./pi;
+ double a,b,c,d; // quaternion coefficients
public:
Quat() : a(1.0),b(0.0),c(0.0),d(0.0) {}
- Quat(double yaw, double pitch, double roll) { fromEuler(yaw, pitch, roll); }
+ Quat(double yaw, double pitch, double roll) { from_euler_rads(yaw, pitch, roll); }
Quat(double a, double b, double c, double d) : a(a),b(b),c(c),d(d) {}
Quat inv(){
@@ -21,7 +24,7 @@ public:
// conversions
// see http://en.wikipedia.org/wiki/Conversion_between_quaternions_and_Euler_angles
- void fromEuler(double yaw, double pitch, double roll)
+ void from_euler_rads(double yaw, double pitch, double roll)
{
double sin_phi = sin(roll/2.0);
@@ -37,13 +40,21 @@ public:
d = cos_phi*cos_the*sin_psi - sin_phi*sin_the*cos_psi;
}
- void toEuler(double& yaw, double& pitch, double& roll) const
+ void to_euler_rads(double& yaw, double& pitch, double& roll) const
{
roll = atan2(2.0*(a*b + c*d), 1.0 - 2.0*(b*b + c*c));
pitch = asin(2.0*(a*c - b*d));
yaw = atan2(2.0*(a*d + b*c), 1.0 - 2.0*(c*c + d*d));
}
+ void to_euler_degrees(double& yaw, double& pitch, double& roll) const
+ {
+ to_euler_rads(yaw, pitch, roll);
+ yaw *= r2d;
+ pitch *= r2d;
+ roll *= r2d;
+ }
+
const Quat operator*(const Quat& B) const
{
const Quat& A = *this;
@@ -52,7 +63,4 @@ public:
A.a*B.c - A.b*B.d + A.c*B.a + A.d*B.b,
A.a*B.d + A.b*B.c - A.c*B.b + A.d*B.a);
}
-
-private:
- double a,b,c,d; // quaternion coefficients
};
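For reference, a minimal sketch of how the renamed Quat helpers fit together after this patch: the constructor calls from_euler_rads(), operator* composes rotations, and to_euler_degrees() wraps to_euler_rads() with the r2d factor. Illustrative only, not part of the patch.

    #include "facetracknoir/quat.hpp"
    #include <cstdio>

    int main()
    {
        const double d2r = 3.141592653 / 180.;
        Quat yaw10(10. * d2r, 0., 0.);   // 10 degrees of yaw, given in radians
        Quat pitch5(0., 5. * d2r, 0.);   // 5 degrees of pitch

        double yaw, pitch, roll;
        (yaw10 * pitch5).to_euler_degrees(yaw, pitch, roll);  // composed rotation, back to degrees
        std::printf("yaw=%.2f pitch=%.2f roll=%.2f\n", yaw, pitch, roll);
    }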
diff --git a/facetracknoir/tracker.h b/facetracknoir/tracker.h
index 05ae4180..3d9a3858 100644
--- a/facetracknoir/tracker.h
+++ b/facetracknoir/tracker.h
@@ -22,8 +22,6 @@
#include "facetracknoir/options.h"
#include "facetracknoir/timer.hpp"
-
-
class Tracker : protected QThread {
Q_OBJECT
private:
diff --git a/facetracknoir/tracker_types.h b/facetracknoir/tracker_types.h
index c667498e..02aacdcf 100644
--- a/facetracknoir/tracker_types.h
+++ b/facetracknoir/tracker_types.h
@@ -2,14 +2,14 @@
#include <utility>
#include <algorithm>
-#include "rotation.h"
-#include "plugin-api.hpp"
+#include "./quat.hpp"
+#include "./plugin-api.hpp"
struct T6DOF {
private:
- static constexpr double PI = 3.14159265358979323846264;
- static constexpr double D2R = PI/180.0;
- static constexpr double R2D = 180.0/PI;
+ static constexpr double pi = 3.141592653;
+ static constexpr double d2r = pi/180.0;
+ static constexpr double r2d = 180./pi;
double axes[6];
public:
@@ -23,13 +23,13 @@ public:
Quat quat() const
{
- return Quat(axes[Yaw]*D2R, axes[Pitch]*D2R, axes[Roll]*D2R);
+ return Quat(axes[Yaw]*d2r, axes[Pitch]*d2r, axes[Roll]*d2r);
}
static T6DOF fromQuat(const Quat& q)
{
T6DOF ret;
- q.toEuler(ret(Yaw), ret(Pitch), ret(Roll));
+ q.to_euler_rads(ret(Yaw), ret(Pitch), ret(Roll));
return ret;
}
diff --git a/ftnoir_tracker_aruco/ftnoir_tracker_aruco.cpp b/ftnoir_tracker_aruco/ftnoir_tracker_aruco.cpp
index e216d319..a1e15721 100644
--- a/ftnoir_tracker_aruco/ftnoir_tracker_aruco.cpp
+++ b/ftnoir_tracker_aruco/ftnoir_tracker_aruco.cpp
@@ -8,13 +8,13 @@
#include "ftnoir_tracker_aruco.h"
#include "ui_aruco-trackercontrols.h"
#include "facetracknoir/plugin-api.hpp"
-#include <cmath>
#include <QMutexLocker>
-#include <aruco.h>
-#include <opencv2/opencv.hpp>
-#include <opencv/highgui.h>
+#include "include/markerdetector.h"
+#include <opencv2/core/core.hpp>
+#include <opencv2/highgui/highgui.hpp>
#include <vector>
#include <cstdio>
+#include <cmath>
#if defined(_WIN32)
# undef NOMINMAX
@@ -29,51 +29,51 @@
static QList<QString> get_camera_names(void) {
QList<QString> ret;
#if defined(_WIN32)
- // Create the System Device Enumerator.
- HRESULT hr;
- ICreateDevEnum *pSysDevEnum = NULL;
- hr = CoCreateInstance(CLSID_SystemDeviceEnum, NULL, CLSCTX_INPROC_SERVER, IID_ICreateDevEnum, (void **)&pSysDevEnum);
- if (FAILED(hr))
- {
- return ret;
- }
- // Obtain a class enumerator for the video compressor category.
- IEnumMoniker *pEnumCat = NULL;
- hr = pSysDevEnum->CreateClassEnumerator(CLSID_VideoInputDeviceCategory, &pEnumCat, 0);
-
- if (hr == S_OK) {
- // Enumerate the monikers.
- IMoniker *pMoniker = NULL;
- ULONG cFetched;
- while (pEnumCat->Next(1, &pMoniker, &cFetched) == S_OK) {
- IPropertyBag *pPropBag;
- hr = pMoniker->BindToStorage(0, 0, IID_IPropertyBag, (void **)&pPropBag);
- if (SUCCEEDED(hr)) {
- // To retrieve the filter's friendly name, do the following:
- VARIANT varName;
- VariantInit(&varName);
- hr = pPropBag->Read(L"FriendlyName", &varName, 0);
- if (SUCCEEDED(hr))
- {
- // Display the name in your UI somehow.
- QString str((QChar*)varName.bstrVal, wcslen(varName.bstrVal));
- ret.append(str);
- }
- VariantClear(&varName);
-
- ////// To create an instance of the filter, do the following:
- ////IBaseFilter *pFilter;
- ////hr = pMoniker->BindToObject(NULL, NULL, IID_IBaseFilter,
- //// (void**)&pFilter);
- // Now add the filter to the graph.
- //Remember to release pFilter later.
- pPropBag->Release();
- }
- pMoniker->Release();
- }
- pEnumCat->Release();
- }
- pSysDevEnum->Release();
+ // Create the System Device Enumerator.
+ HRESULT hr;
+ ICreateDevEnum *pSysDevEnum = NULL;
+ hr = CoCreateInstance(CLSID_SystemDeviceEnum, NULL, CLSCTX_INPROC_SERVER, IID_ICreateDevEnum, (void **)&pSysDevEnum);
+ if (FAILED(hr))
+ {
+ return ret;
+ }
+ // Obtain a class enumerator for the video compressor category.
+ IEnumMoniker *pEnumCat = NULL;
+ hr = pSysDevEnum->CreateClassEnumerator(CLSID_VideoInputDeviceCategory, &pEnumCat, 0);
+
+ if (hr == S_OK) {
+ // Enumerate the monikers.
+ IMoniker *pMoniker = NULL;
+ ULONG cFetched;
+ while (pEnumCat->Next(1, &pMoniker, &cFetched) == S_OK) {
+ IPropertyBag *pPropBag;
+ hr = pMoniker->BindToStorage(0, 0, IID_IPropertyBag, (void **)&pPropBag);
+ if (SUCCEEDED(hr)) {
+ // To retrieve the filter's friendly name, do the following:
+ VARIANT varName;
+ VariantInit(&varName);
+ hr = pPropBag->Read(L"FriendlyName", &varName, 0);
+ if (SUCCEEDED(hr))
+ {
+ // Display the name in your UI somehow.
+ QString str((QChar*)varName.bstrVal, wcslen(varName.bstrVal));
+ ret.append(str);
+ }
+ VariantClear(&varName);
+
+ ////// To create an instance of the filter, do the following:
+ ////IBaseFilter *pFilter;
+ ////hr = pMoniker->BindToObject(NULL, NULL, IID_IBaseFilter,
+ //// (void**)&pFilter);
+ // Now add the filter to the graph.
+ //Remember to release pFilter later.
+ pPropBag->Release();
+ }
+ pMoniker->Release();
+ }
+ pEnumCat->Release();
+ }
+ pSysDevEnum->Release();
#else
for (int i = 0; i < 16; i++) {
char buf[128];
@@ -89,15 +89,15 @@ static QList<QString> get_camera_names(void) {
}
typedef struct {
- int width;
- int height;
+ int width;
+ int height;
} resolution_tuple;
static resolution_tuple resolution_choices[] = {
- { 640, 480 },
- { 320, 240 },
- { 320, 200 },
- { 0, 0 }
+ { 640, 480 },
+ { 320, 240 },
+ { 320, 200 },
+ { 0, 0 }
};
Tracker::Tracker() : stop(false), layout(nullptr), videoWidget(nullptr)
@@ -108,8 +108,8 @@ Tracker::~Tracker()
{
stop = true;
wait();
- if (videoWidget)
- delete videoWidget;
+ if (videoWidget)
+ delete videoWidget;
if(layout)
delete layout;
qDebug() << "releasing camera, brace for impact";
@@ -178,7 +178,7 @@ void Tracker::run()
}
if (fps)
camera.set(CV_CAP_PROP_FPS, fps);
-
+
aruco::MarkerDetector detector;
detector.setDesiredSpeed(3);
@@ -187,7 +187,7 @@ void Tracker::run()
cv::Mat color, color_, grayscale, rvec, tvec;
const double stateful_coeff = 0.88;
-
+
if (!camera.isOpened())
{
fprintf(stderr, "aruco tracker: can't open camera\n");
@@ -214,7 +214,7 @@ void Tracker::run()
grayscale = channel[2];
} else
cv::cvtColor(color, grayscale, cv::COLOR_BGR2GRAY);
-
+
gain.tick(camera, grayscale);
const int scale = frame.cols > 480 ? 2 : 1;
@@ -280,11 +280,11 @@ void Tracker::run()
cv::putText(frame, buf, cv::Point(10, 32), cv::FONT_HERSHEY_PLAIN, scale, cv::Scalar(0, 255, 0), scale);
::sprintf(buf, "Jiffies: %ld", (long) (10000 * (time - tm) / freq));
cv::putText(frame, buf, cv::Point(10, 54), cv::FONT_HERSHEY_PLAIN, scale, cv::Scalar(80, 255, 0), scale);
-
+
if (markers.size() == 1 && markers[0].size() == 4) {
const auto& m = markers.at(0);
const float size = 40;
-
+
const double p = s.marker_pitch;
const double sq = sin(p * HT_PI / 180);
const double cq = cos(p * HT_PI / 180);
@@ -380,7 +380,7 @@ void Tracker::run()
void Tracker::GetHeadPoseData(double *data)
{
QMutexLocker lck(&mtx);
-
+
data[Yaw] = pose[Yaw];
data[Pitch] = pose[Pitch];
data[Roll] = pose[Roll];
@@ -391,11 +391,11 @@ void Tracker::GetHeadPoseData(double *data)
class TrackerDll : public Metadata
{
- // ITrackerDll interface
- void getFullName(QString *strToBeFilled);
- void getShortName(QString *strToBeFilled);
- void getDescription(QString *strToBeFilled);
- void getIcon(QIcon *icon);
+ // ITrackerDll interface
+ void getFullName(QString *strToBeFilled);
+ void getShortName(QString *strToBeFilled);
+ void getDescription(QString *strToBeFilled);
+ void getIcon(QIcon *icon);
};
//-----------------------------------------------------------------------------
@@ -406,12 +406,12 @@ void TrackerDll::getFullName(QString *strToBeFilled)
void TrackerDll::getShortName(QString *strToBeFilled)
{
- *strToBeFilled = "aruco";
+ *strToBeFilled = "aruco";
}
void TrackerDll::getDescription(QString *strToBeFilled)
{
- *strToBeFilled = "";
+ *strToBeFilled = "";
}
void TrackerDll::getIcon(QIcon *icon)
@@ -425,7 +425,7 @@ void TrackerDll::getIcon(QIcon *icon)
extern "C" OPENTRACK_EXPORT Metadata* GetMetadata()
{
- return new TrackerDll;
+ return new TrackerDll;
}
//#pragma comment(linker, "/export:GetTracker=_GetTracker@0")
@@ -444,11 +444,11 @@ TrackerControls::TrackerControls()
{
tracker = nullptr;
calib_timer.setInterval(200);
- ui.setupUi(this);
+ ui.setupUi(this);
setAttribute(Qt::WA_NativeWindow, true);
ui.cameraName->addItems(get_camera_names());
tie_setting(s.camera_index, ui.cameraName);
- tie_setting(s.resolution, ui.resolution);
+ tie_setting(s.resolution, ui.resolution);
tie_setting(s.force_fps, ui.cameraFPS);
tie_setting(s.fov, ui.cameraFOV);
tie_setting(s.headpos_x, ui.cx);
@@ -500,7 +500,7 @@ void TrackerControls::doOK()
s.b->save();
if (tracker)
tracker->reload();
- this->close();
+ this->close();
}
void TrackerControls::doCancel()
diff --git a/ftnoir_tracker_aruco/ftnoir_tracker_aruco.h b/ftnoir_tracker_aruco/ftnoir_tracker_aruco.h
index 5416bb52..9ac57417 100644
--- a/ftnoir_tracker_aruco/ftnoir_tracker_aruco.h
+++ b/ftnoir_tracker_aruco/ftnoir_tracker_aruco.h
@@ -16,12 +16,9 @@
#include <QHBoxLayout>
#include <QDialog>
#include <QTimer>
-#include <opencv2/opencv.hpp>
-#include <opencv/highgui.h>
#include "facetracknoir/options.h"
#include "ftnoir_tracker_aruco/trans_calib.h"
#include "facetracknoir/plugin-api.hpp"
-
#include "facetracknoir/gain-control.hpp"
using namespace options;
@@ -50,7 +47,7 @@ class Tracker : protected QThread, public ITracker
{
Q_OBJECT
public:
- Tracker();
+ Tracker();
~Tracker() override;
void StartTracker(QFrame* frame);
void GetHeadPoseData(double *data);
@@ -61,7 +58,7 @@ private:
QMutex mtx;
volatile bool stop;
QHBoxLayout* layout;
- ArucoVideoWidget* videoWidget;
+ ArucoVideoWidget* videoWidget;
settings s;
double pose[6];
cv::Mat frame;
@@ -83,14 +80,14 @@ public:
tracker = nullptr;
}
private:
- Ui::Form ui;
+ Ui::Form ui;
Tracker* tracker;
settings s;
TranslationCalibrator calibrator;
QTimer calib_timer;
private slots:
- void doOK();
- void doCancel();
+ void doOK();
+ void doCancel();
void toggleCalibrate();
void cleanupCalib();
void update_tracker_calibration();
diff --git a/ftnoir_tracker_aruco/include/aruco.h b/ftnoir_tracker_aruco/include/aruco.h
index 569b95fb..8ea583a8 100644
--- a/ftnoir_tracker_aruco/include/aruco.h
+++ b/ftnoir_tracker_aruco/include/aruco.h
@@ -26,12 +26,12 @@ The views and conclusions contained in the software and documentation are those
authors and should not be interpreted as representing official policies, either expressed
or implied, of Rafael Muñoz Salinas.
-
-
+
+
\mainpage ArUco: Augmented Reality library from the University of Cordoba
-ArUco is a minimal C++ library for detection of Augmented Reality markers based on OpenCv exclusively.
+ArUco is a minimal C++ library for detection of Augmented Reality markers based on OpenCv exclusively.
It is an educational project to show student how to detect augmented reality markers and it is provided under BSD license.
@@ -54,11 +54,11 @@ Aruco allows the possibility to employ board. Boards are markers composed by an
The library comes with five applications that will help you to learn how to use the library:
- aruco_create_marker: which creates marker and saves it in a jpg file you can print.
- - aruco_simple : simple test aplication that detects the markers in a image
+ - aruco_simple : simple test aplication that detects the markers in a image
- aruco_test: this is the main application for detection. It reads images either from the camera of from a video and detect markers. Additionally, if you provide the intrinsics of the camera(obtained by OpenCv calibration) and the size of the marker in meters, the library calculates the marker intrinsics so that you can easily create your AR applications.
- aruco_test_gl: shows how to use the library AR applications using OpenGL for rendering
- aruco_create_board: application that helps you to create a board
- - aruco_simple_board: simple test aplication that detects a board of markers in a image
+ - aruco_simple_board: simple test aplication that detects a board of markers in a image
- aruco_test_board: application that detects boards
- aruco_test_board_gl: application that detects boards and uses OpenGL to draw
@@ -66,7 +66,7 @@ The library comes with five applications that will help you to learn how to use
The ArUco library contents are divided in two main directories. The src directory, which contains the library itself. And the utils directory which contains the applications.
-The library main classes are:
+The library main classes are:
- aruco::CameraParameters: represent the information of the camera that captures the images. Here you must set the calibration info.
- aruco::Marker: which represent a marker detected in the image
- aruco::MarkerDetector: that is in charge of deteting the markers in a image Detection is done by simple calling the member funcion ArMarkerDetector::detect(). Additionally, the classes contain members to create the required matrices for rendering using OpenGL. See aruco_test_gl for details
@@ -101,34 +101,33 @@ The library has been compiled using MinGW and codeblocks. Below I describe the b
-# Download the source code and compile it using cmake and codeblocks. Note: install the library in C:\ if you want it to be easily detected by cmake afterwards
- step 4) aruco
-# Download and decompress.
- -# Open cmake gui application and set the path to the main library directory and also set a path where the project is going to be built.
+ -# Open cmake gui application and set the path to the main library directory and also set a path where the project is going to be built.
-# Generate the codeblock project.
-# Open the project with codeblock and compile then, install. The programs will be probably generated into the bin directory
OpenGL: by default, the mingw version installed has not the glut library. So, the opengl programs are not compiled. If you want to compile with OpenGL support, you must install glut, or prefereably freeglut.
-Thus,
- - Download the library (http://www.martinpayne.me.uk/software/development/GLUT/freeglut-MinGW.zip) for mingw.
- - Decompress in a directory X.
+Thus,
+ - Download the library (http://www.martinpayne.me.uk/software/development/GLUT/freeglut-MinGW.zip) for mingw.
+ - Decompress in a directory X.
- Then, rerun cmake setting the variable GLU_PATH to that directory (>cmake .. -DGLUT_PATH="C:\X")
- Finally, recompile and test. Indeed, you should move the freeglut.dll to the directory with the binaries or to any other place in the PATH.
CONCLUSION: Move to Linux, things are simpler :P
-
-\section Testing
+
+\section Testing
For testing the applications, the library provides videos and the corresponding camera parameters of these videos. Into the directories you will find information on how to run the examples.
-
+
\section Final Notes
- REQUIREMENTS: OpenCv >= 2.1.0. and OpenGL for (aruco_test_gl and aruco_test_board_gl)
- CONTACT: Rafael Munoz-Salinas: rmsalinas@uco.es
- This libary is free software and come with no guaratee!
-
+
*/
#include "markerdetector.h"
-#include "boarddetector.h"
#include "cvdrawingutils.h"
diff --git a/ftnoir_tracker_aruco/include/arucofidmarkers.h b/ftnoir_tracker_aruco/include/arucofidmarkers.h
index 7dad4672..15eb8e4c 100644
--- a/ftnoir_tracker_aruco/include/arucofidmarkers.h
+++ b/ftnoir_tracker_aruco/include/arucofidmarkers.h
@@ -31,7 +31,6 @@ or implied, of Rafael Muñoz Salinas.
#include <opencv2/core/core.hpp>
#include "exports.h"
#include "marker.h"
-#include "board.h"
namespace aruco {
class ARUCO_EXPORTS FiducidalMarkers {
@@ -80,7 +79,7 @@ public:
* @param gridSize grid layout (numer of sqaures in x and Y)
* @param MarkerSize size of markers sides in pixels
* @param MarkerDistance distance between the markers
- * @param TInfo output
+ * @param TInfo output
* @param excludedIds set of ids excluded from the board
*/
static cv::Mat createBoardImage( cv::Size gridSize,int MarkerSize,int MarkerDistance, BoardConfiguration& TInfo ,vector<int> *excludedIds=NULL ) throw (cv::Exception);
@@ -89,24 +88,24 @@ public:
/**Creates a printable image of a board in chessboard_like manner
* @param gridSize grid layout (numer of sqaures in x and Y)
* @param MarkerSize size of markers sides in pixels
- * @param TInfo output
+ * @param TInfo output
* @param setDataCentered indicates if the center is set at the center of the board. Otherwise it is the left-upper corner
- *
+ *
*/
static cv::Mat createBoardImage_ChessBoard( cv::Size gridSize,int MarkerSize, BoardConfiguration& TInfo ,bool setDataCentered=true ,vector<int> *excludedIds=NULL) throw (cv::Exception);
- /**Creates a printable image of a board in a frame fashion
+ /**Creates a printable image of a board in a frame fashion
* @param gridSize grid layout (numer of sqaures in x and Y)
* @param MarkerSize size of markers sides in pixels
* @param MarkerDistance distance between the markers
- * @param TInfo output
+ * @param TInfo output
* @param setDataCentered indicates if the center is set at the center of the board. Otherwise it is the left-upper corner
- *
+ *
*/
static cv::Mat createBoardImage_Frame( cv::Size gridSize,int MarkerSize,int MarkerDistance, BoardConfiguration& TInfo ,bool setDataCentered=true,vector<int> *excludedIds=NULL ) throw (cv::Exception);
private:
-
+
static vector<int> getListOfValidMarkersIds_random(int nMarkers,vector<int> *excluded) throw (cv::Exception);
static cv::Mat rotate(const cv::Mat & in);
static int hammDistMarker(cv::Mat bits);
diff --git a/ftnoir_tracker_aruco/include/board.h b/ftnoir_tracker_aruco/include/board.h
deleted file mode 100644
index c1d79292..00000000
--- a/ftnoir_tracker_aruco/include/board.h
+++ /dev/null
@@ -1,168 +0,0 @@
-/*****************************
-Copyright 2011 Rafael Muñoz Salinas. All rights reserved.
-
-Redistribution and use in source and binary forms, with or without modification, are
-permitted provided that the following conditions are met:
-
- 1. Redistributions of source code must retain the above copyright notice, this list of
- conditions and the following disclaimer.
-
- 2. Redistributions in binary form must reproduce the above copyright notice, this list
- of conditions and the following disclaimer in the documentation and/or other materials
- provided with the distribution.
-
-THIS SOFTWARE IS PROVIDED BY Rafael Muñoz Salinas ''AS IS'' AND ANY EXPRESS OR IMPLIED
-WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
-FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL Rafael Muñoz Salinas OR
-CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
-CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
-SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
-ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
-NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
-ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-The views and conclusions contained in the software and documentation are those of the
-authors and should not be interpreted as representing official policies, either expressed
-or implied, of Rafael Muñoz Salinas.
-********************************/
-#ifndef _Aruco_board_h
-#define _Aruco_board_h
-#include <opencv2/opencv.hpp>
-#include <string>
-#include <vector>
-#include "exports.h"
-#include "marker.h"
-using namespace std;
-namespace aruco {
-/**
- * 3d representation of a marker
- */
-struct ARUCO_EXPORTS MarkerInfo:public vector<cv::Point3f> {
- MarkerInfo() {}
- MarkerInfo(int _id) {id=_id; }
- MarkerInfo(const MarkerInfo&MI): vector<cv::Point3f>(MI){id=MI.id; }
- MarkerInfo & operator=(const MarkerInfo&MI){
- vector<cv::Point3f> ::operator=(MI);
- id=MI.id;
- return *this;
- }
- int id;//maker id
-};
-
-/**\brief This class defines a board with several markers.
- * A Board contains several markers so that they are more robustly detected.
- *
- * In general, a board is a set of markers. So BoardConfiguration is only a list
- * of the id of the markers along with the position of their corners.
- *
- * The position of the corners can be specified either in pixels (in a non-specific size) or in meters.
- * The first is the typical case in which you generate the image of board and the print it. Since you do not know in advance the real
- * size of the markers, their corners are specified in pixels, and then, the translation to meters can be made once you know the real size.
- *
- * On the other hand, you may want to have the information of your boards in meters. The BoardConfiguration allows you to do so.
- *
- * The point is in the mInfoType variable. It can be either PIX or METERS according to your needs.
- *
-*/
-
-
-class ARUCO_EXPORTS BoardConfiguration: public vector<MarkerInfo>
-{
- friend class Board;
-public:
- enum MarkerInfoType {NONE=-1,PIX=0,METERS=1};//indicates if the data in MakersInfo is expressed in meters or in pixels so as to do conversion internally
- //variable indicates if the data in MakersInfo is expressed in meters or in pixels so as to do conversion internally
- int mInfoType;
- /**
- */
- BoardConfiguration();
-
- /**
- */
- BoardConfiguration(const BoardConfiguration &T);
-
- /**
- */
- BoardConfiguration & operator=(const BoardConfiguration &T);
- /**Saves the board info to a file
- */
- void saveToFile(string sfile)throw (cv::Exception);
- /**Reads board info from a file
- */
- void readFromFile(string sfile)throw (cv::Exception);
- /**Indicates if the corners are expressed in meters
- */
- bool isExpressedInMeters()const {
- return mInfoType==METERS;
- }
- /**Indicates if the corners are expressed in meters
- */
- bool isExpressedInPixels()const {
- return mInfoType==PIX;
- }
- /**Returns the index of the marker with id indicated, if is in the list
- */
- int getIndexOfMarkerId(int id)const;
- /**Returns the Info of the marker with id specified. If not in the set, throws exception
- */
- const MarkerInfo& getMarkerInfo(int id)const throw (cv::Exception);
- /**Set in the list passed the set of the ids
- */
- void getIdList(vector<int> &ids,bool append=true)const;
-private:
- /**Saves the board info to a file
- */
- void saveToFile(cv::FileStorage &fs)throw (cv::Exception);
- /**Reads board info from a file
- */
- void readFromFile(cv::FileStorage &fs)throw (cv::Exception);
-};
-
-/**
-*/
-class ARUCO_EXPORTS Board:public vector<Marker>
-{
-
-public:
- BoardConfiguration conf;
- //matrices of rotation and translation respect to the camera
- cv::Mat Rvec,Tvec;
- /**
- */
- Board()
- {
- Rvec.create(3,1,CV_32FC1);
- Tvec.create(3,1,CV_32FC1);
- for (int i=0;i<3;i++)
- Tvec.at<float>(i,0)=Rvec.at<float>(i,0)=-999999;
- }
-
- /**Given the extrinsic camera parameters returns the GL_MODELVIEW matrix for opengl.
- * Setting this matrix, the reference corrdinate system will be set in this board
- */
- void glGetModelViewMatrix(double modelview_matrix[16])throw(cv::Exception);
-
- /**
- * Returns position vector and orientation quaternion for an Ogre scene node or entity.
- * Use:
- * ...
- * Ogre::Vector3 ogrePos (position[0], position[1], position[2]);
- * Ogre::Quaternion ogreOrient (orientation[0], orientation[1], orientation[2], orientation[3]);
- * mySceneNode->setPosition( ogrePos );
- * mySceneNode->setOrientation( ogreOrient );
- * ...
- */
- void OgreGetPoseParameters( double position[3], double orientation[4] )throw(cv::Exception);
-
-
- /**Save this from a file
- */
- void saveToFile(string filePath)throw(cv::Exception);
- /**Read this from a file
- */
- void readFromFile(string filePath)throw(cv::Exception);
-
-};
-}
-
-#endif
diff --git a/ftnoir_tracker_aruco/include/boarddetector.h b/ftnoir_tracker_aruco/include/boarddetector.h
deleted file mode 100644
index 4770b5c9..00000000
--- a/ftnoir_tracker_aruco/include/boarddetector.h
+++ /dev/null
@@ -1,139 +0,0 @@
-/*****************************
-Copyright 2011 Rafael Muñoz Salinas. All rights reserved.
-
-Redistribution and use in source and binary forms, with or without modification, are
-permitted provided that the following conditions are met:
-
- 1. Redistributions of source code must retain the above copyright notice, this list of
- conditions and the following disclaimer.
-
- 2. Redistributions in binary form must reproduce the above copyright notice, this list
- of conditions and the following disclaimer in the documentation and/or other materials
- provided with the distribution.
-
-THIS SOFTWARE IS PROVIDED BY Rafael Muñoz Salinas ''AS IS'' AND ANY EXPRESS OR IMPLIED
-WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
-FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL Rafael Muñoz Salinas OR
-CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
-CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
-SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
-ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
-NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
-ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-The views and conclusions contained in the software and documentation are those of the
-authors and should not be interpreted as representing official policies, either expressed
-or implied, of Rafael Muñoz Salinas.
-********************************/
-#ifndef _Aruco_BoardDetector_H
-#define _Aruco_BoardDetector_H
-#include <opencv2/opencv.hpp>
-#include "exports.h"
-#include "board.h"
-#include "cameraparameters.h"
-#include "markerdetector.h"
-using namespace std;
-
-namespace aruco
-{
-
-/**\brief This class detects AR boards
- * Version 1.2
- * There are two modes for board detection.
- * First, the old way. (You first detect markers with MarkerDetector and then call to detect in this class.
- *
- * Second: New mode, marker detection is included in the class
- * \code
-
- CameraParameters CP;
- CP.readFromFile(path_cp)
- BoardConfiguration BC;
- BC.readFromFile(path_bc);
- BoardDetector BD;
- BD.setParams(BC,CP); //or only BD.setParams(BC)
- //capture image
- cv::Mat im;
- capture_image(im);
-
- float prob=BD.detect(im);
- if (prob>0.3)
- CvDrawingUtils::draw3DAxis(im,BD.getDetectedBoard(),CP);
-
- \endcode
- *
-*/
-class ARUCO_EXPORTS BoardDetector
-{
-public:
- /** See discussion in @see enableRotateXAxis.
- * Do not change unless you know what you are doing
- */
- BoardDetector(bool setYPerperdicular=true);
-
-
- /**
- * Use if you plan to let this class to perform marker detection too
- */
- void setParams(const BoardConfiguration &bc,const CameraParameters &cp, float markerSizeMeters=-1);
- void setParams(const BoardConfiguration &bc);
- /**
- * Detect markers, and then, look for the board indicated in setParams()
- * @return value indicating the likelihood of having found the marker
- */
- float detect(const cv::Mat &im)throw (cv::Exception);
- /**Returns a reference to the board detected
- */
- Board & getDetectedBoard(){return _boardDetected;}
- /**Returns a reference to the internal marker detector
- */
- MarkerDetector &getMarkerDetector(){return _mdetector;}
- /**Returns the vector of markers detected
- */
- vector<Marker> &getDetectedMarkers(){return _vmarkers;}
-
-
- //ALTERNATIVE DETECTION METHOD, BASED ON MARKERS PREVIOUSLY DETECTED
-
- /** Given the markers detected, determines if there is the board passed
- * @param detectedMarkers result provided by aruco::ArMarkerDetector
- * @param BConf the board you want to see if is present
- * @param Bdetected output information of the detected board
- * @param camMatrix camera matrix with intrinsics
- * @param distCoeff camera distorsion coeff
- * @param camMatrix intrinsic camera information.
- * @param distCoeff camera distorsion coefficient. If set Mat() if is assumed no camera distorion
- * @param markerSizeMeters size of the marker sides expressed in meters
- * @return value indicating the likelihood of having found the marker
- */
- float detect(const vector<Marker> &detectedMarkers,const BoardConfiguration &BConf, Board &Bdetected, cv::Mat camMatrix=cv::Mat(),cv::Mat distCoeff=cv::Mat(), float markerSizeMeters=-1 )throw (cv::Exception);
- float detect(const vector<Marker> &detectedMarkers,const BoardConfiguration &BConf, Board &Bdetected,const CameraParameters &cp, float markerSizeMeters=-1 )throw (cv::Exception);
-
-
- /**
- * By default, the Y axis is set to point up. However this is not the default
- * operation mode of opencv, which produces the Z axis pointing up instead.
- * So, to achieve this change, we have to rotate the X axis.
- */
- void setYPerperdicular(bool enable){_setYPerperdicular=enable;}
-
-
-
-
-private:
- void rotateXAxis(cv::Mat &rotation);
- bool _setYPerperdicular;
-
- //-- Functionality to detect markers inside
- bool _areParamsSet;
- BoardConfiguration _bconf;
- Board _boardDetected;
- float _markerSize;
- CameraParameters _camParams;
- MarkerDetector _mdetector;//internal markerdetector
- vector<Marker> _vmarkers;//markers detected in the call to : float detect(const cv::Mat &im);
-
-};
-
-}
-#endif
-
diff --git a/ftnoir_tracker_aruco/include/cameraparameters.h b/ftnoir_tracker_aruco/include/cameraparameters.h
index c3381a74..a419afbe 100644
--- a/ftnoir_tracker_aruco/include/cameraparameters.h
+++ b/ftnoir_tracker_aruco/include/cameraparameters.h
@@ -28,7 +28,7 @@ or implied, of Rafael Muñoz Salinas.
#ifndef _Aruco_CameraParameters_H
#define _Aruco_CameraParameters_H
#include "exports.h"
-#include <opencv2/opencv.hpp>
+#include <opencv2/core/core.hpp>
#include <string>
using namespace std;
namespace aruco
@@ -105,7 +105,7 @@ public:
* @param invert: indicates if the output projection matrix has to yield a horizontally inverted image because image data has not been stored in the order of glDrawPixels: bottom-to-top.
*/
void glGetProjectionMatrix( cv::Size orgImgSize, cv::Size size,double proj_matrix[16],double gnear,double gfar,bool invert=false )throw(cv::Exception);
-
+
/**
* setup camera for an Ogre project.
* Use:
@@ -117,7 +117,7 @@ public:
* As in OpenGL, it assumes no camera distorsion
*/
void OgreGetProjectionMatrix( cv::Size orgImgSize, cv::Size size,double proj_matrix[16],double gnear,double gfar,bool invert=false )throw(cv::Exception);
-
+
private:
//GL routines
diff --git a/ftnoir_tracker_aruco/include/cvdrawingutils.h b/ftnoir_tracker_aruco/include/cvdrawingutils.h
index 38e9986e..24bfe630 100644
--- a/ftnoir_tracker_aruco/include/cvdrawingutils.h
+++ b/ftnoir_tracker_aruco/include/cvdrawingutils.h
@@ -33,19 +33,12 @@ namespace aruco
{
/**\brief A set of functions to draw in opencv images
*/
- class ARUCO_EXPORTS CvDrawingUtils
- {
- public:
-
- static void draw3dAxis(cv::Mat &Image,Marker &m,const CameraParameters &CP);
-
- static void draw3dCube(cv::Mat &Image,Marker &m,const CameraParameters &CP);
-
- static void draw3dAxis(cv::Mat &Image,Board &m,const CameraParameters &CP);
-
- static void draw3dCube(cv::Mat &Image,Board &m,const CameraParameters &CP);
-
- };
+ class ARUCO_EXPORTS CvDrawingUtils
+ {
+ public:
+ static void draw3dAxis(cv::Mat &Image,Marker &m,const CameraParameters &CP);
+ static void draw3dCube(cv::Mat &Image,Marker &m,const CameraParameters &CP);
+ };
}
#endif
diff --git a/ftnoir_tracker_aruco/include/exports.h b/ftnoir_tracker_aruco/include/exports.h
index 154605ec..044a1367 100644
--- a/ftnoir_tracker_aruco/include/exports.h
+++ b/ftnoir_tracker_aruco/include/exports.h
@@ -25,7 +25,7 @@ The views and conclusions contained in the software and documentation are those
authors and should not be interpreted as representing official policies, either expressed
or implied, of Rafael Muñoz Salinas.
********************************/
-
+
#ifndef __OPENARUCO_CORE_TYPES_H__
@@ -37,9 +37,9 @@ or implied, of Rafael Muñoz Salinas.
#if (defined WIN32 || defined _WIN32 || defined WINCE) && defined DSO_EXPORTS
- #define ARUCO_EXPORTS __declspec(dllexport)
+ #define ARUCO_EXPORTS __declspec(dllexport) __attribute__((visibility ("default")))
#else
- #define ARUCO_EXPORTS
+ #define ARUCO_EXPORTS __attribute__((visibility ("default")))
#endif
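The added __attribute__((visibility("default"))) only matters when the library is built with hidden default visibility: symbols marked ARUCO_EXPORTS stay visible across the shared-object boundary while everything else is hidden. A hedged illustration with made-up class names:

    #include "exports.h"

    // Built with -fvisibility=hidden, only ARUCO_EXPORTS symbols are exported.
    class ARUCO_EXPORTS VisibleToPlugins   // part of the exported interface
    {
    public:
        void run();
    };

    class InternalOnly                     // hidden: not part of the ABI
    {
        void helper();
    };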
diff --git a/ftnoir_tracker_aruco/include/marker.h b/ftnoir_tracker_aruco/include/marker.h
index dc6bb28c..89961002 100644
--- a/ftnoir_tracker_aruco/include/marker.h
+++ b/ftnoir_tracker_aruco/include/marker.h
@@ -29,7 +29,7 @@ or implied, of Rafael Muñoz Salinas.
#define _Aruco_Marker_H
#include <vector>
#include <iostream>
-#include <opencv2/opencv.hpp>
+#include <opencv2/core/core.hpp>
#include "exports.h"
#include "cameraparameters.h"
using namespace std;
@@ -81,12 +81,12 @@ public:
* @param setYPerperdicular If set the Y axis will be perpendicular to the surface. Otherwise, it will be the Z axis
*/
void calculateExtrinsics(float markerSize,cv::Mat CameraMatrix,cv::Mat Distorsion=cv::Mat(),bool setYPerperdicular=true)throw(cv::Exception);
-
+
/**Given the extrinsic camera parameters returns the GL_MODELVIEW matrix for opengl.
* Setting this matrix, the reference coordinate system will be set in this marker
*/
void glGetModelViewMatrix( double modelview_matrix[16])throw(cv::Exception);
-
+
/**
* Returns position vector and orientation quaternion for an Ogre scene node or entity.
* Use:
@@ -97,8 +97,8 @@ public:
* mySceneNode->setOrientation( ogreOrient );
* ...
*/
- void OgreGetPoseParameters( double position[3], double orientation[4] )throw(cv::Exception);
-
+ void OgreGetPoseParameters( double position[3], double orientation[4] )throw(cv::Exception);
+
/**Returns the centroid of the marker
*/
cv::Point2f getCenter()const;
@@ -132,11 +132,11 @@ public:
return str;
}
-
-
+
+
private:
void rotateXAxis(cv::Mat &rotation);
-
+
};
}
diff --git a/ftnoir_tracker_aruco/include/markerdetector.h b/ftnoir_tracker_aruco/include/markerdetector.h
index 4d6e7b90..a4656527 100644
--- a/ftnoir_tracker_aruco/include/markerdetector.h
+++ b/ftnoir_tracker_aruco/include/markerdetector.h
@@ -27,7 +27,7 @@ or implied, of Rafael Muñoz Salinas.
********************************/
#ifndef _ARUCO_MarkerDetector_H
#define _ARUCO_MarkerDetector_H
-#include <opencv2/opencv.hpp>
+#include <opencv2/core/core.hpp>
#include <cstdio>
#include <iostream>
#include "cameraparameters.h"
@@ -47,7 +47,7 @@ class ARUCO_EXPORTS MarkerDetector
class MarkerCandidate: public Marker{
public:
MarkerCandidate(){}
- MarkerCandidate(const Marker &M): Marker(M){}
+ MarkerCandidate(const Marker &M): Marker(M){}
MarkerCandidate(const MarkerCandidate &M): Marker(M){
contour=M.contour;
idx=M.idx;
@@ -60,20 +60,20 @@ class ARUCO_EXPORTS MarkerDetector
idx=M.idx;
return M;
}
-
+
vector<cv::Point> contour;//all the points of its contour
int idx;//index position in the global contour list
};
public:
/**
- * See
+ * See
*/
- MarkerDetector();
+ MarkerDetector() {}
/**
*/
- ~MarkerDetector();
+ ~MarkerDetector() {}
/**Detects the markers in the image passed
*
@@ -161,17 +161,17 @@ public:
* of cols and rows.
* @param min size of the contour to consider a possible marker as valid (0,1]
* @param max size of the contour to consider a possible marker as valid [0,1)
- *
+ *
*/
void setMinMaxSize(float min=0.03,float max=0.5)throw(cv::Exception);
-
+
/**reads the min and max sizes employed
* @param min output size of the contour to consider a possible marker as valid (0,1]
* @param max output size of the contour to consider a possible marker as valid [0,1)
- *
+ *
*/
void getMinMaxSize(float &min,float &max){min=_minSize;max=_maxSize;}
-
+
/**Enables/Disables erosion process that is REQUIRED for chessboard like boards.
* By default, this property is enabled
*/
@@ -210,10 +210,10 @@ public:
markerIdDetector_ptrfunc=markerdetector_func;
}
- /** Use an smaller version of the input image for marker detection.
+ /** Use an smaller version of the input image for marker detection.
* If your marker is small enough, you can employ an smaller image to perform the detection without noticeable reduction in the precision.
* Internally, we are performing a pyrdown operation
- *
+ *
* @param level number of times the image size is divided by 2. Internally, we are performing a pyrdown.
*/
void pyrDown(unsigned int level){pyrdown_level=level;}
@@ -247,17 +247,17 @@ public:
* @return true if the operation succeed
*/
bool warp(cv::Mat &in,cv::Mat &out,cv::Size size, std::vector<cv::Point2f> points)throw (cv::Exception);
-
-
-
+
+
+
/** Refine MarkerCandidate Corner using LINES method
* @param candidate candidate to refine corners
*/
- void refineCandidateLines(MarkerCandidate &candidate);
-
-
+ void refineCandidateLines(MarkerCandidate &candidate);
+
+
/**DEPRECATED!!! Use the member function in CameraParameters
- *
+ *
* Given the intrinsic camera parameters returns the GL_PROJECTION matrix for opengl.
* PLease NOTE that when using OpenGL, it is assumed no camera distorsion! So, if it is not true, you should have
* undistor image
@@ -308,26 +308,26 @@ private:
*/
int perimeter(std::vector<cv::Point2f> &a);
-
+
// //GL routines
-//
+//
// static void argConvGLcpara2( double cparam[3][4], int width, int height, double gnear, double gfar, double m[16], bool invert )throw(cv::Exception);
// static int arParamDecompMat( double source[3][4], double cpara[3][4], double trans[3][4] )throw(cv::Exception);
// static double norm( double a, double b, double c );
// static double dot( double a1, double a2, double a3,
// double b1, double b2, double b3 );
-//
+//
//detection of the
void findBestCornerInRegion_harris(const cv::Mat & grey,vector<cv::Point2f> & Corners,int blockSize);
-
-
+
+
// auxiliar functions to perform LINES refinement
void interpolate2Dline( const vector< cv::Point > &inPoints, cv::Point3f &outLine);
- cv::Point2f getCrossPoint(const cv::Point3f& line1, const cv::Point3f& line2);
-
-
- /**Given a vector vinout with elements and a boolean vector indicating the lements from it to remove,
+ cv::Point2f getCrossPoint(const cv::Point3f& line1, const cv::Point3f& line2);
+
+
+ /**Given a vector vinout with elements and a boolean vector indicating the lements from it to remove,
* this function remove the elements
* @param vinout
* @param toRemove
diff --git a/ftnoir_tracker_aruco/trans_calib.h b/ftnoir_tracker_aruco/trans_calib.h
index 5c321b2c..c2c02b38 100644
--- a/ftnoir_tracker_aruco/trans_calib.h
+++ b/ftnoir_tracker_aruco/trans_calib.h
@@ -8,7 +8,7 @@
#ifndef TRANSCALIB_H
#define TRANSCALIB_H
-#include <opencv2/opencv.hpp>
+#include <opencv2/core/core.hpp>
//-----------------------------------------------------------------------------
// Calibrates the translation from head to model = t_MH
diff --git a/ftnoir_tracker_pt/camera.h b/ftnoir_tracker_pt/camera.h
index 7ebbcb67..889bf2d3 100644
--- a/ftnoir_tracker_pt/camera.h
+++ b/ftnoir_tracker_pt/camera.h
@@ -8,7 +8,7 @@
#ifndef CAMERA_H
#define CAMERA_H
-#include <opencv2/opencv.hpp>
+#include <opencv2/core/core.hpp>
#ifndef OPENTRACK_API
# include <boost/shared_ptr.hpp>
#else
diff --git a/ftnoir_tracker_pt/ftnoir_tracker_pt.h b/ftnoir_tracker_pt/ftnoir_tracker_pt.h
index 5bcfd37d..fff8d4ab 100644
--- a/ftnoir_tracker_pt/ftnoir_tracker_pt.h
+++ b/ftnoir_tracker_pt/ftnoir_tracker_pt.h
@@ -22,7 +22,7 @@
#include <QMutex>
#include <QMutexLocker>
#include <QTime>
-#include <opencv2/opencv.hpp>
+#include <opencv2/core/core.hpp>
#include <atomic>
#ifndef OPENTRACK_API
# include <boost/shared_ptr.hpp>
@@ -36,53 +36,53 @@
class Tracker : public ITracker, protected QThread
{
public:
- Tracker();
+ Tracker();
~Tracker() override;
void StartTracker(QFrame* parent_window) override;
void GetHeadPoseData(double* data) override;
void apply(settings& s);
- void apply_inner();
- void center();
- void reset(); // reset the trackers internal state variables
+ void apply_inner();
+ void center();
+ void reset(); // reset the trackers internal state variables
- void get_pose(FrameTrafo* X_CM) { QMutexLocker lock(&mutex); *X_CM = point_tracker.get_pose(); }
- int get_n_points() { QMutexLocker lock(&mutex); return point_extractor.get_points().size(); }
- void get_cam_info(CamInfo* info) { QMutexLocker lock(&mutex); *info = camera.get_info(); }
+ void get_pose(FrameTrafo* X_CM) { QMutexLocker lock(&mutex); *X_CM = point_tracker.get_pose(); }
+ int get_n_points() { QMutexLocker lock(&mutex); return point_extractor.get_points().size(); }
+ void get_cam_info(CamInfo* info) { QMutexLocker lock(&mutex); *info = camera.get_info(); }
protected:
void run() override;
private:
- QMutex mutex;
- // thread commands
- enum Command {
- ABORT = 1<<0
- };
- void set_command(Command command);
- void reset_command(Command command);
+ QMutex mutex;
+ // thread commands
+ enum Command {
+ ABORT = 1<<0
+ };
+ void set_command(Command command);
+ void reset_command(Command command);
volatile int commands;
CVCamera camera;
- FrameRotation frame_rotation;
- PointExtractor point_extractor;
- PointTracker point_tracker;
+ FrameRotation frame_rotation;
+ PointExtractor point_extractor;
+ PointTracker point_tracker;
- FrameTrafo X_GH_0; // for centering
- cv::Vec3f t_MH; // translation from model frame to head frame
- cv::Matx33f R_GC; // rotation from opengl reference frame to camera frame
+ FrameTrafo X_GH_0; // for centering
+ cv::Vec3f t_MH; // translation from model frame to head frame
+ cv::Matx33f R_GC; // rotation from opengl reference frame to camera frame
- // --- ui ---
- cv::Mat frame; // the output frame for display
+ // --- ui ---
+ cv::Mat frame; // the output frame for display
PTVideoWidget* video_widget;
- QFrame* video_frame;
-
+ QFrame* video_frame;
+
settings s;
std::atomic<settings*> new_settings;
Timer time;
-
+
static constexpr double rad2deg = 180.0/3.14159265;
static constexpr double deg2rad = 3.14159265/180.0;
-
+
PointModel model;
};
diff --git a/ftnoir_tracker_pt/ftnoir_tracker_pt_dialog.cpp b/ftnoir_tracker_pt/ftnoir_tracker_pt_dialog.cpp
index 6cd6135c..3af7b560 100644
--- a/ftnoir_tracker_pt/ftnoir_tracker_pt_dialog.cpp
+++ b/ftnoir_tracker_pt/ftnoir_tracker_pt_dialog.cpp
@@ -9,7 +9,7 @@
#include <QMessageBox>
#include <QDebug>
-#include <opencv2/opencv.hpp>
+#include <opencv2/core/core.hpp>
#ifndef OPENTRACK_API
# include <boost/shared_ptr.hpp>
#else
@@ -25,14 +25,14 @@ TrackerDialog::TrackerDialog()
timer(this),
trans_calib_running(false)
{
- ui.setupUi( this );
+ ui.setupUi( this );
vector<string> device_names;
- get_camera_device_names(device_names);
+ get_camera_device_names(device_names);
for (vector<string>::iterator iter = device_names.begin(); iter != device_names.end(); ++iter)
- {
- ui.camdevice_combo->addItem(iter->c_str());
- }
+ {
+ ui.camdevice_combo->addItem(iter->c_str());
+ }
ui.camroll_combo->addItem("-90");
ui.camroll_combo->addItem("0");
@@ -82,7 +82,7 @@ TrackerDialog::TrackerDialog()
connect(ui.model_tabs, SIGNAL(currentChanged(int)), this, SLOT(set_model(int)));
connect(&timer,SIGNAL(timeout()), this,SLOT(poll_tracker_info()));
- timer.start(100);
+ timer.start(100);
connect(ui.buttonBox_2, SIGNAL(clicked(QAbstractButton*)), this, SLOT(do_apply_without_saving(QAbstractButton*)));
}
@@ -96,7 +96,7 @@ void TrackerDialog::set_model_clip()
s.m02_y = -static_cast<double>(s.clip_by);
s.m02_z = -static_cast<double>(s.clip_bz);
- settings_changed();
+ settings_changed();
}
void TrackerDialog::set_model_cap()
@@ -108,12 +108,12 @@ void TrackerDialog::set_model_cap()
s.m02_y = -static_cast<double>(s.cap_y);
s.m02_z = -static_cast<double>(s.cap_z);
- settings_changed();
+ settings_changed();
}
void TrackerDialog::set_model_custom()
{
- settings_changed();
+ settings_changed();
}
void TrackerDialog::set_model(int val)
@@ -123,38 +123,38 @@ void TrackerDialog::set_model(int val)
void TrackerDialog::startstop_trans_calib(bool start)
{
- if (start)
- {
- qDebug()<<"TrackerDialog:: Starting translation calibration";
- trans_calib.reset();
- trans_calib_running = true;
- }
- else
- {
- qDebug()<<"TrackerDialog:: Stoppping translation calibration";
- trans_calib_running = false;
+ if (start)
+ {
+ qDebug()<<"TrackerDialog:: Starting translation calibration";
+ trans_calib.reset();
+ trans_calib_running = true;
+ }
+ else
+ {
+ qDebug()<<"TrackerDialog:: Stoppping translation calibration";
+ trans_calib_running = false;
{
auto tmp = trans_calib.get_estimate();
s.t_MH_x = tmp[0];
s.t_MH_y = tmp[1];
s.t_MH_z = tmp[2];
}
- settings_changed();
- }
+ settings_changed();
+ }
}
void TrackerDialog::poll_tracker_info()
{
if (tracker)
- {
+ {
QString to_print;
-
+
// display caminfo
CamInfo info;
tracker->get_cam_info(&info);
to_print = QString::number(info.res_x)+"x"+QString::number(info.res_y)+" @ "+QString::number(info.fps)+" FPS";
ui.caminfo_label->setText(to_print);
-
+
// display pointinfo
int n_points = tracker->get_n_points();
to_print = QString::number(n_points);
@@ -163,7 +163,7 @@ void TrackerDialog::poll_tracker_info()
else
to_print += " BAD!";
ui.pointinfo_label->setText(to_print);
-
+
// update calibration
if (trans_calib_running) trans_calib_step();
}
@@ -177,16 +177,16 @@ void TrackerDialog::poll_tracker_info()
void TrackerDialog::trans_calib_step()
{
- if (tracker)
- {
- FrameTrafo X_CM;
- tracker->get_pose(&X_CM);
- trans_calib.update(X_CM.R, X_CM.t);
- cv::Vec3f t_MH = trans_calib.get_estimate();
+ if (tracker)
+ {
+ FrameTrafo X_CM;
+ tracker->get_pose(&X_CM);
+ trans_calib.update(X_CM.R, X_CM.t);
+ cv::Vec3f t_MH = trans_calib.get_estimate();
s.t_MH_x = t_MH[0];
s.t_MH_y = t_MH[1];
s.t_MH_z = t_MH[2];
- }
+ }
}
void TrackerDialog::settings_changed()
@@ -203,7 +203,7 @@ void TrackerDialog::save()
void TrackerDialog::doOK()
{
save();
- close();
+ close();
}
void TrackerDialog::do_apply_without_saving(QAbstractButton*)
@@ -225,7 +225,7 @@ void TrackerDialog::do_apply_without_saving(QAbstractButton*)
void TrackerDialog::doApply()
{
- save();
+ save();
}
void TrackerDialog::doCancel()
@@ -236,23 +236,23 @@ void TrackerDialog::doCancel()
void TrackerDialog::registerTracker(ITracker *t)
{
- qDebug()<<"TrackerDialog:: Tracker registered";
- tracker = static_cast<Tracker*>(t);
+ qDebug()<<"TrackerDialog:: Tracker registered";
+ tracker = static_cast<Tracker*>(t);
if (isVisible() & s.b->modifiedp())
tracker->apply(s);
- ui.tcalib_button->setEnabled(true);
- //ui.center_button->setEnabled(true);
+ ui.tcalib_button->setEnabled(true);
+ //ui.center_button->setEnabled(true);
}
void TrackerDialog::unRegisterTracker()
{
- qDebug()<<"TrackerDialog:: Tracker un-registered";
- tracker = NULL;
- ui.tcalib_button->setEnabled(false);
- //ui.center_button->setEnabled(false);
+ qDebug()<<"TrackerDialog:: Tracker un-registered";
+ tracker = NULL;
+ ui.tcalib_button->setEnabled(false);
+ //ui.center_button->setEnabled(false);
}
extern "C" OPENTRACK_EXPORT ITrackerDialog* GetDialog( )
{
- return new TrackerDialog;
+ return new TrackerDialog;
}
diff --git a/ftnoir_tracker_pt/ftnoir_tracker_pt_settings.h b/ftnoir_tracker_pt/ftnoir_tracker_pt_settings.h
index 365776e4..e8cac679 100644
--- a/ftnoir_tracker_pt/ftnoir_tracker_pt_settings.h
+++ b/ftnoir_tracker_pt/ftnoir_tracker_pt_settings.h
@@ -8,7 +8,7 @@
#ifndef FTNOIR_TRACKER_PT_SETTINGS_H
#define FTNOIR_TRACKER_PT_SETTINGS_H
-#include <opencv2/opencv.hpp>
+#include <opencv2/core/core.hpp>
#include "point_tracker.h"
#include "facetracknoir/options.h"
@@ -36,7 +36,7 @@ struct settings
value<int> clip_ty, clip_tz, clip_by, clip_bz;
value<int> active_model_panel, cap_x, cap_y, cap_z;
-
+
// XXX todo red channel only, good for crapola CCD sensors -sh 20140922
settings() :
diff --git a/ftnoir_tracker_pt/point_extractor.h b/ftnoir_tracker_pt/point_extractor.h
index 3ef82900..5252b68d 100644
--- a/ftnoir_tracker_pt/point_extractor.h
+++ b/ftnoir_tracker_pt/point_extractor.h
@@ -8,8 +8,8 @@
#ifndef POINTEXTRACTOR_H
#define POINTEXTRACTOR_H
-#include <opencv2/opencv.hpp>
-#include <opencv2/imgproc/imgproc_c.h>
+#include <opencv2/core/core.hpp>
+#include <opencv2/imgproc/imgproc.hpp>
// ----------------------------------------------------------------------------
// Extracts points from an opencv image
diff --git a/ftnoir_tracker_pt/point_tracker.h b/ftnoir_tracker_pt/point_tracker.h
index c8212538..d65494a4 100644
--- a/ftnoir_tracker_pt/point_tracker.h
+++ b/ftnoir_tracker_pt/point_tracker.h
@@ -8,7 +8,7 @@
#ifndef POINTTRACKER_H
#define POINTTRACKER_H
-#include <opencv2/opencv.hpp>
+#include <opencv2/core/core.hpp>
#ifndef OPENTRACK_API
# include <boost/shared_ptr.hpp>
#else
@@ -21,31 +21,31 @@
class FrameTrafo
{
public:
- FrameTrafo() : R(cv::Matx33f::eye()), t(0,0,0) {}
- FrameTrafo(const cv::Matx33f& R, const cv::Vec3f& t) : R(R),t(t) {}
+ FrameTrafo() : R(cv::Matx33f::eye()), t(0,0,0) {}
+ FrameTrafo(const cv::Matx33f& R, const cv::Vec3f& t) : R(R),t(t) {}
- cv::Matx33f R;
- cv::Vec3f t;
+ cv::Matx33f R;
+ cv::Vec3f t;
};
inline FrameTrafo operator*(const FrameTrafo& X, const FrameTrafo& Y)
{
- return FrameTrafo(X.R*Y.R, X.R*Y.t + X.t);
+ return FrameTrafo(X.R*Y.R, X.R*Y.t + X.t);
}
inline FrameTrafo operator*(const cv::Matx33f& X, const FrameTrafo& Y)
{
- return FrameTrafo(X*Y.R, X*Y.t);
+ return FrameTrafo(X*Y.R, X*Y.t);
}
inline FrameTrafo operator*(const FrameTrafo& X, const cv::Matx33f& Y)
{
- return FrameTrafo(X.R*Y, X.t);
+ return FrameTrafo(X.R*Y, X.t);
}
inline cv::Vec3f operator*(const FrameTrafo& X, const cv::Vec3f& v)
{
- return X.R*v + X.t;
+ return X.R*v + X.t;
}
@@ -55,28 +55,28 @@ inline cv::Vec3f operator*(const FrameTrafo& X, const cv::Vec3f& v)
// [Denis Oberkampf, Daniel F. DeMenthon, Larry S. Davis: "Iterative Pose Estimation Using Coplanar Feature Points"]
class PointModel
{
- friend class PointTracker;
+ friend class PointTracker;
public:
- static constexpr int N_POINTS = 3;
+ static constexpr int N_POINTS = 3;
- PointModel(cv::Vec3f M01, cv::Vec3f M02);
+ PointModel(cv::Vec3f M01, cv::Vec3f M02);
PointModel();
- inline const cv::Vec3f& get_M01() const { return M01; }
- inline const cv::Vec3f& get_M02() const { return M02; }
+ inline const cv::Vec3f& get_M01() const { return M01; }
+ inline const cv::Vec3f& get_M02() const { return M02; }
private:
- cv::Vec3f M01; // M01 in model frame
- cv::Vec3f M02; // M02 in model frame
+ cv::Vec3f M01; // M01 in model frame
+ cv::Vec3f M02; // M02 in model frame
- cv::Vec3f u; // unit vector perpendicular to M01,M02-plane
+ cv::Vec3f u; // unit vector perpendicular to M01,M02-plane
- cv::Matx22f P;
+ cv::Matx22f P;
- cv::Vec2f d; // determinant vector for point correspondence
- int d_order[3]; // sorting of projected model points with respect to d scalar product
+ cv::Vec2f d; // determinant vector for point correspondence
+ int d_order[3]; // sorting of projected model points with respect to d scalar product
- void get_d_order(const std::vector<cv::Vec2f>& points, int d_order[]) const;
+ void get_d_order(const std::vector<cv::Vec2f>& points, int d_order[]) const;
};
// ----------------------------------------------------------------------------
@@ -86,29 +86,29 @@ private:
class PointTracker
{
public:
- PointTracker();
- // track the pose using the set of normalized point coordinates (x pos in range -0.5:0.5)
- // f : (focal length)/(sensor width)
- // dt : time since last call
- void track(const std::vector<cv::Vec2f>& projected_points, const PointModel& model);
- FrameTrafo get_pose() const { return X_CM; }
- void reset();
+ PointTracker();
+ // track the pose using the set of normalized point coordinates (x pos in range -0.5:0.5)
+ // f : (focal length)/(sensor width)
+ // dt : time since last call
+ void track(const std::vector<cv::Vec2f>& projected_points, const PointModel& model);
+ FrameTrafo get_pose() const { return X_CM; }
+ void reset();
private:
// the points in model order
typedef struct { cv::Vec2f points[PointModel::N_POINTS]; } PointOrder;
- static constexpr float focal_length = 1.0f;
-
- inline cv::Vec2f project(const cv::Vec3f& v_M)
- {
- cv::Vec3f v_C = X_CM * v_M;
- return cv::Vec2f(focal_length*v_C[0]/v_C[2], focal_length*v_C[1]/v_C[2]);
- }
+ static constexpr float focal_length = 1.0f;
+
+ inline cv::Vec2f project(const cv::Vec3f& v_M)
+ {
+ cv::Vec3f v_C = X_CM * v_M;
+ return cv::Vec2f(focal_length*v_C[0]/v_C[2], focal_length*v_C[1]/v_C[2]);
+ }
PointOrder find_correspondences(const std::vector<cv::Vec2f>& projected_points, const PointModel &model);
int POSIT(const PointModel& point_model, const PointOrder& order); // The POSIT algorithm, returns the number of iterations
-
- FrameTrafo X_CM; // trafo from model to camera
+
+ FrameTrafo X_CM; // trafo from model to camera
};
#endif //POINTTRACKER_H
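The FrameTrafo operators re-indented above define rigid-transform composition, (X*Y).R = X.R*Y.R and (X*Y).t = X.R*Y.t + X.t, and X*v applies a transform to a point. A small sketch using only what the header declares; illustrative, not part of the patch.

    #include "point_tracker.h"

    // Compose a camera-from-model transform with a model-from-head offset,
    // then map a point given in the head frame into the camera frame.
    void example()
    {
        FrameTrafo X_CM(cv::Matx33f::eye(), cv::Vec3f(0, 0, 1));     // camera <- model
        FrameTrafo X_MH(cv::Matx33f::eye(), cv::Vec3f(0, 0.1f, 0));  // model  <- head
        FrameTrafo X_CH = X_CM * X_MH;                                // camera <- head

        cv::Vec3f p_H(0, 0, 0);       // a point at the head-frame origin
        cv::Vec3f p_C = X_CH * p_H;   // equals X_CM.R*X_MH.t + X_CM.t
        (void) p_C;
    }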
diff --git a/ftnoir_tracker_pt/pt_video_widget.h b/ftnoir_tracker_pt/pt_video_widget.h
index de2c7efb..f2b41d63 100644
--- a/ftnoir_tracker_pt/pt_video_widget.h
+++ b/ftnoir_tracker_pt/pt_video_widget.h
@@ -10,7 +10,7 @@
#include <QObject>
#include <QTime>
#include <QDialog>
-#include <opencv2/opencv.hpp>
+#include <opencv2/core/core.hpp>
#ifndef OPENTRACK_API
# include <QGLWidget>
# include <boost/shared_ptr.hpp>
diff --git a/ftnoir_tracker_pt/trans_calib.h b/ftnoir_tracker_pt/trans_calib.h
index 5c321b2c..c2c02b38 100644
--- a/ftnoir_tracker_pt/trans_calib.h
+++ b/ftnoir_tracker_pt/trans_calib.h
@@ -8,7 +8,7 @@
#ifndef TRANSCALIB_H
#define TRANSCALIB_H
-#include <opencv2/opencv.hpp>
+#include <opencv2/core/core.hpp>
//-----------------------------------------------------------------------------
// Calibrates the translation from head to model = t_MH