Diffstat (limited to 'tracker-kinect-face')
-rw-r--r--  tracker-kinect-face/CMakeLists.txt            |  55
-rw-r--r--  tracker-kinect-face/camera_kinect_ir.cpp      | 301
-rw-r--r--  tracker-kinect-face/camera_kinect_ir.h        |  82
-rw-r--r--  tracker-kinect-face/images/kinect.png         | bin 0 -> 217 bytes
-rw-r--r--  tracker-kinect-face/kinect_face.qrc           |   5
-rw-r--r--  tracker-kinect-face/kinect_face_settings.cpp  |  38
-rw-r--r--  tracker-kinect-face/kinect_face_settings.h    |  32
-rw-r--r--  tracker-kinect-face/kinect_face_settings.ui   |  74
-rw-r--r--  tracker-kinect-face/kinect_face_tracker.cpp   | 614
-rw-r--r--  tracker-kinect-face/kinect_face_tracker.h     | 121
-rw-r--r--  tracker-kinect-face/lang/nl_NL.ts             |  29
-rw-r--r--  tracker-kinect-face/lang/ru_RU.ts             |  29
-rw-r--r--  tracker-kinect-face/lang/stub.ts              |  29
-rw-r--r--  tracker-kinect-face/lang/zh_CN.ts             |  29
14 files changed, 1438 insertions, 0 deletions
diff --git a/tracker-kinect-face/CMakeLists.txt b/tracker-kinect-face/CMakeLists.txt
new file mode 100644
index 00000000..d205a764
--- /dev/null
+++ b/tracker-kinect-face/CMakeLists.txt
@@ -0,0 +1,55 @@
+# Kinect SDK is Windows only
+if (WIN32 AND opentrack-intel)
+ # Setup cache variable to Kinect SDK path
+ set(SDK_KINECT20 "$ENV{KINECTSDK20_DIR}" CACHE PATH "Kinect SDK path")
+ # If we have a valid SDK path, try to build this tracker
+ if(SDK_KINECT20)
+ # Register our module
+ otr_module(tracker-kinect-face)
+
+ if(MSVC)
+ # workaround warning in SDK
+ target_compile_options(${self} PRIVATE "-wd4471")
+ endif()
+
+ # Add include path to Kinect SDK
+ target_include_directories(${self} SYSTEM PRIVATE "${SDK_KINECT20}/inc")
+
+ # Check processor architecture
+ if(opentrack-64bit)
+ # 64 bits
+ set(kinect-arch-dir "x64")
+ else()
+ # 32 bits
+ set(kinect-arch-dir "x86")
+ endif()
+
+ # Link against Kinect SDK libraries
+ target_link_libraries(${self} "${SDK_KINECT20}/lib/${kinect-arch-dir}/Kinect20.lib" "${SDK_KINECT20}/lib/${kinect-arch-dir}/Kinect20.Face.lib")
+ # Link against video utilities, needed for video preview
+ target_link_libraries(${self} opentrack-video)
+
+ # Install Kinect Face DLL
+ install(FILES "${SDK_KINECT20}/Redist/Face/${kinect-arch-dir}/Kinect20.Face.dll" DESTINATION "${opentrack-libexec}" PERMISSIONS ${opentrack-perms-exec})
+ # Install Kinect Face Database
+ install(DIRECTORY "${SDK_KINECT20}/Redist/Face/${kinect-arch-dir}/NuiDatabase" DESTINATION "${opentrack-libexec}")
+
+ set(redist-dir "${CMAKE_SOURCE_DIR}/redist/${kinect-arch-dir}")
+ #install(
+ # FILES "${redist-dir}/msvcp110.dll" "${redist-dir}/msvcr110.dll"
+ # DESTINATION "${opentrack-libexec}"
+ # PERMISSIONS ${opentrack-perms-exec}
+ #)
+
+ # Optional OpenCV support
+ # Needed for Point Tracker to support Kinect V2 IR Sensor
+ include(opentrack-opencv)
+ find_package(OpenCV QUIET)
+
+ if(OpenCV_FOUND)
+ add_definitions(-DOTR_HAVE_OPENCV)
+ target_include_directories(${self} SYSTEM PUBLIC ${OpenCV_INCLUDE_DIRS})
+ target_link_libraries(${self} opencv_imgproc opentrack-cv opencv_core opentrack-video)
+ endif()
+ endif()
+endif()
diff --git a/tracker-kinect-face/camera_kinect_ir.cpp b/tracker-kinect-face/camera_kinect_ir.cpp
new file mode 100644
index 00000000..3a33fd14
--- /dev/null
+++ b/tracker-kinect-face/camera_kinect_ir.cpp
@@ -0,0 +1,301 @@
+/* Copyright (c) 2019, Stephane Lenclud <github@lenclud.com>
+
+ * Permission to use, copy, modify, and/or distribute this
+ * software for any purpose with or without fee is hereby granted,
+ * provided that the above copyright notice and this permission
+ * notice appear in all copies.
+ */
+
+#include "camera_kinect_ir.h"
+
+#ifdef OTR_HAVE_OPENCV
+
+ //#include "frame.hpp"
+
+#include "compat/sleep.hpp"
+#include "compat/math-imports.hpp"
+#include "compat/camera-names.hpp"
+
+#include <opencv2/imgproc.hpp>
+#include <cstdlib>
+
+namespace Kinect {
+
+ static const char KKinectIRSensor[] = "Kinect V2 IR Sensor";
+
+ // Safe release for interfaces
+ template<class Interface>
+ inline void SafeRelease(Interface *& pInterfaceToRelease)
+ {
+ if (pInterfaceToRelease != NULL)
+ {
+ pInterfaceToRelease->Release();
+ pInterfaceToRelease = NULL;
+ }
+ }
+
+ CamerasProvider::CamerasProvider() = default;
+
+ std::unique_ptr<video::impl::camera> CamerasProvider::make_camera(const QString& name)
+ {
+ if (name.compare(KKinectIRSensor) == 0)
+ {
+ return std::make_unique<InfraredCamera>();
+ }
+
+ return nullptr;
+ }
+
+ std::vector<QString> CamerasProvider::camera_names() const
+ {
+ auto list = get_camera_names();
+ auto it = std::find_if(list.cbegin(), list.cend(), [](const auto& x) {
+ const auto& [name, idx] = x;
+ return name.startsWith("Kinect V2 Video Sensor [");
+ });
+ if (it != list.cend())
+ {
+ // We found the Kinect V2 Video Sensor, therefore a Kinect V2 is connected.
+ // Publish our Kinect V2 IR Sensor implementation then.
+ return { KKinectIRSensor };
+ }
+ else
+ {
+ return {};
+ }
+ }
+
+ bool CamerasProvider::can_show_dialog(const QString& camera_name)
+ {
+ return false;
+ }
+
+ bool CamerasProvider::show_dialog(const QString& camera_name)
+ {
+ return false;
+ }
+
+ // Register our camera provider thus making sure Point Tracker can use Kinect V2 IR Sensor
+ OTR_REGISTER_CAMERA(CamerasProvider)
+
+
+ InfraredCamera::InfraredCamera()
+ {
+ }
+
+
+ InfraredCamera::~InfraredCamera()
+ {
+ stop();
+ }
+
+ bool InfraredCamera::show_dialog()
+ {
+ return false;
+ }
+
+ bool InfraredCamera::is_open()
+ {
+ return iInfraredFrameReader != nullptr;
+ }
+
+ ///
+ /// Wait until we get a first frame
+ ///
+ void InfraredCamera::WaitForFirstFrame()
+ {
+ bool new_frame = false;
+ int attempts = 200; // Kinect cold start can take a while
+ while (!new_frame && attempts > 0)
+ {
+ new_frame = get_frame_(iMatFrame);
+ portable::sleep(100);
+ --attempts;
+ }
+ }
+
+
+
+ std::tuple<const video::impl::frame&, bool> InfraredCamera::get_frame()
+ {
+ bool new_frame = false;
+ new_frame = get_frame_(iMatFrame);
+
+ iFrame.data = iMatFrame.ptr();
+ iFrame.width = iWidth;
+ iFrame.height = iHeight;
+ iFrame.stride = cv::Mat::AUTO_STEP;
+ iFrame.channels = iMatFrame.channels();
+ iFrame.channel_size = iMatFrame.elemSize1();
+ return { iFrame, new_frame };
+ }
+
+ ///
+ /// Open the default Kinect sensor and start streaming IR frames.
+ ///
+ bool InfraredCamera::start(info& aInfo)
+ {
+ stop();
+
+ HRESULT hr;
+
+ // Get and open Kinect sensor
+ hr = GetDefaultKinectSensor(&iKinectSensor);
+ if (SUCCEEDED(hr))
+ {
+ hr = iKinectSensor->Open();
+ }
+
+ // Create infrared frame reader
+ if (SUCCEEDED(hr))
+ {
+ // Initialize the Kinect and get the infrared reader
+ IInfraredFrameSource* pInfraredFrameSource = NULL;
+
+ hr = iKinectSensor->Open();
+
+ if (SUCCEEDED(hr))
+ {
+ hr = iKinectSensor->get_InfraredFrameSource(&pInfraredFrameSource);
+ }
+
+ if (SUCCEEDED(hr))
+ {
+ hr = pInfraredFrameSource->OpenReader(&iInfraredFrameReader);
+ }
+
+ SafeRelease(pInfraredFrameSource);
+
+ if (SUCCEEDED(hr))
+ {
+ iKinectSensor->get_CoordinateMapper(&iCoordinateMapper);
+ }
+ }
+
+
+ if (SUCCEEDED(hr))
+ {
+ WaitForFirstFrame();
+ bool success = iMatFrame.ptr() != nullptr;
+ if (success)
+ {
+ // Provide frame info
+ aInfo.width = iWidth;
+ aInfo.height = iHeight;
+
+ CameraIntrinsics intrinsics;
+ hr = iCoordinateMapper->GetDepthCameraIntrinsics(&intrinsics);
+ if (SUCCEEDED(hr))
+ {
+ aInfo.fx = intrinsics.FocalLengthX;
+ aInfo.fy = intrinsics.FocalLengthY;
+ aInfo.P_x = intrinsics.PrincipalPointX;
+ aInfo.P_y = intrinsics.PrincipalPointY;
+ aInfo.dist_c[1] = intrinsics.RadialDistortionSecondOrder;
+ aInfo.dist_c[3] = intrinsics.RadialDistortionFourthOrder;
+ aInfo.dist_c[5] = intrinsics.RadialDistortionSixthOrder;
+ }
+
+ }
+
+ return success;
+ }
+
+ stop();
+ return false;
+ }
+
+ void InfraredCamera::stop()
+ {
+ // done with infrared frame reader
+ SafeRelease(iInfraredFrame);
+ SafeRelease(iInfraredFrameReader);
+
+ // close the Kinect Sensor
+ if (iKinectSensor)
+ {
+ iKinectSensor->Close();
+ }
+
+ SafeRelease(iCoordinateMapper);
+ SafeRelease(iKinectSensor);
+
+ // Free up our memory buffer if any
+ iMatFrame = cv::Mat();
+ }
+
+ bool InfraredCamera::get_frame_(cv::Mat& aFrame)
+ {
+
+ if (!iInfraredFrameReader)
+ {
+ return false;
+ }
+
+ bool success = false;
+
+ // Release previous frame if any
+ SafeRelease(iInfraredFrame);
+
+ Sleep(34); // FIXME: crude pacing at roughly the sensor's 30 FPS
+ HRESULT hr = iInfraredFrameReader->AcquireLatestFrame(&iInfraredFrame);
+
+ if (SUCCEEDED(hr))
+ {
+ if (iFirstFrame)
+ {
+ IFrameDescription* frameDescription = NULL;
+
+ if (SUCCEEDED(hr))
+ {
+ hr = iInfraredFrame->get_FrameDescription(&frameDescription);
+ }
+
+ if (SUCCEEDED(hr))
+ {
+ hr = frameDescription->get_Width(&iWidth);
+ }
+
+ if (SUCCEEDED(hr))
+ {
+ hr = frameDescription->get_Height(&iHeight);
+ }
+
+ if (SUCCEEDED(hr))
+ {
+ hr = frameDescription->get_DiagonalFieldOfView(&iFov);
+ }
+
+ if (SUCCEEDED(hr))
+ {
+ iFirstFrame = false;
+ }
+
+ SafeRelease(frameDescription);
+ }
+
+
+ UINT nBufferSize = 0;
+ UINT16 *pBuffer = NULL;
+
+ if (SUCCEEDED(hr))
+ {
+ hr = iInfraredFrame->AccessUnderlyingBuffer(&nBufferSize, &pBuffer);
+ }
+
+ if (SUCCEEDED(hr))
+ {
+ // Create an OpenCV matrix with our 16-bit IR buffer
+ aFrame = cv::Mat(iHeight, iWidth, CV_16UC1, pBuffer, cv::Mat::AUTO_STEP);
+ // Any processing of the frame is left to the user
+ success = true;
+ }
+ }
+
+
+ return success;
+ }
+
+}
+
+#endif
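The provider above registers itself through OTR_REGISTER_CAMERA and hands out InfraredCamera instances by name. As a rough illustration of how a consumer might drive that interface, here is a minimal sketch written against the declarations in this patch only; the spelling of video::impl::camera::info and its members is assumed from how the diff uses them, not taken from the rest of the opentrack sources.

#include "camera_kinect_ir.h"

// Hypothetical consumer: enumerate, open and pull a single IR frame.
bool grab_one_ir_frame()
{
    Kinect::CamerasProvider provider;

    // camera_names() only lists the IR sensor when a "Kinect V2 Video Sensor"
    // shows up among the system cameras.
    auto names = provider.camera_names();
    if (names.empty())
        return false;

    std::unique_ptr<video::impl::camera> cam = provider.make_camera(names[0]);
    if (!cam)
        return false;

    // start() blocks until the first frame arrives and fills in the
    // resolution and depth camera intrinsics.
    video::impl::camera::info info {};
    if (!cam->start(info))
        return false;

    // get_frame() returns the frame plus a flag telling whether it is new.
    auto [frame, fresh] = cam->get_frame();
    bool ok = fresh && frame.width == info.width && frame.height == info.height;

    cam->stop();
    return ok;
}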
diff --git a/tracker-kinect-face/camera_kinect_ir.h b/tracker-kinect-face/camera_kinect_ir.h
new file mode 100644
index 00000000..83acb4d6
--- /dev/null
+++ b/tracker-kinect-face/camera_kinect_ir.h
@@ -0,0 +1,82 @@
+/* Copyright (c) 2019, Stephane Lenclud <github@lenclud.com>
+
+ * Permission to use, copy, modify, and/or distribute this
+ * software for any purpose with or without fee is hereby granted,
+ * provided that the above copyright notice and this permission
+ * notice appear in all copies.
+ */
+
+#pragma once
+
+#ifdef OTR_HAVE_OPENCV
+
+#include <Kinect.h>
+
+//#include "pt-api.hpp"
+#include "compat/timer.hpp"
+#include "video/camera.hpp"
+
+
+#include <memory>
+
+#include <opencv2/core.hpp>
+#include <opencv2/videoio.hpp>
+
+#include <QString>
+
+namespace Kinect {
+
+ struct CamerasProvider : video::impl::camera_
+ {
+ CamerasProvider();
+ std::vector<QString> camera_names() const override;
+ std::unique_ptr<video::impl::camera> make_camera(const QString& name) override;
+ bool can_show_dialog(const QString& camera_name) override;
+ bool show_dialog(const QString& camera_name) override;
+ };
+
+
+///
+/// Implement our camera interface using Kinect V2 SDK IR Sensor.
+///
+struct InfraredCamera final : video::impl::camera
+{
+ InfraredCamera();
+ ~InfraredCamera() override;
+
+ // From video::impl::camera
+ [[nodiscard]] bool start(info& args) override;
+ void stop() override;
+ bool is_open() override;
+ std::tuple<const video::impl::frame&, bool> get_frame() override;
+ [[nodiscard]] bool show_dialog() override;
+
+private:
+ bool get_frame_(cv::Mat& frame);
+ void WaitForFirstFrame();
+
+private:
+ // Current Kinect
+ IKinectSensor* iKinectSensor = nullptr;
+
+ // Infrared reader
+ IInfraredFrameReader* iInfraredFrameReader = nullptr;
+
+ // Frame needs to stay alive while we access the data buffer
+ IInfraredFrame* iInfraredFrame = nullptr;
+
+ // Coordinate mapper, used to fetch the depth camera intrinsics
+ ICoordinateMapper* iCoordinateMapper = nullptr;
+
+ video::frame iFrame;
+ cv::Mat iMatFrame;
+
+ float iFov = 0;
+ int iWidth = 0, iHeight = 0;
+ bool iFirstFrame = true;
+};
+
+}
+
+
+#endif
diff --git a/tracker-kinect-face/images/kinect.png b/tracker-kinect-face/images/kinect.png
new file mode 100644
index 00000000..fd8f5f77
--- /dev/null
+++ b/tracker-kinect-face/images/kinect.png
Binary files differ
diff --git a/tracker-kinect-face/kinect_face.qrc b/tracker-kinect-face/kinect_face.qrc
new file mode 100644
index 00000000..8b27c81e
--- /dev/null
+++ b/tracker-kinect-face/kinect_face.qrc
@@ -0,0 +1,5 @@
+<RCC>
+ <qresource prefix="/">
+ <file>images/kinect.png</file>
+ </qresource>
+</RCC>
diff --git a/tracker-kinect-face/kinect_face_settings.cpp b/tracker-kinect-face/kinect_face_settings.cpp
new file mode 100644
index 00000000..d8012aa3
--- /dev/null
+++ b/tracker-kinect-face/kinect_face_settings.cpp
@@ -0,0 +1,38 @@
+/* Copyright (c) 2019, Stephane Lenclud <github@lenclud.com>
+
+ * Permission to use, copy, modify, and/or distribute this
+ * software for any purpose with or without fee is hereby granted,
+ * provided that the above copyright notice and this permission
+ * notice appear in all copies.
+ */
+
+#include "kinect_face_settings.h"
+#include "kinect_face_tracker.h"
+#include "api/plugin-api.hpp"
+#include "compat/math-imports.hpp"
+#include "compat/library-path.hpp"
+
+#include <cmath>
+
+#include <QDesktopServices>
+#include <QUrl>
+#include <QPushButton>
+#include <QDebug>
+
+KinectFaceSettings::KinectFaceSettings()
+{
+ ui.setupUi(this);
+
+ connect(ui.buttonBox, &QDialogButtonBox::accepted, this, &KinectFaceSettings::close);
+ connect(ui.buttonBox, &QDialogButtonBox::rejected, this, &KinectFaceSettings::close);
+
+ static const QUrl path {"file:///" + application_base_path() + OPENTRACK_DOC_PATH "/3rdparty-notices/Kinect-V2-SDK-Eula.rtf" };
+
+ connect(ui.buttonBox, &QDialogButtonBox::helpRequested, [] {
+ QDesktopServices::openUrl(path);
+ });
+
+ ui.buttonBox->addButton(tr("Kinect license"), QDialogButtonBox::HelpRole);
+}
+
+OPENTRACK_DECLARE_TRACKER(KinectFaceTracker, KinectFaceSettings, KinectFaceMetadata)
diff --git a/tracker-kinect-face/kinect_face_settings.h b/tracker-kinect-face/kinect_face_settings.h
new file mode 100644
index 00000000..2c5cc55f
--- /dev/null
+++ b/tracker-kinect-face/kinect_face_settings.h
@@ -0,0 +1,32 @@
+/* Copyright (c) 2019, Stephane Lenclud <github@lenclud.com>
+
+ * Permission to use, copy, modify, and/or distribute this
+ * software for any purpose with or without fee is hereby granted,
+ * provided that the above copyright notice and this permission
+ * notice appear in all copies.
+ */
+
+#pragma once
+#include "ui_kinect_face_settings.h"
+#include "api/plugin-api.hpp"
+
+class KinectFaceSettings : public ITrackerDialog
+{
+ Q_OBJECT
+
+ Ui::KinectFaceUi ui;
+
+public:
+ KinectFaceSettings();
+ void register_tracker(ITracker *) override {}
+ void unregister_tracker() override {}
+};
+
+class KinectFaceMetadata : public Metadata
+{
+ Q_OBJECT
+
+ QString name() override { return tr("Kinect Face 0.1"); }
+ QIcon icon() override { return QIcon(":/images/kinect.png"); }
+};
+
diff --git a/tracker-kinect-face/kinect_face_settings.ui b/tracker-kinect-face/kinect_face_settings.ui
new file mode 100644
index 00000000..9e4b2b41
--- /dev/null
+++ b/tracker-kinect-face/kinect_face_settings.ui
@@ -0,0 +1,74 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<ui version="4.0">
+ <class>KinectFaceUi</class>
+ <widget class="QWidget" name="KinectFaceUi">
+ <property name="windowModality">
+ <enum>Qt::NonModal</enum>
+ </property>
+ <property name="geometry">
+ <rect>
+ <x>0</x>
+ <y>0</y>
+ <width>386</width>
+ <height>153</height>
+ </rect>
+ </property>
+ <property name="minimumSize">
+ <size>
+ <width>386</width>
+ <height>0</height>
+ </size>
+ </property>
+ <property name="windowTitle">
+ <string>Kinect Face Tracker</string>
+ </property>
+ <property name="windowIcon">
+ <iconset>
+ <normaloff>:/images/kinect.png</normaloff>:/images/kinect.png</iconset>
+ </property>
+ <property name="layoutDirection">
+ <enum>Qt::LeftToRight</enum>
+ </property>
+ <property name="autoFillBackground">
+ <bool>false</bool>
+ </property>
+ <layout class="QVBoxLayout" name="verticalLayout">
+ <item>
+ <widget class="QLabel" name="label">
+ <property name="sizePolicy">
+ <sizepolicy hsizetype="Minimum" vsizetype="Maximum">
+ <horstretch>0</horstretch>
+ <verstretch>0</verstretch>
+ </sizepolicy>
+ </property>
+ <property name="text">
+ <string>&lt;html&gt;&lt;head/&gt;&lt;body&gt;&lt;p align=&quot;justify&quot;&gt;Start OpenTrack to check if Kinect Face Tracker is working.&lt;/p&gt;&lt;p align=&quot;justify&quot;&gt;&lt;span style=&quot; font-weight:600;&quot;&gt;Note&lt;/span&gt;: When using OpenTrack with a Kinect for Windows v2 Sensor, Microsoft will collect telemetry data (e.g. operating system, number of processors, graphic chipset, memory, device type, locale, time) in order to improve Microsoft products and services. The data will not be used to identify specific individuals.&lt;/p&gt;&lt;/body&gt;&lt;/html&gt;</string>
+ </property>
+ <property name="wordWrap">
+ <bool>true</bool>
+ </property>
+ </widget>
+ </item>
+ <item>
+ <widget class="QDialogButtonBox" name="buttonBox">
+ <property name="sizePolicy">
+ <sizepolicy hsizetype="Minimum" vsizetype="Maximum">
+ <horstretch>0</horstretch>
+ <verstretch>0</verstretch>
+ </sizepolicy>
+ </property>
+ <property name="standardButtons">
+ <set>QDialogButtonBox::Close</set>
+ </property>
+ </widget>
+ </item>
+ </layout>
+ </widget>
+ <resources/>
+ <connections/>
+ <slots>
+ <slot>startEngineClicked()</slot>
+ <slot>stopEngineClicked()</slot>
+ <slot>cameraSettingsClicked()</slot>
+ </slots>
+</ui>
diff --git a/tracker-kinect-face/kinect_face_tracker.cpp b/tracker-kinect-face/kinect_face_tracker.cpp
new file mode 100644
index 00000000..4bab2eef
--- /dev/null
+++ b/tracker-kinect-face/kinect_face_tracker.cpp
@@ -0,0 +1,614 @@
+/* Copyright (c) 2019, Stéphane Lenclud <github@lenclud.com>
+
+ * Permission to use, copy, modify, and/or distribute this
+ * software for any purpose with or without fee is hereby granted,
+ * provided that the above copyright notice and this permission
+ * notice appear in all copies.
+ */
+
+#include "kinect_face_tracker.h"
+
+#include <QLayout>
+#include <QPainter>
+
+#include "compat/check-visible.hpp"
+
+static const int KColorWidth = 1920;
+static const int KColorHeight = 1080;
+
+/// A rectangle is considered valid unless all of its sides are zero.
+bool IsValidRect(const RectI& aRect)
+{
+ if (aRect.Bottom != 0)
+ {
+ return true;
+ }
+
+ if (aRect.Left != 0)
+ {
+ return true;
+ }
+
+ if (aRect.Right != 0)
+ {
+ return true;
+ }
+
+ if (aRect.Top != 0)
+ {
+ return true;
+ }
+
+ return false;
+}
+
+/// Tells whether all components of the given vector are zero.
+bool IsNullVector(const Vector4& aVector)
+{
+ if (aVector.w != 0)
+ {
+ return false;
+ }
+
+ if (aVector.x != 0)
+ {
+ return false;
+ }
+
+ if (aVector.y != 0)
+ {
+ return false;
+ }
+
+ if (aVector.z != 0)
+ {
+ return false;
+ }
+
+ return true;
+}
+
+/// Tells whether all coordinates of the given point are zero.
+bool IsNullPoint(const CameraSpacePoint& aPoint)
+{
+ if (aPoint.X != 0)
+ {
+ return false;
+ }
+
+ if (aPoint.Y != 0)
+ {
+ return false;
+ }
+
+ if (aPoint.Z != 0)
+ {
+ return false;
+ }
+
+ return true;
+}
+
+
+KinectFaceTracker::KinectFaceTracker()
+{
+ // create heap storage for color pixel data in RGBX format
+ iColorRGBX = new RGBQUAD[KColorWidth * KColorHeight];
+}
+
+KinectFaceTracker::~KinectFaceTracker()
+{
+ if (iColorRGBX)
+ {
+ delete[] iColorRGBX;
+ iColorRGBX = nullptr;
+ }
+
+ // clean up Direct2D
+ //SafeRelease(m_pD2DFactory);
+
+ // done with face sources and readers
+ SafeRelease(iFaceFrameSource);
+ SafeRelease(iFaceFrameReader);
+
+ // done with body frame reader
+ SafeRelease(iBodyFrameReader);
+
+ // done with color frame reader
+ SafeRelease(iColorFrameReader);
+
+ // close the Kinect Sensor
+ if (iKinectSensor)
+ {
+ iKinectSensor->Close();
+ }
+
+ SafeRelease(iKinectSensor);
+}
+
+module_status KinectFaceTracker::start_tracker(QFrame* aFrame)
+{
+ iTimer.start();
+
+ if (SUCCEEDED(InitializeDefaultSensor()))
+ {
+ // Setup our video preview widget
+ iVideoWidget = std::make_unique<video_widget>(aFrame);
+ iLayout = std::make_unique<QHBoxLayout>(aFrame);
+ iLayout->setContentsMargins(0, 0, 0, 0);
+ iLayout->addWidget(&*iVideoWidget);
+ aFrame->setLayout(&*iLayout);
+ //video_widget->resize(video_frame->width(), video_frame->height());
+ aFrame->show();
+
+ return status_ok();
+ }
+
+ return error("Kinect init failed!");
+}
+
+
+bool KinectFaceTracker::center()
+{
+ // Mark our center
+ iFacePositionCenter = iFacePosition;
+ iFaceRotationCenter = iFaceRotation;
+ return true;
+}
+
+//
+//
+//
+void KinectFaceTracker::data(double *data)
+{
+ const double dt = iTimer.elapsed_seconds();
+
+ const double KMinDelayInSeconds = 1.0 / 30.0; // Pointless running faster than Kinect hardware itself
+ if (dt > KMinDelayInSeconds)
+ {
+ iTimer.start(); // Reset our timer
+ //OutputDebugStringA("Updating frame!\n");
+ Update();
+ ExtractFaceRotationInDegrees(&iFaceRotationQuaternion, &iFaceRotation.X, &iFaceRotation.Y, &iFaceRotation.Z);
+ //Check if data is valid
+ if (IsValidRect(iFaceBox))
+ {
+ // We have valid tracking, retain position and rotation
+ iLastFacePosition = iFacePosition;
+ iLastFaceRotation = iFaceRotation;
+ }
+ else
+ {
+ //TODO: after like 5s without tracking reset position to zero
+ //TODO: Instead of hardcoding that delay add it to our settings
+ }
+ }
+ else
+ {
+ //OutputDebugStringA("Skipping frame!\n");
+ }
+
+ // Feed our framework our last valid position and rotation
+ data[0] = (iLastFacePosition.X - iFacePositionCenter.X) * 100; // Convert to centimeters to be in a range that suits OpenTrack.
+ data[1] = (iLastFacePosition.Y - iFacePositionCenter.Y) * 100;
+ data[2] = (iLastFacePosition.Z - iFacePositionCenter.Z) * 100;
+
+ // Yaw, Pitch, Roll
+ data[3] = -(iLastFaceRotation.X - iFaceRotationCenter.X); // Invert to be compatible with ED out-of-the-box
+ data[4] = (iLastFaceRotation.Y - iFaceRotationCenter.Y);
+ data[5] = (iLastFaceRotation.Z - iFaceRotationCenter.Z);
+}
+
+
+/// <summary>
+/// Converts a face rotation quaternion to Euler angles in degrees
+/// </summary>
+/// <param name="pQuaternion">face rotation quaternion</param>
+/// <param name="pYaw">rotation about the Y-axis, in degrees</param>
+/// <param name="pPitch">rotation about the X-axis, in degrees</param>
+/// <param name="pRoll">rotation about the Z-axis, in degrees</param>
+void KinectFaceTracker::ExtractFaceRotationInDegrees(const Vector4* pQuaternion, float* pYaw, float* pPitch, float* pRoll)
+{
+ double x = pQuaternion->x;
+ double y = pQuaternion->y;
+ double z = pQuaternion->z;
+ double w = pQuaternion->w;
+
+ // convert face rotation quaternion to Euler angles in degrees
+ double dPitch, dYaw, dRoll;
+ dPitch = atan2(2 * (y * z + w * x), w * w - x * x - y * y + z * z) / M_PI * 180.0;
+ dYaw = asin(2 * (w * y - x * z)) / M_PI * 180.0;
+ dRoll = atan2(2 * (x * y + w * z), w * w + x * x - y * y - z * z) / M_PI * 180.0;
+
+ // clamp rotation values in degrees to a specified range of values to control the refresh rate
+ /*
+ double increment = c_FaceRotationIncrementInDegrees;
+ *pPitch = static_cast<int>(floor((dPitch + increment/2.0 * (dPitch > 0 ? 1.0 : -1.0)) / increment) * increment);
+ *pYaw = static_cast<int>(floor((dYaw + increment/2.0 * (dYaw > 0 ? 1.0 : -1.0)) / increment) * increment);
+ *pRoll = static_cast<int>(floor((dRoll + increment/2.0 * (dRoll > 0 ? 1.0 : -1.0)) / increment) * increment);
+ */
+
+ *pPitch = dPitch;
+ *pYaw = dYaw;
+ *pRoll = dRoll;
+}
+
+
+
+/// <summary>
+/// Initializes the default Kinect sensor
+/// </summary>
+/// <returns>S_OK on success else the failure code</returns>
+HRESULT KinectFaceTracker::InitializeDefaultSensor()
+{
+ HRESULT hr;
+
+ // Get and open Kinect sensor
+ hr = GetDefaultKinectSensor(&iKinectSensor);
+ if (SUCCEEDED(hr))
+ {
+ hr = iKinectSensor->Open();
+ }
+
+ // Create color frame reader
+ if (SUCCEEDED(hr))
+ {
+ UniqueInterface<IColorFrameSource> colorFrameSource;
+ hr = iKinectSensor->get_ColorFrameSource(colorFrameSource.PtrPtr());
+ colorFrameSource.Reset();
+
+ if (SUCCEEDED(hr))
+ {
+ hr = colorFrameSource->OpenReader(&iColorFrameReader);
+ }
+ }
+
+ // Create body frame reader
+ if (SUCCEEDED(hr))
+ {
+ UniqueInterface<IBodyFrameSource> bodyFrameSource;
+ hr = iKinectSensor->get_BodyFrameSource(bodyFrameSource.PtrPtr());
+ bodyFrameSource.Reset();
+
+ if (SUCCEEDED(hr))
+ {
+ hr = bodyFrameSource->OpenReader(&iBodyFrameReader);
+ }
+ }
+
+ // Create HD face frame source
+ if (SUCCEEDED(hr))
+ {
+ // create the HD face frame source
+ hr = CreateHighDefinitionFaceFrameSource(iKinectSensor, &iFaceFrameSource);
+ }
+
+ // Create HD face frame reader
+ if (SUCCEEDED(hr))
+ {
+ // open the corresponding reader
+ hr = iFaceFrameSource->OpenReader(&iFaceFrameReader);
+ }
+
+ return hr;
+}
+
+
+
+/// <summary>
+/// Main processing function
+/// </summary>
+void KinectFaceTracker::Update()
+{
+ if (!iColorFrameReader || !iBodyFrameReader)
+ {
+ return;
+ }
+
+ IColorFrame* pColorFrame = nullptr;
+ HRESULT hr = iColorFrameReader->AcquireLatestFrame(&pColorFrame);
+
+ if (SUCCEEDED(hr))
+ {
+ INT64 nTime = 0;
+ IFrameDescription* pFrameDescription = nullptr;
+ int nWidth = 0;
+ int nHeight = 0;
+ ColorImageFormat imageFormat = ColorImageFormat_None;
+ UINT nBufferSize = 0;
+ RGBQUAD *pBuffer = nullptr;
+
+ hr = pColorFrame->get_RelativeTime(&nTime);
+
+ if (SUCCEEDED(hr))
+ {
+ hr = pColorFrame->get_FrameDescription(&pFrameDescription);
+ }
+
+ if (SUCCEEDED(hr))
+ {
+ hr = pFrameDescription->get_Width(&nWidth);
+ }
+
+ if (SUCCEEDED(hr))
+ {
+ hr = pFrameDescription->get_Height(&nHeight);
+ }
+
+ if (SUCCEEDED(hr))
+ {
+ hr = pColorFrame->get_RawColorImageFormat(&imageFormat);
+ }
+
+ if (SUCCEEDED(hr))
+ {
+ //DrawStreams(nTime, pBuffer, nWidth, nHeight);
+ ProcessFaces();
+ }
+
+ if (check_is_visible())
+ {
+ //OutputDebugStringA("Widget visible!\n");
+ // If our widget is visible we feed it our frame
+ if (SUCCEEDED(hr))
+ {
+ // Fetch color buffer
+ if (imageFormat == ColorImageFormat_Rgba)
+ {
+ hr = pColorFrame->AccessRawUnderlyingBuffer(&nBufferSize, reinterpret_cast<BYTE**>(&pBuffer));
+ }
+ else if (iColorRGBX)
+ {
+ pBuffer = iColorRGBX;
+ nBufferSize = KColorWidth * KColorHeight * sizeof(RGBQUAD);
+ hr = pColorFrame->CopyConvertedFrameDataToArray(nBufferSize, reinterpret_cast<BYTE*>(pBuffer), ColorImageFormat_Rgba);
+ }
+ else
+ {
+ hr = E_FAIL;
+ }
+
+ }
+
+ if (SUCCEEDED(hr))
+ {
+ // Setup our image
+ QImage image((const unsigned char*)pBuffer, KColorWidth, KColorHeight, sizeof(RGBQUAD)*KColorWidth, QImage::Format_RGBA8888);
+ if (IsValidRect(iFaceBox))
+ {
+ // Draw our face bounding box
+ QPainter painter(&image);
+ painter.setBrush(Qt::NoBrush);
+ painter.setPen(QPen(Qt::red, 8));
+ painter.drawRect(iFaceBox.Left, iFaceBox.Top, iFaceBox.Right - iFaceBox.Left, iFaceBox.Bottom - iFaceBox.Top);
+ bool bEnd = painter.end();
+ (void)bEnd;
+ }
+
+ // Update our video preview
+ iVideoWidget->update_image(image);
+ }
+
+ }
+
+
+ SafeRelease(pFrameDescription);
+ }
+
+ SafeRelease(pColorFrame);
+}
+
+
+/// <summary>
+/// Updates body data
+/// </summary>
+/// <param name="ppBodies">pointer to the body data storage</param>
+/// <returns>indicates success or failure</returns>
+HRESULT KinectFaceTracker::UpdateBodyData(IBody** ppBodies)
+{
+ HRESULT hr = E_FAIL;
+
+ if (iBodyFrameReader != nullptr)
+ {
+ IBodyFrame* pBodyFrame = nullptr;
+ hr = iBodyFrameReader->AcquireLatestFrame(&pBodyFrame);
+ if (SUCCEEDED(hr))
+ {
+ hr = pBodyFrame->GetAndRefreshBodyData(BODY_COUNT, ppBodies);
+ }
+ SafeRelease(pBodyFrame);
+ }
+
+ return hr;
+}
+
+
+float VectorLengthSquared(CameraSpacePoint point)
+{
+ float lengthSquared = pow(point.X, 2) + pow(point.Y, 2) + pow(point.Z, 2);
+
+ //result = Math.Sqrt(result);
+ return lengthSquared;
+}
+
+//
+// Finds the body closest to the sensor, if any
+//
+IBody* KinectFaceTracker::FindClosestBody(IBody** aBodies)
+{
+ IBody* result = nullptr;
+ float closestBodyDistance = std::numeric_limits<float>::max();
+
+ for(int i=0;i<BODY_COUNT;i++)
+ {
+ BOOLEAN tracked;
+ aBodies[i]->get_IsTracked(&tracked);
+
+ if (tracked)
+ {
+ Joint joints[JointType_Count];
+ HRESULT hr = aBodies[i]->GetJoints(JointType_Count,joints);
+ if (FAILED(hr))
+ {
+ continue;
+ }
+
+ auto currentLocation = joints[JointType_SpineBase].Position;
+ auto currentDistance = VectorLengthSquared(currentLocation);
+
+ if (result == nullptr || currentDistance < closestBodyDistance)
+ {
+ result = aBodies[i];
+ closestBodyDistance = currentDistance;
+ }
+ }
+ }
+
+ return result;
+}
+
+//
+// Search our list of bodies for the one matching our tracking id
+//
+IBody* KinectFaceTracker::FindTrackedBodyById(IBody** aBodies, UINT64 aTrackingId)
+{
+ float closestBodyDistance = std::numeric_limits<float>::max();
+ (void)closestBodyDistance;
+
+ for (int i = 0; i < BODY_COUNT; i++)
+ {
+ BOOLEAN tracked;
+ HRESULT hr = aBodies[i]->get_IsTracked(&tracked);
+
+ if (SUCCEEDED(hr) && tracked)
+ {
+ UINT64 trackingId = 0;
+ hr = aBodies[i]->get_TrackingId(&trackingId);
+
+ if (SUCCEEDED(hr) && aTrackingId == trackingId)
+ {
+ return aBodies[i];
+ }
+ }
+ }
+
+ return nullptr;
+}
+
+
+/// <summary>
+/// Processes new face frames
+/// </summary>
+void KinectFaceTracker::ProcessFaces()
+{
+ HRESULT hr = 0;
+ IBody* bodies[BODY_COUNT] = { 0 }; // Each body will need to be released
+ bool bHaveBodyData = SUCCEEDED(UpdateBodyData(bodies));
+ if (!bHaveBodyData)
+ {
+ return;
+ }
+
+ // Try to keep tracking the same body
+ IBody* body = FindTrackedBodyById(bodies, iTrackingId);
+ if (body == nullptr)
+ {
+ // The body we were tracking is gone, try tracking the closest body if any
+ body = FindClosestBody(bodies);
+ if (body != nullptr)
+ {
+ // Update our face source with our new body id
+ hr = body->get_TrackingId(&iTrackingId);
+ if (SUCCEEDED(hr))
+ {
+ // Tell our face source to use the given body id
+ hr = iFaceFrameSource->put_TrackingId(iTrackingId);
+ //OutputDebugStringA("Tracking new body!\n");
+ }
+ }
+ }
+
+ // retrieve the latest face frame from this reader
+ IHighDefinitionFaceFrame* pFaceFrame = nullptr;
+ if (SUCCEEDED(hr))
+ {
+ hr = iFaceFrameReader->AcquireLatestFrame(&pFaceFrame);
+ }
+
+ BOOLEAN bFaceTracked = false;
+ if (SUCCEEDED(hr) && nullptr != pFaceFrame)
+ {
+ // check if a valid face is tracked in this face frame
+ hr = pFaceFrame->get_IsTrackingIdValid(&bFaceTracked);
+ }
+
+ if (SUCCEEDED(hr))
+ {
+ if (bFaceTracked)
+ {
+ //OutputDebugStringA("Tracking face!\n");
+
+ //IFaceFrameResult* pFaceFrameResult = nullptr;
+ IFaceAlignment* pFaceAlignment = nullptr;
+ CreateFaceAlignment(&pFaceAlignment); // TODO: check return?
+ //D2D1_POINT_2F faceTextLayout;
+
+ //hr = pFaceFrame->get_FaceFrameResult(&pFaceFrameResult);
+
+ hr = pFaceFrame->GetAndRefreshFaceAlignmentResult(pFaceAlignment);
+
+ // need to verify that pFaceAlignment contains data before trying to access it
+ if (SUCCEEDED(hr) && pFaceAlignment != nullptr)
+ {
+ hr = pFaceAlignment->get_FaceBoundingBox(&iFaceBox);
+ //pFaceFrameResult->get_FaceBoundingBoxInColorSpace();
+
+ if (SUCCEEDED(hr))
+ {
+ //hr = pFaceFrameResult->GetFacePointsInColorSpace(FacePointType::FacePointType_Count, facePoints);
+ hr = pFaceAlignment->get_HeadPivotPoint(&iFacePosition);
+ }
+
+ if (SUCCEEDED(hr))
+ {
+ //hr = pFaceFrameResult->get_FaceRotationQuaternion(&faceRotation);
+ hr = pFaceAlignment->get_FaceOrientation(&iFaceRotationQuaternion);
+ }
+
+ if (SUCCEEDED(hr))
+ {
+ //hr = pFaceFrameResult->GetFaceProperties(FaceProperty::FaceProperty_Count, faceProperties);
+ }
+
+ if (SUCCEEDED(hr))
+ {
+ //hr = GetFaceTextPositionInColorSpace(ppBodies[0], &faceTextLayout);
+ }
+
+ if (SUCCEEDED(hr))
+ {
+ // draw face frame results
+ //m_pDrawDataStreams->DrawFaceFrameResults(0, &faceBox, facePoints, &faceRotation, faceProperties, &faceTextLayout);
+ }
+ }
+
+ SafeRelease(pFaceAlignment);
+ }
+
+ SafeRelease(pFaceFrame);
+ }
+
+ if (bHaveBodyData)
+ {
+ for (int i = 0; i < _countof(bodies); ++i)
+ {
+ SafeRelease(bodies[i]);
+ }
+ }
+}
+
+
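ExtractFaceRotationInDegrees above applies the standard quaternion-to-Euler formulas. As a quick standalone sanity check of those formulas, independent of the Kinect SDK (Quat is a local stand-in for the SDK's Vector4), a pure 30-degree yaw rotation should come back as yaw = 30 and pitch = roll = 0:

#include <cmath>
#include <cstdio>

// Stand-in for the SDK's Vector4 quaternion (x, y, z, w).
struct Quat { double x, y, z, w; };

int main()
{
    const double pi = 3.14159265358979323846;
    const double angle = 30.0 * pi / 180.0;

    // Quaternion for a pure rotation of 30 degrees about the Y (yaw) axis.
    Quat q { 0.0, std::sin(angle / 2.0), 0.0, std::cos(angle / 2.0) };

    const double x = q.x, y = q.y, z = q.z, w = q.w;
    const double pitch = std::atan2(2 * (y * z + w * x), w * w - x * x - y * y + z * z) / pi * 180.0;
    const double yaw   = std::asin(2 * (w * y - x * z)) / pi * 180.0;
    const double roll  = std::atan2(2 * (x * y + w * z), w * w + x * x - y * y - z * z) / pi * 180.0;

    // Expected output: yaw=30.0 pitch=0.0 roll=0.0
    std::printf("yaw=%.1f pitch=%.1f roll=%.1f\n", yaw, pitch, roll);
    return 0;
}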
diff --git a/tracker-kinect-face/kinect_face_tracker.h b/tracker-kinect-face/kinect_face_tracker.h
new file mode 100644
index 00000000..83b58d71
--- /dev/null
+++ b/tracker-kinect-face/kinect_face_tracker.h
@@ -0,0 +1,121 @@
+/* Copyright (c) 2019, Stephane Lenclud <github@lenclud.com>
+
+ * Permission to use, copy, modify, and/or distribute this
+ * software for any purpose with or without fee is hereby granted,
+ * provided that the above copyright notice and this permission
+ * notice appear in all copies.
+ */
+
+#pragma once
+
+#include <cmath>
+
+#include "api/plugin-api.hpp"
+#include "compat/timer.hpp"
+#include "video/video-widget.hpp"
+
+// Kinect Header files
+#include <Kinect.h>
+#include <Kinect.Face.h>
+
+// @deprecated Use UniqueInterface instead. Remove it at some point.
+template<class Interface>
+inline void SafeRelease(Interface *& pInterfaceToRelease)
+{
+ if (pInterfaceToRelease != nullptr)
+ {
+ pInterfaceToRelease->Release();
+ pInterfaceToRelease = nullptr;
+ }
+}
+
+template<class Interface>
+inline void ReleaseInterface(Interface* pInterfaceToRelease)
+{
+ if (pInterfaceToRelease != nullptr)
+ {
+ pInterfaceToRelease->Release();
+ }
+}
+
+// Safely use Microsoft interfaces.
+template<typename T>
+class UniqueInterface : public std::unique_ptr<T, decltype(&ReleaseInterface<T>)>
+{
+public:
+ UniqueInterface() : std::unique_ptr<T, decltype(&ReleaseInterface<T>)>(nullptr, ReleaseInterface<T>){}
+ // Access pointer, typically for creation
+ T** PtrPtr() { return &iPtr; };
+ // Call this once the pointer has been created
+ void Reset() { std::unique_ptr<T, decltype(&ReleaseInterface<T>)>::reset(iPtr); }
+ // If ever you want to release that interface before the object is deleted
+ void Free() { iPtr = nullptr; Reset(); }
+private:
+ T* iPtr = nullptr;
+};
+
+
+//
+//
+//
+class KinectFaceTracker : public ITracker
+{
+public:
+ KinectFaceTracker();
+ ~KinectFaceTracker() override;
+ module_status start_tracker(QFrame* aFrame) override;
+ void data(double *data) override;
+ bool center() override;
+
+private:
+
+
+ // Kinect stuff
+ void Update();
+ HRESULT InitializeDefaultSensor();
+ void ProcessFaces();
+ HRESULT UpdateBodyData(IBody** ppBodies);
+ void ExtractFaceRotationInDegrees(const Vector4* pQuaternion, float* pYaw, float* pPitch, float* pRoll);
+ static IBody* FindClosestBody(IBody** aBodies);
+ static IBody* FindTrackedBodyById(IBody** aBodies,UINT64 aTrackingId);
+
+ // Timer used to pace Kinect updates from data()
+ Timer iTimer;
+
+ // Current Kinect
+ IKinectSensor* iKinectSensor = nullptr;
+
+ // Color reader
+ IColorFrameReader* iColorFrameReader = nullptr;
+
+ // Body reader
+ IBodyFrameReader* iBodyFrameReader = nullptr;
+
+ // Face sources
+ IHighDefinitionFaceFrameSource* iFaceFrameSource = nullptr;
+
+ // Face readers
+ IHighDefinitionFaceFrameReader* iFaceFrameReader = nullptr;
+
+ // Heap buffer for color frame data converted to RGBX
+ RGBQUAD* iColorRGBX = nullptr;
+
+ RectI iFaceBox = { 0 };
+
+ // Face position
+ CameraSpacePoint iLastFacePosition = { 0 };
+ CameraSpacePoint iFacePosition = { 0 };
+ CameraSpacePoint iFacePositionCenter = { 0 };
+
+ Vector4 iFaceRotationQuaternion = { 0 };
+ // As Yaw, Pitch, Roll
+ CameraSpacePoint iLastFaceRotation = { 0 };
+ CameraSpacePoint iFaceRotation = { 0 };
+ CameraSpacePoint iFaceRotationCenter = { 0 };
+ // Video preview widget and its layout
+ std::unique_ptr<video_widget> iVideoWidget;
+ std::unique_ptr<QLayout> iLayout;
+
+ // Id of the body currently being tracked
+ UINT64 iTrackingId = 0;
+};
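The UniqueInterface wrapper above is what InitializeDefaultSensor uses to keep SDK interfaces released on every exit path. The intended usage pattern, isolated from the Kinect SDK, is sketched below under the assumption that this header's UniqueInterface and ReleaseInterface are in scope; FakeSource is a hypothetical stand-in for a COM-style interface such as IColorFrameSource and exists only for illustration.

// FakeSource mimics the one thing UniqueInterface needs: a Release() method.
struct FakeSource
{
    void Release() { delete this; }
    void OpenReader() {}
};

void example()
{
    UniqueInterface<FakeSource> source;

    // 1. Hand the raw pointer slot to the factory call
    //    (the real code does: sensor->get_ColorFrameSource(source.PtrPtr())).
    *source.PtrPtr() = new FakeSource;

    // 2. Transfer ownership to the wrapper; Release() then runs automatically
    //    when `source` goes out of scope.
    source.Reset();

    // 3. Use it like a plain pointer afterwards.
    source->OpenReader();
}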
diff --git a/tracker-kinect-face/lang/nl_NL.ts b/tracker-kinect-face/lang/nl_NL.ts
new file mode 100644
index 00000000..fbe86bb9
--- /dev/null
+++ b/tracker-kinect-face/lang/nl_NL.ts
@@ -0,0 +1,29 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!DOCTYPE TS>
+<TS version="2.1" language="nl_NL">
+<context>
+ <name>KinectFaceMetadata</name>
+ <message>
+ <source>Kinect Face 0.1</source>
+ <translation type="unfinished"></translation>
+ </message>
+</context>
+<context>
+ <name>KinectFaceSettings</name>
+ <message>
+ <source>Kinect license</source>
+ <translation type="unfinished"></translation>
+ </message>
+</context>
+<context>
+ <name>KinectFaceUi</name>
+ <message>
+ <source>Kinect Face Tracker</source>
+ <translation type="unfinished"></translation>
+ </message>
+ <message>
+ <source>&lt;html&gt;&lt;head/&gt;&lt;body&gt;&lt;p align=&quot;justify&quot;&gt;Start OpenTrack to check if Kinect Face Tracker is working.&lt;/p&gt;&lt;p align=&quot;justify&quot;&gt;&lt;span style=&quot; font-weight:600;&quot;&gt;Note&lt;/span&gt;: When using OpenTrack with a Kinect for Windows v2 Sensor, Microsoft will collect telemetry data (e.g. operating system, number of processors, graphic chipset, memory, device type, locale, time) in order to improve Microsoft products and services. The data will not be used to identify specific individuals.&lt;/p&gt;&lt;/body&gt;&lt;/html&gt;</source>
+ <translation type="unfinished"></translation>
+ </message>
+</context>
+</TS>
diff --git a/tracker-kinect-face/lang/ru_RU.ts b/tracker-kinect-face/lang/ru_RU.ts
new file mode 100644
index 00000000..678fa06c
--- /dev/null
+++ b/tracker-kinect-face/lang/ru_RU.ts
@@ -0,0 +1,29 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!DOCTYPE TS>
+<TS version="2.1" language="ru_RU">
+<context>
+ <name>KinectFaceMetadata</name>
+ <message>
+ <source>Kinect Face 0.1</source>
+ <translation type="unfinished"></translation>
+ </message>
+</context>
+<context>
+ <name>KinectFaceSettings</name>
+ <message>
+ <source>Kinect license</source>
+ <translation type="unfinished"></translation>
+ </message>
+</context>
+<context>
+ <name>KinectFaceUi</name>
+ <message>
+ <source>Kinect Face Tracker</source>
+ <translation type="unfinished"></translation>
+ </message>
+ <message>
+ <source>&lt;html&gt;&lt;head/&gt;&lt;body&gt;&lt;p align=&quot;justify&quot;&gt;Start OpenTrack to check if Kinect Face Tracker is working.&lt;/p&gt;&lt;p align=&quot;justify&quot;&gt;&lt;span style=&quot; font-weight:600;&quot;&gt;Note&lt;/span&gt;: When using OpenTrack with a Kinect for Windows v2 Sensor, Microsoft will collect telemetry data (e.g. operating system, number of processors, graphic chipset, memory, device type, locale, time) in order to improve Microsoft products and services. The data will not be used to identify specific individuals.&lt;/p&gt;&lt;/body&gt;&lt;/html&gt;</source>
+ <translation type="unfinished"></translation>
+ </message>
+</context>
+</TS>
diff --git a/tracker-kinect-face/lang/stub.ts b/tracker-kinect-face/lang/stub.ts
new file mode 100644
index 00000000..de0e4f95
--- /dev/null
+++ b/tracker-kinect-face/lang/stub.ts
@@ -0,0 +1,29 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!DOCTYPE TS>
+<TS version="2.1">
+<context>
+ <name>KinectFaceMetadata</name>
+ <message>
+ <source>Kinect Face 0.1</source>
+ <translation type="unfinished"></translation>
+ </message>
+</context>
+<context>
+ <name>KinectFaceSettings</name>
+ <message>
+ <source>Kinect license</source>
+ <translation type="unfinished"></translation>
+ </message>
+</context>
+<context>
+ <name>KinectFaceUi</name>
+ <message>
+ <source>Kinect Face Tracker</source>
+ <translation type="unfinished"></translation>
+ </message>
+ <message>
+ <source>&lt;html&gt;&lt;head/&gt;&lt;body&gt;&lt;p align=&quot;justify&quot;&gt;Start OpenTrack to check if Kinect Face Tracker is working.&lt;/p&gt;&lt;p align=&quot;justify&quot;&gt;&lt;span style=&quot; font-weight:600;&quot;&gt;Note&lt;/span&gt;: When using OpenTrack with a Kinect for Windows v2 Sensor, Microsoft will collect telemetry data (e.g. operating system, number of processors, graphic chipset, memory, device type, locale, time) in order to improve Microsoft products and services. The data will not be used to identify specific individuals.&lt;/p&gt;&lt;/body&gt;&lt;/html&gt;</source>
+ <translation type="unfinished"></translation>
+ </message>
+</context>
+</TS>
diff --git a/tracker-kinect-face/lang/zh_CN.ts b/tracker-kinect-face/lang/zh_CN.ts
new file mode 100644
index 00000000..1e1b55d1
--- /dev/null
+++ b/tracker-kinect-face/lang/zh_CN.ts
@@ -0,0 +1,29 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!DOCTYPE TS>
+<TS version="2.1" language="zh_CN">
+<context>
+ <name>KinectFaceMetadata</name>
+ <message>
+ <source>Kinect Face 0.1</source>
+ <translation type="unfinished"></translation>
+ </message>
+</context>
+<context>
+ <name>KinectFaceSettings</name>
+ <message>
+ <source>Kinect license</source>
+ <translation type="unfinished"></translation>
+ </message>
+</context>
+<context>
+ <name>KinectFaceUi</name>
+ <message>
+ <source>Kinect Face Tracker</source>
+ <translation type="unfinished"></translation>
+ </message>
+ <message>
+ <source>&lt;html&gt;&lt;head/&gt;&lt;body&gt;&lt;p align=&quot;justify&quot;&gt;Start OpenTrack to check if Kinect Face Tracker is working.&lt;/p&gt;&lt;p align=&quot;justify&quot;&gt;&lt;span style=&quot; font-weight:600;&quot;&gt;Note&lt;/span&gt;: When using OpenTrack with a Kinect for Windows v2 Sensor, Microsoft will collect telemetry data (e.g. operating system, number of processors, graphic chipset, memory, device type, locale, time) in order to improve Microsoft products and services. The data will not be used to identify specific individuals.&lt;/p&gt;&lt;/body&gt;&lt;/html&gt;</source>
+ <translation type="unfinished"></translation>
+ </message>
+</context>
+</TS>