..
   Source: eyeware-beam-sdk/docs/_sources/api_reference.rst.txt
   (cgit web-interface header and line-number gutter removed from this scrape.)
.. toctree::
   :maxdepth: 2

API reference
=============

C++
---

.. doxygenclass:: eyeware::TrackerClient
   :project: Beam_SDK_docs
   :members:

.. doxygenstruct:: eyeware::HeadPoseInfo
   :project: Beam_SDK_docs
   :members:

.. doxygenstruct:: eyeware::ScreenGazeInfo
   :project: Beam_SDK_docs
   :members:

.. doxygenenum:: eyeware::TrackingConfidence
   :project: Beam_SDK_docs

.. doxygenstruct:: eyeware::AffineTransform3D
   :project: Beam_SDK_docs
   :members:

.. doxygentypedef:: eyeware::Matrix3x3
   :project: Beam_SDK_docs

.. doxygenstruct:: eyeware::Vector3D
   :project: Beam_SDK_docs
   :members:

Python
------

.. autoclass:: eyeware.client.TrackerClient
   :members:
   :exclude-members: connected

   .. autoproperty:: connected

      .. versionadded:: 1.1.0

.. autoclass:: eyeware.client.HeadPoseInfo
   :members:

.. autoclass:: eyeware.client.ScreenGazeInfo
   :members:

.. autoclass:: eyeware.client.TrackingConfidence

.. autoclass:: eyeware.client.AffineTransform3D
   :members:

.. autoclass:: eyeware.client.Vector3D
   :members:

.. note::
   Matrix and vector types, such as the ``rotation`` and ``translation`` properties of ``AffineTransform3D``, can be converted to NumPy arrays efficiently (without copying the underlying data).
   This makes it easy to work with tracking data and coordinates in your application.
   Example:

   .. code-block:: python

      # Receive the latest head pose information from the tracker
      head_pose = tracker.get_head_pose_info()
      # Transform the tracking information to standard NumPy arrays
      import numpy as np
      rotation_numpy = np.array(head_pose.rotation, copy=False)
      translation_numpy = np.array(head_pose.translation, copy=False)
      # Now we can manipulate tracking information to do several things:
      # draw tracking coordinates on the screen, save them for statistics/heatmaps,
      # perform arithmetic operations on them, trigger interactive behaviors based on thresholds, etc.