diff --git a/K4AdotNet.sln b/K4AdotNet.sln new file mode 100644 index 0000000..9ce78ab --- /dev/null +++ b/K4AdotNet.sln @@ -0,0 +1,41 @@ + +Microsoft Visual Studio Solution File, Format Version 12.00 +# Visual Studio 15 +VisualStudioVersion = 15.0.28307.572 +MinimumVisualStudioVersion = 10.0.40219.1 +Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "K4AdotNet", "K4AdotNet\K4AdotNet.csproj", "{0646EAF8-8941-4CBE-9D1A-59C643171F3E}" +EndProject +Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "0 - Solution Items", "0 - Solution Items", "{4214990D-F0AF-4206-9A3B-47919B99AB1C}" + ProjectSection(SolutionItems) = preProject + .gitignore = .gitignore + LICENSE = LICENSE + README.md = README.md + EndProjectSection +EndProject +Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "1 - Libraries", "1 - Libraries", "{8A43FD41-5FDD-4106-B41B-FF9C3135D12A}" +EndProject +Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "2 - Samples", "2 - Samples", "{11E945CD-4D46-4856-8467-72CE65DD4DF7}" +EndProject +Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "3 - Tests", "3 - Tests", "{B227E80C-8E74-486D-887E-AC31D7A70F96}" +EndProject +Global + GlobalSection(SolutionConfigurationPlatforms) = preSolution + Debug|Any CPU = Debug|Any CPU + Release|Any CPU = Release|Any CPU + EndGlobalSection + GlobalSection(ProjectConfigurationPlatforms) = postSolution + {0646EAF8-8941-4CBE-9D1A-59C643171F3E}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {0646EAF8-8941-4CBE-9D1A-59C643171F3E}.Debug|Any CPU.Build.0 = Debug|Any CPU + {0646EAF8-8941-4CBE-9D1A-59C643171F3E}.Release|Any CPU.ActiveCfg = Release|Any CPU + {0646EAF8-8941-4CBE-9D1A-59C643171F3E}.Release|Any CPU.Build.0 = Release|Any CPU + EndGlobalSection + GlobalSection(SolutionProperties) = preSolution + HideSolutionNode = FALSE + EndGlobalSection + GlobalSection(NestedProjects) = preSolution + {0646EAF8-8941-4CBE-9D1A-59C643171F3E} = {8A43FD41-5FDD-4106-B41B-FF9C3135D12A} + EndGlobalSection + GlobalSection(ExtensibilityGlobals) = 
postSolution + SolutionGuid = {E2012927-1A97-4801-B3C9-76EBC40D34F4} + EndGlobalSection +EndGlobal diff --git a/K4AdotNet/Assembly.cs b/K4AdotNet/Assembly.cs new file mode 100644 index 0000000..78e4c83 --- /dev/null +++ b/K4AdotNet/Assembly.cs @@ -0,0 +1,3 @@ +using System; + +[assembly: CLSCompliant(isCompliant: true)] diff --git a/K4AdotNet/Delay.cs b/K4AdotNet/Delay.cs new file mode 100644 index 0000000..544b9f2 --- /dev/null +++ b/K4AdotNet/Delay.cs @@ -0,0 +1,178 @@ +using System; + +namespace K4AdotNet +{ + public struct Delay : + IEquatable, IEquatable, IEquatable, + IComparable, IComparable, IComparable, IComparable, + IFormattable + { + public int ValueUsec; + + public Delay(int valueUsec) + => ValueUsec = valueUsec; + + public Delay(TimeSpan value) + => ValueUsec = checked((int)(value.Ticks / TimeStamp.UsecToTimeSpanTicksFactor)); + + public TimeSpan ToTimeSpan() + => TimeSpan.FromTicks(ValueUsec * TimeStamp.UsecToTimeSpanTicksFactor); + + public bool Equals(Delay other) + => ValueUsec.Equals(other.ValueUsec); + + public bool Equals(TimeSpan other) + => Equals(new Delay(other)); + + public bool Equals(int otherUsec) + => ValueUsec.Equals(otherUsec); + + public int CompareTo(Delay other) + => ValueUsec.CompareTo(other.ValueUsec); + + public int CompareTo(TimeSpan other) + => CompareTo(new Delay(other)); + + public int CompareTo(int otherUsec) + => ValueUsec.CompareTo(otherUsec); + + public int CompareTo(object obj) + { + if (obj is null) + return 1; + if (obj is Delay) + return CompareTo((Delay)obj); + if (obj is TimeSpan) + return CompareTo((TimeSpan)obj); + if (obj is IConvertible) + return CompareTo(Convert.ToInt32(obj)); + throw new ArgumentException("Object is not a Delay or TimeSpan or integer number", nameof(obj)); + } + + public string ToString(string format, IFormatProvider formatProvider) + => ValueUsec.ToString(format, formatProvider) + " usec"; + + public override bool Equals(object obj) + { + if (obj is null) + return false; + if (obj is 
Delay) + return Equals((Delay)obj); + if (obj is TimeSpan) + return Equals((TimeSpan)obj); + if (obj is IConvertible) + return Equals(Convert.ToInt32(obj)); + return false; + } + + public override int GetHashCode() + => ValueUsec.GetHashCode(); + + public override string ToString() + => ValueUsec.ToString() + " usec"; + + public static bool operator ==(Delay left, Delay right) + => left.Equals(right); + + public static bool operator !=(Delay left, Delay right) + => !left.Equals(right); + + public static bool operator <(Delay left, Delay right) + => left.CompareTo(right) < 0; + + public static bool operator >(Delay left, Delay right) + => left.CompareTo(right) > 0; + + public static bool operator <=(Delay left, Delay right) + => left.CompareTo(right) <= 0; + + public static bool operator >=(Delay left, Delay right) + => left.CompareTo(right) >= 0; + + public static bool operator ==(Delay left, TimeSpan right) + => left.Equals(right); + + public static bool operator !=(Delay left, TimeSpan right) + => !left.Equals(right); + + public static bool operator <(Delay left, TimeSpan right) + => left.CompareTo(right) < 0; + + public static bool operator >(Delay left, TimeSpan right) + => left.CompareTo(right) > 0; + + public static bool operator <=(Delay left, TimeSpan right) + => left.CompareTo(right) <= 0; + + public static bool operator >=(Delay left, TimeSpan right) + => left.CompareTo(right) >= 0; + + public static bool operator ==(TimeSpan left, Delay right) + => new Delay(left).Equals(right); + + public static bool operator !=(TimeSpan left, Delay right) + => !new Delay(left).Equals(right); + + public static bool operator <(TimeSpan left, Delay right) + => new Delay(left).CompareTo(right) < 0; + + public static bool operator >(TimeSpan left, Delay right) + => new Delay(left).CompareTo(right) > 0; + + public static bool operator <=(TimeSpan left, Delay right) + => new Delay(left).CompareTo(right) <= 0; + + public static bool operator >=(TimeSpan left, Delay right) + => 
new Delay(left).CompareTo(right) >= 0; + + public static bool operator ==(Delay left, int rightUsec) + => left.Equals(rightUsec); + + public static bool operator !=(Delay left, int rightUsec) + => !left.Equals(rightUsec); + + public static bool operator <(Delay left, int rightUsec) + => left.CompareTo(rightUsec) < 0; + + public static bool operator >(Delay left, int rightUsec) + => left.CompareTo(rightUsec) > 0; + + public static bool operator <=(Delay left, int rightUsec) + => left.CompareTo(rightUsec) <= 0; + + public static bool operator >=(Delay left, int rightUsec) + => left.CompareTo(rightUsec) >= 0; + + public static bool operator ==(int leftUsec, Delay right) + => new Delay(leftUsec).Equals(right); + + public static bool operator !=(int leftUsec, Delay right) + => !new Delay(leftUsec).Equals(right); + + public static bool operator <(int leftUsec, Delay right) + => new Delay(leftUsec).CompareTo(right) < 0; + + public static bool operator >(int leftUsec, Delay right) + => new Delay(leftUsec).CompareTo(right) > 0; + + public static bool operator <=(int leftUsec, Delay right) + => new Delay(leftUsec).CompareTo(right) <= 0; + + public static bool operator >=(int leftUsec, Delay right) + => new Delay(leftUsec).CompareTo(right) >= 0; + + public static implicit operator TimeSpan(Delay value) + => value.ToTimeSpan(); + + public static implicit operator Delay(TimeSpan value) + => new Delay(value); + + public static implicit operator int(Delay value) + => value.ValueUsec; + + public static implicit operator Delay(int valueUsec) + => new Delay(valueUsec); + + public static readonly Delay Zero = new Delay(0); + } +} diff --git a/K4AdotNet/Float2.cs b/K4AdotNet/Float2.cs new file mode 100644 index 0000000..4f29f61 --- /dev/null +++ b/K4AdotNet/Float2.cs @@ -0,0 +1,115 @@ +using System; +using System.Runtime.InteropServices; + +namespace K4AdotNet +{ + // Defined in k4atypes.h: + // typedef union + // { + // struct _xy + // { + // float x; /**< X component of a vector. 
*/ + // float y; /**< Y component of a vector. */ + // } xy; /**< X, Y representation of a vector. */ + // float v[2]; /**< Array representation of a vector. */ + // } k4a_float2_t; + /// Two dimensional floating point vector. + [StructLayout(LayoutKind.Sequential)] + public struct Float2 : IEquatable, IFormattable + { + /// X component of a vector. Corresponds to 0 index in array representation. + public float X; + + /// Y component of a vector. Corresponds to 1 index in array representation. + public float Y; + + /// Constructs vector with given components. + /// X component + /// Y component + public Float2(float x, float y) + { + X = x; + Y = y; + } + + public Float2(float[] values) + { + if (values is null) + throw new ArgumentNullException(nameof(values)); + if (values.Length != 2) + throw new ArgumentOutOfRangeException(nameof(values) + "." + nameof(values.Length)); + X = values[0]; + Y = values[1]; + } + + public float[] ToArray() + => new[] { X, Y }; + + /// Indexed access to vector components. + /// Index of component: X - 0, Y - 1. + /// Value of appropriate component. + public float this[int index] + { + get + { + switch (index) + { + case 0: return X; + case 1: return Y; + default: throw new ArgumentOutOfRangeException(nameof(index)); + } + } + + set + { + switch (index) + { + case 0: X = value; break; + case 1: Y = value; break; + default: throw new ArgumentOutOfRangeException(nameof(index)); + } + } + } + + /// Per-component comparison. + /// Other vector to be compared to this one. + /// true if all components are equal. 
+ public bool Equals(Float2 other) + => X.Equals(other.X) && Y.Equals(other.Y); + + public override bool Equals(object obj) + { + if (obj is null || !(obj is Float2)) + return false; + return Equals((Float2)obj); + } + + public static bool operator ==(Float2 left, Float2 right) + => left.Equals(right); + + public static bool operator !=(Float2 left, Float2 right) + => !left.Equals(right); + + public override int GetHashCode() + => X.GetHashCode() ^ Y.GetHashCode(); + + /// Formats vector as [X Y] string. + /// Format string for each individual component in string representation. + /// Culture for formatting numbers to strings. + /// String representation of vector in a given Culture. + public string ToString(string format, IFormatProvider formatProvider) + => $"[{X.ToString(format, formatProvider)} {Y.ToString(format, formatProvider)}]"; + + public override string ToString() + => $"[{X} {Y}]"; + + /// Zero vector. + public static readonly Float2 Zero = new Float2(); + + /// Unit vector in +X direction. + public static readonly Float2 UnitX = new Float2(1, 0); + + /// Unit vector in +Y direction. + public static readonly Float2 UnitY = new Float2(0, 1); + } +} diff --git a/K4AdotNet/Float3.cs b/K4AdotNet/Float3.cs new file mode 100644 index 0000000..1444ce5 --- /dev/null +++ b/K4AdotNet/Float3.cs @@ -0,0 +1,127 @@ +using System; +using System.Runtime.InteropServices; + +namespace K4AdotNet +{ + // Defined in k4atypes.h: + // typedef union + // { + // struct _xyz + // { + // float x; /**< X component of a vector. */ + // float y; /**< Y component of a vector. */ + // float z; /**< Z component of a vector. */ + // } xyz; /**< X, Y, Z representation of a vector. */ + // float v[3]; /**< Array representation of a vector. */ + // } k4a_float3_t; + /// X, Y, Z representation of a vector. + [StructLayout(LayoutKind.Sequential)] + public struct Float3 : IEquatable, IFormattable + { + /// X component of a vector. Corresponds to 0 index in array representation. 
+ public float X; + + /// Y component of a vector. Corresponds to 1 index in array representation. + public float Y; + + /// Z component of a vector. Corresponds to 2 index in array representation. + public float Z; + + /// Constructs vector with given components. + /// X component + /// Y component + /// Z component + public Float3(float x, float y, float z) + { + X = x; + Y = y; + Z = z; + } + + public Float3(float[] values) + { + if (values is null) + throw new ArgumentNullException(nameof(values)); + if (values.Length != 3) + throw new ArgumentOutOfRangeException(nameof(values) + "." + nameof(values.Length)); + X = values[0]; + Y = values[1]; + Z = values[2]; + } + + public float[] ToArray() + => new[] { X, Y, Z }; + + /// Indexed access to vector components. + /// Index of component: X - 0, Y - 1, Z - 2. + /// Value of appropriate component. + public float this[int index] + { + get + { + switch (index) + { + case 0: return X; + case 1: return Y; + case 2: return Z; + default: throw new ArgumentOutOfRangeException(nameof(index)); + } + } + + set + { + switch (index) + { + case 0: X = value; break; + case 1: Y = value; break; + case 2: Z = value; break; + default: throw new ArgumentOutOfRangeException(nameof(index)); + } + } + } + + /// Per-component comparison. + /// Other vector to be compared to this one. + /// true if all components are equal. + public bool Equals(Float3 other) + => X.Equals(other.X) && Y.Equals(other.Y) && Z.Equals(other.Z); + + public override bool Equals(object obj) + { + if (obj is null || !(obj is Float3)) + return false; + return Equals((Float3)obj); + } + + public static bool operator ==(Float3 left, Float3 right) + => left.Equals(right); + + public static bool operator !=(Float3 left, Float3 right) + => !left.Equals(right); + + public override int GetHashCode() + => X.GetHashCode() ^ Y.GetHashCode() ^ Z.GetHashCode(); + + /// Formats vector as [X Y Z] string. + /// Format string for each individual component in string representation. 
+ /// Culture for formatting numbers to strings. + /// String representation of vector in a given Culture. + public string ToString(string format, IFormatProvider formatProvider) + => $"[{X.ToString(format, formatProvider)} {Y.ToString(format, formatProvider)} {Z.ToString(format, formatProvider)}]"; + + public override string ToString() + => $"[{X} {Y} {Z}]"; + + /// Zero vector. + public static readonly Float3 Zero = new Float3(); + + /// Unit vector in +X direction. + public static readonly Float3 UnitX = new Float3(1, 0, 0); + + /// Unit vector in +Y direction. + public static readonly Float3 UnitY = new Float3(0, 1, 0); + + /// Unit vector in +Z direction. + public static readonly Float3 UnitZ = new Float3(0, 0, 1); + } +} diff --git a/K4AdotNet/Float3x3.cs b/K4AdotNet/Float3x3.cs new file mode 100644 index 0000000..c643df0 --- /dev/null +++ b/K4AdotNet/Float3x3.cs @@ -0,0 +1,126 @@ +using System; +using System.Runtime.InteropServices; + +namespace K4AdotNet +{ + // In k4atypes.h it is represented simply as float[9] array. + /// Placeholder for 3x3 matrix data. + [StructLayout(LayoutKind.Sequential)] + public struct Float3x3 : IEquatable + { + public float M11, M12, M13; + public float M21, M22, M23; + public float M31, M32, M33; + + public Float3x3( + float m11, float m12, float m13, + float m21, float m22, float m23, + float m31, float m32, float m33) + { + M11 = m11; M12 = m12; M13 = m13; + M21 = m21; M22 = m22; M23 = m23; + M31 = m31; M32 = m32; M33 = m33; + } + + public Float3x3(float[] values) + { + if (values is null) + throw new ArgumentNullException(nameof(values)); + if (values.Length != 9) + throw new ArgumentOutOfRangeException(nameof(values) + "." 
+ nameof(values.Length)); + M11 = values[0]; M12 = values[1]; M13 = values[2]; + M21 = values[3]; M22 = values[4]; M23 = values[5]; + M31 = values[6]; M32 = values[7]; M33 = values[8]; + } + + public float[] ToArray() + => new[] { M11, M12, M13, M21, M22, M23, M31, M32, M33 }; + + public float this[int index] + { + get + { + switch (index) + { + case 0: return M11; + case 1: return M12; + case 2: return M13; + case 3: return M21; + case 4: return M22; + case 5: return M23; + case 6: return M31; + case 7: return M32; + case 8: return M33; + default: throw new ArgumentOutOfRangeException(nameof(index)); + } + } + + set + { + switch (index) + { + case 0: M11 = value; break; + case 1: M12 = value; break; + case 2: M13 = value; break; + case 3: M21 = value; break; + case 4: M22 = value; break; + case 5: M23 = value; break; + case 6: M31 = value; break; + case 7: M32 = value; break; + case 8: M33 = value; break; + default: throw new ArgumentOutOfRangeException(nameof(index)); + } + } + } + + public float this[int row, int column] + { + get + { + if (row < 0 || row > 2) + throw new ArgumentOutOfRangeException(nameof(row)); + if (column < 0 || column > 2) + throw new ArgumentOutOfRangeException(nameof(column)); + + return this[(row * 3) + column]; + } + + set + { + if (row < 0 || row > 2) + throw new ArgumentOutOfRangeException(nameof(row)); + if (column < 0 || column > 2) + throw new ArgumentOutOfRangeException(nameof(column)); + + this[(row * 3) + column] = value; + } + } + + public bool Equals(Float3x3 other) + => M11.Equals(other.M11) && M12.Equals(other.M12) && M13.Equals(other.M13) + && M21.Equals(other.M21) && M22.Equals(other.M22) && M23.Equals(other.M23) + && M31.Equals(other.M31) && M32.Equals(other.M32) && M33.Equals(other.M33); + + public override bool Equals(object obj) + { + if (obj is null || !(obj is Float3x3)) + return false; + return Equals((Float3x3)obj); + } + + public static bool operator ==(Float3x3 left, Float3x3 right) + => left.Equals(right); + + 
public static bool operator !=(Float3x3 left, Float3x3 right) + => !left.Equals(right); + + public override int GetHashCode() + => M11.GetHashCode() ^ M12.GetHashCode() ^ M13.GetHashCode() + ^ M21.GetHashCode() ^ M22.GetHashCode() ^ M23.GetHashCode() + ^ M31.GetHashCode() ^ M32.GetHashCode() ^ M33.GetHashCode(); + + public static readonly Float3x3 Zero = new Float3x3(); + + public static readonly Float3x3 Identity = new Float3x3 { M11 = 1f, M22 = 1f, M33 = 1f }; + } +} diff --git a/K4AdotNet/K4AdotNet.csproj b/K4AdotNet/K4AdotNet.csproj new file mode 100644 index 0000000..c2a54d9 --- /dev/null +++ b/K4AdotNet/K4AdotNet.csproj @@ -0,0 +1,31 @@ + + + + netstandard2.0 + 0.0.0 + bibigone + + K4AdotNet + Copyright (c) 2019 + https://github.com/bibigone/k4a.net/blob/master/LICENSE + https://github.com/bibigone/k4a.net/blob/master/README.md + https://github.com/bibigone/k4a.net + Kinect for Azure .Net Wrapper + + + + K4AdotNet.xml + Off + 1701;1702;1591 + + + + K4AdotNet.xml + Off + + + + + + + diff --git a/K4AdotNet/K4AdotNet.xml b/K4AdotNet/K4AdotNet.xml new file mode 100644 index 0000000..4b682ab --- /dev/null +++ b/K4AdotNet/K4AdotNet.xml @@ -0,0 +1,1476 @@ + + + + K4AdotNet + + + + Two dimensional floating point vector. + + + X component of a vector. Corresponds to 0 index in array representation. + + + Y component of a vector. Corresponds to 1 index in array representation. + + + Constructs vector with given components. + X component + Y component + + + Indexed access to vector components. + Index of component: X - 0, Y - 1. + Value of appropriate component. + + + Per-component comparison. + Other vector to be compared to this one. + true if all components are equal. + + + Formats vector as [X Y] string. + Format string for each individual component in string representation. + Culture for formatting numbers to strings. + String representation of vector in a given Culture. + + + Zero vector. + + + Unit vector in +X direction. + + + Unit vector in +Y direction. 
+ + + X, Y, Z representation of a vector. + + + X component of a vector. Corresponds to 0 index in array representation. + + + Y component of a vector. Corresponds to 1 index in array representation. + + + Z component of a vector. Corresponds to 2 index in array representation. + + + Constructs vector with given components. + X component + Y component + Z component + + + Indexed access to vector components. + Index of component: X - 0, Y - 1, Z - 2. + Value of appropriate component. + + + Per-component comparison. + Other vector to be compared to this one. + true if all components are equal. + + + Formats vector as [X Y Z] string. + Format string for each individual component in string representation. + Culture for formatting numbers to strings. + String representation of vector in a given Culture. + + + Zero vector. + + + Unit vector in +X direction. + + + Unit vector in +Y direction. + + + Unit vector in +Z direction. + + + Placeholder for 3x3 matrix data. + + + Result code returned by Azure Kinect APIs. + + + The result was successful + + + The result was a failure + + + The input buffer was too small + + + Result code returned by Azure Kinect APIs. + + + The result was successful + + + The result was a failure + + + Result code returned by Azure Kinect APIs. + + + The result was successful + + + The result was a failure + + + The operation timed out + + + Handle to an Azure Kinect capture. + + Empty captures are created with k4a_capture_create(). + Captures can be obtained from a device using k4a_device_get_capture(). + + + + + Call this method if you want to have one more reference to the same capture. + + Additional reference to the same capture. Don't forget to call Dispose() method for object returned. + + + Handle to an Azure Kinect device. + + + Interops to some native functions from k4a.h header file. + + + Add a reference to a capture. + Capture to add a reference to. + Call this function to add an additional reference to a capture. 
+ This reference must be removed with . + + + Release a capture. + Capture to release. + + + Add a reference to the image. + Handle of the image for which the get operation is performed on. + + References manage the lifetime of the object. When the references reach zero the object is destroyed. A caller must + not access the object after its reference is released. + + + + Remove a reference from the image. + Handle of the image for which the get operation is performed on. + + References manage the lifetime of the object. When the references reach zero the object is destroyed. A caller must + not access the object after its reference is released. + + + + Destroy transformation handle. + Transformation handle to destroy. + + + Closes an Azure Kinect device. + Handle of device for which the get operation is performed on. + Once closed, the handle is no longer valid. + + + Base class for all native handles declared it Sensor SDK. + + Handles represent object instances in Sensor SDK. + Handles are opaque pointers returned by the SDK which represent an object. + Invalid handles are set to 0 (IntPtr.Zero). + + + + Instances always own handles they store. + + + Invalid handle is IntPtr.Zero. + + + Handle to an Azure Kinect image. + Images from a device are retrieved through a k4a_capture_t object returned by k4a_device_get_capture(). + + + Call this method if you want to have one more reference to the same image. + Additional reference to the same image. Don't forget to call Dispose() method for object returned. + + + Handle to an Azure Kinect transformation context. + Handles are created with k4a_transformation_create(). + + + Name of Kinect for Azure Sensor SDK DLL. + + + Information about device calibration in particular depth mode and color resolution. + + + Depth camera calibration. + + + Color camera calibration. + + + Extrinsic transformation parameters. 
+ + The extrinsic parameters allow 3D coordinate conversions between depth camera, color camera, the IMU's gyroscope + and accelerometer.To transform from a source to a target 3D coordinate system, use the parameters stored + under Extrinsics[source * (int)CalibrationSensor.Count + target]. + + + + + Depth camera mode for which calibration was obtained. + + + Color camera resolution for which calibration was obtained. + + + Extrinsic calibration defines the physical relationship between two separate sensors inside Kinect for Azure device. + + + Rotation matrix. + + + Translation vector. + + + Kinect for Azure device consists of different sensors each of them has their own coordinate system and calibration extrinsics. + + + Calibration type is unknown. + + + Depth sensor. + + + Color sensor. + + + Gyroscope sensor. + + + Accelerometer sensor. + + + Number of types excluding unknown type. + + + Intrinsic calibration represents the internal optical properties of the camera. + Azure Kinect devices are calibrated with Brown Conrady which is compatible with OpenCV. + + + Principal point in image, x. Corresponding index in array: 0. + + + Principal point in image, y. Corresponding index in array: 1. + + + Focal length x. Corresponding index in array: 2. + + + Focal length y. Corresponding index in array: 3. + + + k1 radial distortion coefficient. Corresponding index in array: 4. + + + kw radial distortion coefficient. Corresponding index in array: 5. + + + k3 radial distortion coefficient. Corresponding index in array: 6. + + + k4 radial distortion coefficient. Corresponding index in array: 7. + + + k5 radial distortion coefficient. Corresponding index in array: 8. + + + k6 radial distortion coefficient. Corresponding index in array: 9. + + + Center of distortion in Z=1 plane, x (only used for Rational6KT). Corresponding index in array: 10. + + + Center of distortion in Z=1 plane, y (only used for Rational6KT). Corresponding index in array: 11. 
+ + + Tangential distortion coefficient 2. Corresponding index in array: 12. + + + Tangential distortion coefficient 1. Corresponding index in array: 13. + + + Metric radius. Corresponding index in array: 14. + + + Array representation of intrinsic model parameters. + + + Camera sensor intrinsic calibration data. + + Intrinsic calibration represents the internal optical properties of the camera. + + Azure Kinect devices are calibrated with Brown Conrady which is compatible with OpenCV. + + + + Type of calibration model used. + + + Number of valid entries in . + + + Calibration parameters. + + + The model used to interpret the calibration parameters. + + + Calibration model is unknown. + + + Calibration model is Theta (arctan). + + + Calibration model Polynomial 3K. + + + Calibration model Rational 6KT. + + + Calibration model Brown Conrady (compatible with OpenCV). + + + Camera calibration contains intrinsic and extrinsic calibration information for depth/color camera. + + + Extrinsic calibration data. + + + Intrinsic calibration data. + + + Resolution width of the camera. + + + Resolution height of the camera. + + + Max FOV of the camera. + + + Color sensor control commands. + + Control values set on a device are reset only when the device is power cycled. The device will retain the settings + even if the device is closed or the application is restarted. + + + + Exposure time setting. + + May be set to or . + Exposure time is measured in microseconds. + + + + Exposure or Framerate priority setting. + + May only be set to . + Value of 0 means framerate priority. Value of 1 means exposure priority. + Using exposure priority may impact the framerate of both the color and depth cameras. + Deprecated starting in 1.1.0. Please discontinue usage, firmware does not support this. + + + + Brightness setting. + + May only be set to . + The valid range is 0 to 255. The default value is 128. + + + + Contrast setting. + + May only be set to . + + + + Saturation setting. 
+ + May only be set to . + + + + Sharpness setting. + + May only be set to . + + + + White balance setting. + + May be set to or . + The unit is degrees Kelvin. The setting must be set to a value evenly divisible by 10 degrees. + + + + Backlight compensation setting. + + May only be set to . + Value of 0 means backlight compensation is disabled. Value of 1 means backlight compensation is enabled. + + + + Gain setting. + + May only be set to . + + + + Powerline frequency setting. + + May only be set to . + Value of 1 sets the powerline compensation to 50 Hz. Value of 2 sets the powerline compensation to 60 Hz. + + + + Color sensor control mode. + + + set the associated to auto mode + + + set the associated to manual mode + + + Color sensor resolutions. + + + + Color camera will be turned off with this setting + + + 1280x720 16:9 + + + 1920x1080 16:9 + + + 2560x1440 16:9 + + + 2048x1536 4:3 + + + 3840x2160 16:9 + + + 4096x3072 4:3 + + + Helper extension methods for enumeration. + + + Returns image width in pixels for a given resolution. + + + Returns image height in pixels for a given resolution. + + + Depth sensor capture modes. + + See the hardware specification for additional details on the field of view, and supported frame rates + for each mode. + + Binned modes reduce the captured camera resolution by combining adjacent sensor pixels into a bin. + + + + + Depth sensor will be turned off with this setting. + + + Depth captured at 320x288. Passive IR is also captured at 320x288. + + + Depth captured at 640x576. Passive IR is also captured at 640x576. + + + Depth captured at 512x512. Passive IR is also captured at 512x512. + + + Depth captured at 1024x1024. Passive IR is also captured at 1024x1024. + + + Passive IR only, captured at 1024x1024. + + + Helper extension methods for enumeration. + + + Returns depth and IR images width in pixels for a given depth mode. + + + Returns depth and IR images height in pixels for a given depth mode. 
+ + + Is depth mode has wide field of view? + + + Does depth mode use binning for smoothing/filtering? + + + Configuration parameters for an Azure Kinect device. + + + Image format to capture with the color camera. + + The color camera does not natively produce BGRA32 images. + Setting value will result in higher CPU utilization. + + + + Image resolution to capture with the color camera. + + + Capture mode for the depth camera. + + + Desired frame rate for the color and depth camera. + + + Only produce capture objects if they contain synchronized color and depth images. + + This setting controls the behavior in which images are dropped when images are produced faster than they can be + read, or if there are errors in reading images from the device. + + If set to , capture objects will only be produced with both color and depth images. + If set to , capture objects may be produced only a single image when the corresponding image is dropped. + + Setting this to ensures that the caller receives all of the images received from the camera, regardless of + whether the corresponding images expected in the capture are available. + + If either the color or depth camera are disabled, this setting has no effect. + + + + Desired delay between the capture of the color image and the capture of the depth image. + + A negative value indicates that the depth image should be captured before the color image. + Any value between negative and positive one capture period is valid. + + + + The external synchronization mode. + + + The external synchronization timing. + + If this camera is a subordinate, this sets the capture delay between the color camera capture and the external + input pulse. A setting of zero indicates that the master and subordinate color images should be aligned. + + This setting does not effect the 'Sync out' connection. + + This value must be positive and range from zero to one capture period. + + If this is not a subordinate, then this value is ignored. 
+ + This field has uint32_t type in C structure k4a_device_configuration_t which corresponds to type in .Net. + But to be CLS-complaint, this field has type. + It must be OK for all practical usages. + + + + Streaming indicator automatically turns on when the color or depth camera's are in use. + This setting disables that behavior and keeps the LED in an off state. + + + Initial configuration setting for disabling all sensors. + Use this setting to initialize a to a disabled state. + + + Interops to most of native functions from k4a.h header file. + + + Default device index. + Passed as an argument to to open the default sensor. + + + Gets the number of connected devices. + Number of sensors connected to the PC. + + + Open an Azure Kinect device. + The index of the device to open, starting with 0. Use constant as value for this parameter to open default device. + Output parameter which on success will return a handle to the device. + if the device was opened successfully. + + + Reads a sensor capture. + Handle obtained by . + If successful this contains a handle to a capture object. + + Specifies the time the function should block waiting for the capture. + If set to , the function will return without blocking. + Passing will block indefinitely until data is available, the + device is disconnected, or another error occurs. + + + if a capture is returned. + If a capture is not available before the timeout elapses, the function will return . + All other failures will return . + + + This function needs to be called while the device is in a running state; + after is called and before is called. + + + + Reads an IMU sample. + Handle obtained by . + Information about IMU sample. + + Specifies the time the function should block waiting for the sample. + If set to , the function will return without blocking. + Passing will block indefinitely until data is available, the + device is disconnected, or another error occurs. + + + if a sample is returned. 
+ If a sample is not available before the timeout elapses, the function will return . + All other failures will return . + + + This function needs to be called while the device is in a running state; + after is called and before is called. + + + + Create an empty capture object. + Output parameter which on success will return a handle to the capture. + if the device was opened successfully. + + + Get the color image associated with the given capture. + Capture handle containing the image. + Image handle. + Call this function to access the color image part of this capture. + + + Get the depth image associated with the given capture. + Capture handle containing the image. + Image handle. + Call this function to access the depth image part of this capture. + + + Get the IR image associated with the given capture. + Capture handle containing the image. + Image handle. + Call this function to access the IR image part of this capture. + + + Set or add a color image to the associated capture. + Capture handle to hold the image. + Image handle containing the image or to remove color image from a given capture if any. + If there is already a color image contained in the capture, the existing image will be dereferenced and replaced with the new image. + + + Set or add a depth image to the associated capture. + Capture handle to hold the image. + Image handle containing the image or to remove depth image from a given capture if any. + If there is already a depth image contained in the capture, the existing image will be dereferenced and replaced with the new image. + + + Set or add a IR image to the associated capture. + Capture handle to hold the image. + Image handle containing the image or to remove IR image from a given capture if any. + If there is already a IR image contained in the capture, the existing image will be dereferenced and replaced with the new image. + + + Set the temperature associated with the capture. + Capture handle to set the temperature on. 
+ Temperature in Celsius to store. + + + Get the temperature associated with the capture. + Capture handle to retrieve the temperature from. + + This function returns the temperature of the device at the time of the capture in Celsius. + If the temperature is unavailable, the function will return . + + + + + Create an image. + + The format of the image that will be stored in this image container. + Width in pixels. + Height in pixels. + The number of bytes per horizontal line of the image. + Handle of created image in case of success. + on success. + This function is used to create images of formats that have consistent stride. + The function is not suitable for compressed formats that may not be represented by the same number of bytes per line. + The function will allocate an image buffer of size * bytes. + To create an image object without the API allocating memory, or to represent an image that has a non-deterministic + stride, use . + + + + Callback function for a memory object being destroyed. + The buffer pointer that was supplied by the caller. + The context for the memory object that needs to be destroyed that was supplied by the caller. + + + Create an image from a pre-allocated buffer. + The format of the image that will be stored in this image container. + Width in pixels. + Height in pixels. + The number of bytes per horizontal line of the image. + Pointer to a pre-allocated image buffer. + Size in bytes of the pre-allocated image buffer. + + Callback to the buffer free function, called when all references to the buffer have been released. + This parameter is optional (can be ). + + Context for the buffer free function. This value will be called as 2nd parameter to + when the callback is invoked. + + Handle of created image in case of success. + on success. + + This function creates an from a pre-allocated buffer. When all references to this object reach zero + the provided callback function is called so that the memory can be released. 
+ + + + Get the image buffer. + Handle of the image for which the get operation is performed on. + + The function will return if there is an error, and will normally return a pointer to the image buffer. + + Use this buffer to access the raw image data. + + + Get the image buffer size. + Handle of the image for which the get operation is performed on. + The function will return if there is an error, and will normally return the image size. + Use this function to know what the size of the image buffer is returned by . + + + Get the format of the image. + Handle of the image for which the get operation is performed on. + + This function is not expected to fail, all images are created with a known format. + If the is invalid, the function will return . + + Use this function to determine the format of the image buffer. + + + Get the image width in pixels. + Handle of the image for which the get operation is performed on. + + This function is not expected to fail, all images are created with a known width. + If the is invalid, the function will return 0. + + + + Get the image height in pixels. + Handle of the image for which the get operation is performed on. + + This function is not expected to fail, all images are created with a known height. + If the is invalid, the function will return 0. + + + + Get the image stride in bytes. + Handle of the image for which the get operation is performed on. + + This function is not expected to fail, all images are created with a known stride. + If the is invalid or the image's format does not have a stride, the function will return 0. + + + + Get the image time stamp. + Handle of the image for which the get operation is performed on. + + If the is invalid or if no time stamp was set for the image, + this function will return . + It is also possible for to be a valid time stamp originating from the beginning + of a recording or the start of streaming. + + + Returns the time stamp of the image. 
Time stamps are recorded by the device and represent the mid-point of exposure. + They may be used for relative comparison, but their absolute value has no defined meaning. + + + + Get the image exposure in microseconds. + Handle of the image for which the get operation is performed on. + + If the is invalid or if no exposure was set for the image, + this function will return 0. Otherwise, + it will return the image exposure time in microseconds. + + Returns an exposure time in microseconds. This is only supported on color image formats. + + + Get the image white balance. + Handle of the image for which the get operation is performed on. + + Returns the image white balance in Kelvin. If is invalid, or the white balance was not set or + not applicable to the image, the function will return 0. + + Returns the image's white balance. This function is only valid for color captures, and not for depth or IR captures. + + + Get the image ISO speed. + Handle of the image for which the get operation is performed on. + + Returns the ISO speed of the image. 0 indicates the ISO speed was not available or an error occurred. + + This function is only valid for color captures, and not for depth or IR captures. + + + Set the time stamp, in microseconds, of the image. + Handle of the image to set the timestamp on. + Time stamp of the image. + + Use this function in conjunction with + or to construct an image. + + + + Set the exposure time, in microseconds, of the image. + Handle of the image to set the exposure time on. + Exposure time of the image in microseconds. + + Use this function in conjunction with + or to construct an image. + + + + Set the white balance of the image. + Handle of the image to set the white balance on. + White balance of the image in degrees Kelvin. + + Use this function in conjunction with + or to construct an image. + + + + Set the ISO speed of the image. + Handle of the image to set the ISO speed on. + ISO speed of the image. 
+ + Use this function in conjunction with + or to construct an image. + + + + Starts color and depth camera capture. + Handle obtained by . + The configuration we want to run the device in. This can be initialized with . + is returned on success. + + Individual sensors configured to run will now start to stream captured data. + + It is not valid to call this method a second time on the same device until has been called. + + + + Stops the color and depth camera capture. + Handle obtained by . + + The streaming of individual sensors stops as a result of this call. Once called, + may be called again to resume sensor streaming. + + This function may be called while another thread is blocking in . + Calling this function while another thread is in that function will result in that function returning a failure. + + + + Starts the IMU sample stream. + Handle obtained by . + is returned on success. + + Call this API to start streaming IMU data. It is not valid to call this function a second time on the same + device until has been called. + + This function is dependent on the state of the cameras. The color or depth camera must be started before the IMU. + will be returned if one of the cameras is not running. + + + + Stops the IMU capture. + Handle obtained by . + + The streaming of the IMU stops as a result of this call. Once called, may + be called again to resume sensor streaming, so long as the cameras are running. + + This function may be called while another thread is blocking in . + Calling this function while another thread is in that function will result in that function returning a failure. + + + + Get the Azure Kinect device serial number. + Handle obtained by . + + Location to write the serial number to. If the function returns , + this will be a NULL-terminated string of ASCII characters. + If this input is , will still be updated to return + the size of the buffer needed to store the string. + + + On input, the size of the buffer if that pointer is not . 
+ On output, this value is set to the actual number of bytes in the serial number (including the null terminator). + + + A return of means that the has been filled in. + If the buffer is too small the function returns and the size of the serial number is + returned in the parameter. + All other failures return . + + + + Get the version numbers of the device's subsystems. + Handle obtained by . + Output parameter which on success will return version info. + + A return of means that the version structure has been filled in. + All other failures return . + + + + Get the Azure Kinect color sensor control capabilities. + Handle obtained by . + Color sensor control command. + Output: whether the color sensor's control support auto mode or not. if it supports auto mode, otherwise . + Output: the color sensor's control minimum value of . + Output: the color sensor's control maximum value of . + Output: the color sensor's control step value of . + Output: the color sensor's control default value of . + Output: the color sensor's control default mode of . + if the value was successfully returned, if an error occurred + + + Get the Azure Kinect color sensor control value. + Handle obtained by . + Color sensor control command. + This mode represents whether the command is in automatic or manual mode. + This value is always written, but is only valid when the returned is for the current . + if the value was successfully returned, if an error occurred. + + Each control command may be set to manual or automatic. See the definition of on + how to interpret the for each command. + + Some control commands are only supported in manual mode. When a command is in automatic mode, the for + that command is not valid. + + Control values set on a device are reset only when the device is power cycled. The device will retain the + settings even if the is closed or the application is restarted. + + + + Set the Azure Kinect color sensor control value. + Handle obtained by . 
+ Color sensor control command. + Color sensor control mode to set. This mode represents whether the command is in automatic or manual mode. + + Value to set the color sensor's control to. The value is only valid if + is set to , and is otherwise ignored. + + if the value was successfully set, if an error occurred + + Each control command may be set to manual or automatic. See the definition of on how + to interpret the for each command. + + Some control commands are only supported in manual mode. When a command is in automatic mode, the for that + command is not valid. + + Control values set on a device are reset only when the device is power cycled. The device will retain the settings + even if the device is closed or the application is restarted. + + + + Get the raw calibration blob for the entire Azure Kinect device. + Handle obtained by . + + Location to write the calibration data to. This field may optionally be set to for the caller to query for + the needed data size. + + + On passing into the function this variable represents the available size of the + buffer. On return this variable is updated with the amount of data actually written to the buffer, or the size + required to store the calibration buffer if is . + + + if was successfully written. + If points to a buffer size that is + too small to hold the output or data is , is returned + and is updated to contain the minimum buffer size needed to capture the calibration data. + + + + Get the camera calibration for the entire Azure Kinect device. + Handle obtained by . + Mode in which depth camera is operated. + Resolution in which color camera is operated. + Output: calibration data. + if was successfully written. otherwise. + + The represents the data needed to transform between the camera views and may be + different for each operating and the device is configured to operate in. + + The output is used as input to all calibration and transformation functions. 
+ + + + Get the device jack status for the synchronization in and synchronization out connectors. + Handle obtained by . + Upon successful return this value will be set to true if a cable is connected to this sync in jack. + Upon successful return this value will be set to true if a cable is connected to this sync out jack. + if the connector status was successfully read. + + If is then mode can be set to + or . If is then + mode can be set to or . + + + + Get the camera calibration for a device from a raw calibration blob. + Raw calibration blob obtained from a device or recording. The raw calibration must be NULL terminated. + The size, in bytes, of including the NULL termination. + Mode in which depth camera is operated. + Resolution in which color camera is operated. + Result: calibration data + if was successfully written. otherwise. + + The represents the data needed to transform between the camera views and is + different for each operating and the device is configured to operate in. + + The output is used as input to all transformation functions. + + + + Transform a 3D point of a source coordinate system into a 3D point of the target coordinate system. + Camera calibration data. + The 3D coordinates in millimeters representing a point in . + The current camera. + The target camera. + Output: the new 3D coordinates of the input point in the coordinate space in millimeters. + + if was successfully written. + if contained invalid transformation parameters. + + + This function is used to transform 3D points between depth and color camera coordinate systems. The function uses the + extrinsic camera calibration. It computes the output via multiplication with a precomputed matrix encoding a 3D + rotation and a 3D translation. If and are the same, then will + be identical to . + + + + + Transform a 2D pixel coordinate with an associated depth value of the source camera + into a 3D point of the target coordinate system. + + Camera calibration data. 
+ The 2D pixel in coordinates. + The depth of in millimeters. + The current camera. + The target camera. + Output: the 3D coordinates of the input pixel in the coordinate system of in millimeters. + + The output parameter returns a value of if the is a valid coordinate, + and will return if the coordinate is not valid in the calibration model. + + + if was successfully written. + if + contained invalid transformation parameters. + If the function returns , but valid is , + the transformation was computed, but the results in are outside of the range of valid + calibration and should be ignored. + + + This function applies the intrinsic calibration of to compute the 3D ray from the focal point of the + camera through pixel .The 3D point on this ray is then found using . If + is different from , the 3D point is transformed to using + . + In practice, and will often be identical. In this + case, no 3D to 3D transformation is applied. + + If is not considered as valid pixel coordinate + according to the intrinsic camera model, is set to . + If it is valid, valid will be set to . The user + should not use the value of if was set to . + + + + Transform a 3D point of a source coordinate system into a 2D pixel coordinate of the target camera. + Camera calibration data. + The 3D coordinates in millimeters representing a point in . + The current camera. + The target camera. + Output: the 2D pixel in coordinates. + + The output parameter returns if the is a valid coordinate in the + coordinate system, and will return if the coordinate is not valid in the calibration model. + + + if was successfully written. + if contained invalid transformation parameters. + If the function returns , but is , + the transformation was computed, but the results in are outside of the range of valid calibration + and should be ignored. + + + If is different from , is transformed + to using . + In practice, and will often be identical. + In this case, no 3D to 3D transformation is applied. 
The 3D point in the coordinate system of is then + projected onto the image plane using the intrinsic calibration of . + + If does not map to a valid 2D coordinate in the coordinate system, + is set to . If it is valid, will be set to . + The user should not use the value of if was set to . + + + + + Transform a 2D pixel coordinate with an associated depth value of the source camera into a 2D pixel coordinate of the target camera. + + Camera calibration data. + The 2D pixel in coordinates. + The depth of in millimeters. + The current camera. + The target camera. + Output: the 2D pixel in coordinates. + + The output parameter returns if the is a valid coordinate in the + coordinate system, and will return if the coordinate is not valid in the calibration model. + + + if was successfully written. + if contained invalid transformation parameters. + If the function returns , but valid is , + the transformation was computed, but the results in are outside of the range of valid calibration + and should be ignored. + + + This function maps a pixel between the coordinate systems of the depth and color cameras. It is equivalent to calling + to compute the 3D point corresponding to and then using + to map the 3D point into the coordinate system of the . + + If and are identical, the function immediately sets to + and returns without computing any transformations. + + If does not map to a valid 2D coordinate in the coordinate system, + is set to . If it is valid, will be set to . + The user should not use the value of if was set to 0. + + + + Get handle to transformation. + Camera calibration data. + A transformation handle. An invalid handle is returned if creation fails. + + The transformation handle is used to transform images from the coordinate system of one camera into the other. Each + transformation handle requires some pre-computed resources to be allocated, which are retained until the handle is + destroyed. 
+ + + + Transforms the depth map into the geometry of the color camera. + Transformation handle. + Handle to input depth image. + Handle to output transformed depth image. + + if was successfully written + and otherwise. + + + This produces a depth image for which each pixel matches the corresponding pixel coordinates of the color camera. + + and must be of format . + + must have a width and height matching the width and height of the color camera in the mode + specified by the used to create the with . + + The contents will be filled with the depth values derived from in the color + camera's coordinate space. + + should be created by the caller using + or . + + + + Transforms a color image into the geometry of the depth camera. + Transformation handle. + Handle to input depth image. + Handle to input color image. + Handle to output transformed color image. + + if was successfully written + and otherwise. + + + This produces a color image for which each pixel matches the corresponding pixel coordinates of the depth camera. + + and need to represent the same moment in time. The depth data will be applied to the + color image to properly warp the color data to the perspective of the depth camera. + + must be of type . must be of format + . + + image must be of format . must + have the width and height of the depth camera in the mode specified by the used to create + the with . + + should be created by the caller using + or . + + + + Transforms the depth image into 3 planar images representing X, Y and Z-coordinates of corresponding 3D points. + Transformation handle. + Handle to input depth image. + Geometry in which depth map was computed. + Handle to output xyz image. + + if was successfully written + and otherwise. + + + must be of format . + + The parameter tells the function what the perspective of the is. + If the was captured directly from the depth camera, the value should be . 
+ If the is the result of a transformation into the color camera's coordinate space using + , + the value should be . + + The format of must be . The width and height of must match the + width and height of . must have a stride in bytes of at least 6 times its width in pixels. + + Each pixel of the consists of three values, totaling 6 bytes. The three values are the + X, Y, and Z values of the point. + + should be created by the caller using + or . + + + + Firmware build type. + + + Production firmware. + + + Pre-production firmware. + + + Firmware signature type. + + + Microsoft signed firmware. + + + Test signed firmware. + + + Unsigned firmware. + + + + Version information about sensor firmware. + + + + Major version; represents a breaking change. + + + Minor version; represents additional features, no regression from lower versions with same major version. + + + Reserved. + + + Creates version with specified components. + Value for field . + Value for field . + Value for field . + + + Per-component comparison of versions. + Version to be compared with this one. + true - versions are the same, false - versions are differ from each other. + + + Color and depth sensor frame rate. + + This enumeration is used to select the desired frame rate to operate the cameras. The actual + frame rate may vary slightly due to dropped data, synchronization variation between devices, + clock accuracy, or if the camera exposure priority mode causes reduced frame rate. + + + + + Five (5) frames per second. + + + Fifteen (15) frames per second. + + + Thirty (30) frames per second. + + + Helper extension and static methods for enumeration. + + + Convert enumeration value to appropriate number of frames per second (Hz). + + + Constructs enumeration value from appropriate number of frames per second (Hz). + + + Structure to define hardware version. + + + Color camera firmware version. + + + Depth camera firmware version. + + + Audio device firmware version. 
+ + + Depth sensor firmware version. + + + Build type reported by the firmware. + + + Signature type of the firmware. + + + Image format type. + + + + The buffer for each image is encoded as a JPEG and can be decoded by a JPEG decoder. + + + Because the image is compressed, the stride parameter is not applicable. + Each MJPG encoded image in a stream may be of differing size depending on the compression efficiency. + + + + + NV12 images separate the luminance and chroma data such that all the luminance is at the + beginning of the buffer, and the chroma lines follow immediately after. + + + Stride indicates the length of each line in bytes and should be used to determine the start location of each line + of the image in memory. Chroma has half as many lines of height and half the width in pixels of the luminance. + Each chroma line has the same width in bytes as a luminance line. + + + + + YUY2 stores chroma and luminance data in interleaved pixels. + + + Stride indicates the length of each line in bytes and should be used to determine the start location of each + line of the image in memory. + + + + + Each pixel of BGRA32 data is four bytes. The first three bytes represent Blue, Green, + and Red data. The fourth byte is the alpha channel and is unused in the Azure Kinect APIs. + + + Stride indicates the length of each line in bytes and should be used to determine the start location of each + line of the image in memory. + + The Azure Kinect device does not natively capture in this format. Requesting images of this format + requires additional computation in the API. + + + + + Each pixel of DEPTH16 data is two bytes of little endian unsigned depth data. The unit of the data is in + millimeters from the origin of the camera. + + + Stride indicates the length of each line in bytes and should be used to determine the start location of each + line of the image in memory. + + + + + This format represents infrared light and is captured by the depth camera. 
+ Each pixel of IR16 data is two bytes of little endian unsigned depth data. The value of the data represents + brightness. + + + Stride indicates the length of each line in bytes and should be used to determine the start location of each + line of the image in memory. + + + + + Custom image format. + Used in conjunction with user created images or images packing non-standard data. + + + See the originator of the custom formatted image for information on how to interpret the data. + + + + IMU sample. + + + Temperature reading of this sample (Celsius). + + + Accelerometer sample in meters per second squared. + + + Time stamp of the accelerometer. + + + Gyro sample in radians per second. + + + Time stamp of the gyroscope in microseconds. + + + Synchronization mode when connecting two or more devices together. + + + Neither 'Sync In' or 'Sync Out' connections are used. + + + The 'Sync Out' jack is enabled and synchronization data it driven out the connected wire. + + + + The 'Sync In' jack is used for synchronization and 'Sync Out' is driven for the + next device in the chain. 'Sync Out' is a mirror of 'Sync In' for this mode. + + + + diff --git a/K4AdotNet/NativeApiCallResults/BufferResult.cs b/K4AdotNet/NativeApiCallResults/BufferResult.cs new file mode 100644 index 0000000..031697e --- /dev/null +++ b/K4AdotNet/NativeApiCallResults/BufferResult.cs @@ -0,0 +1,22 @@ +namespace K4AdotNet.NativeApiCallResults +{ + // Defined in k4atypes.h: + // typedef enum + // { + // K4A_BUFFER_RESULT_SUCCEEDED = 0, + // K4A_BUFFER_RESULT_FAILED, + // K4A_BUFFER_RESULT_TOO_SMALL, + // } k4a_buffer_result_t; + /// Result code returned by Azure Kinect APIs. 
+ internal enum BufferResult + { + /// The result was successful + Succeeded = 0, + + /// The result was a failure + Failed, + + /// The input buffer was too small + TooSmall, + } +} diff --git a/K4AdotNet/NativeApiCallResults/Result.cs b/K4AdotNet/NativeApiCallResults/Result.cs new file mode 100644 index 0000000..6464699 --- /dev/null +++ b/K4AdotNet/NativeApiCallResults/Result.cs @@ -0,0 +1,18 @@ +namespace K4AdotNet.NativeApiCallResults +{ + // Defined in k4atypes.h: + // typedef enum + // { + // K4A_RESULT_SUCCEEDED = 0, + // K4A_RESULT_FAILED, + // } k4a_result_t; + /// Result code returned by Azure Kinect APIs. + internal enum Result + { + /// The result was successful + Succeeded = 0, + + /// The result was a failure + Failed, + } +} diff --git a/K4AdotNet/NativeApiCallResults/WaitResult.cs b/K4AdotNet/NativeApiCallResults/WaitResult.cs new file mode 100644 index 0000000..a84d3c6 --- /dev/null +++ b/K4AdotNet/NativeApiCallResults/WaitResult.cs @@ -0,0 +1,22 @@ +namespace K4AdotNet.NativeApiCallResults +{ + // Defined in k4atypes.h: + // typedef enum + // { + // K4A_WAIT_RESULT_SUCCEEDED = 0, + // K4A_WAIT_RESULT_FAILED, + // K4A_WAIT_RESULT_TIMEOUT, + // } k4a_wait_result_t; + /// Result code returned by Azure Kinect APIs. + internal enum WaitResult + { + /// The result was successful + Succeeded = 0, + + /// The result was a failure + Failed, + + /// The operation timed out + Timeout, + } +} diff --git a/K4AdotNet/NativeHandles/CaptureHandle.cs b/K4AdotNet/NativeHandles/CaptureHandle.cs new file mode 100644 index 0000000..cfc10d2 --- /dev/null +++ b/K4AdotNet/NativeHandles/CaptureHandle.cs @@ -0,0 +1,31 @@ +namespace K4AdotNet.NativeHandles +{ + // Defined in k4atypes.h: + // K4A_DECLARE_HANDLE(k4a_capture_t); + /// Handle to an Azure Kinect capture. + /// + /// Empty captures are created with k4a_capture_create(). + /// Captures can be obtained from a device using k4a_device_get_capture(). 
+ /// + internal sealed class CaptureHandle : HandleBase + { + private CaptureHandle() + { } + + /// + /// Call this method if you want to have one more reference to the same capture. + /// + /// Additional reference to the same capture. Don't forget to call Dispose() method for object returned. + public CaptureHandle DuplicateReference() + { + DllImports.CaptureReference(handle); + return new CaptureHandle { handle = handle }; + } + + protected override bool ReleaseHandle() + { + DllImports.CaptureRelease(handle); + return true; + } + } +} diff --git a/K4AdotNet/NativeHandles/DeviceHandle.cs b/K4AdotNet/NativeHandles/DeviceHandle.cs new file mode 100644 index 0000000..49405bf --- /dev/null +++ b/K4AdotNet/NativeHandles/DeviceHandle.cs @@ -0,0 +1,17 @@ +namespace K4AdotNet.NativeHandles +{ + // Defined in k4atypes.h: + // K4A_DECLARE_HANDLE(k4a_device_t); + /// Handle to an Azure Kinect device. + internal sealed class DeviceHandle : HandleBase + { + private DeviceHandle() + { } + + protected override bool ReleaseHandle() + { + DllImports.DeviceClose(handle); + return true; + } + } +} diff --git a/K4AdotNet/NativeHandles/DllImports.cs b/K4AdotNet/NativeHandles/DllImports.cs new file mode 100644 index 0000000..fab7cdf --- /dev/null +++ b/K4AdotNet/NativeHandles/DllImports.cs @@ -0,0 +1,56 @@ +using System; +using System.Runtime.InteropServices; + +namespace K4AdotNet.NativeHandles +{ + /// Interops to some native functions from k4a.h header file. + internal static class DllImports + { + // K4A_EXPORT void k4a_capture_reference(k4a_capture_t capture_handle); + /// Add a reference to a capture. + /// Capture to add a reference to. + /// Call this function to add an additional reference to a capture. + /// This reference must be removed with . 
+ [DllImport(Sdk.K4A_DLL_NAME, EntryPoint = "k4a_capture_reference", CallingConvention = CallingConvention.Cdecl)] + public static extern void CaptureReference(IntPtr captureHandle); + + // K4A_EXPORT void k4a_capture_release(k4a_capture_t capture_handle); + /// Release a capture. + /// Capture to release. + [DllImport(Sdk.K4A_DLL_NAME, EntryPoint = "k4a_capture_release", CallingConvention = CallingConvention.Cdecl)] + public static extern void CaptureRelease(IntPtr captureHandle); + + // K4A_EXPORT void k4a_image_reference(k4a_image_t image_handle); + /// Add a reference to the image. + /// Handle of the image for which the get operation is performed on. + /// + /// References manage the lifetime of the object. When the references reach zero the object is destroyed. A caller must + /// not access the object after its reference is released. + /// + [DllImport(Sdk.K4A_DLL_NAME, EntryPoint = "k4a_image_reference", CallingConvention = CallingConvention.Cdecl)] + public static extern void ImageReference(IntPtr imageHandle); + + // K4A_EXPORT void k4a_image_release(k4a_image_t image_handle); + /// Remove a reference from the image. + /// Handle of the image for which the get operation is performed on. + /// + /// References manage the lifetime of the object. When the references reach zero the object is destroyed. A caller must + /// not access the object after its reference is released. + /// + [DllImport(Sdk.K4A_DLL_NAME, EntryPoint = "k4a_image_release", CallingConvention = CallingConvention.Cdecl)] + public static extern void ImageRelease(IntPtr imageHandle); + + // K4A_EXPORT void k4a_transformation_destroy(k4a_transformation_t transformation_handle); + /// Destroy transformation handle. + /// Transformation handle to destroy. 
+ [DllImport(Sdk.K4A_DLL_NAME, EntryPoint = "k4a_transformation_destroy", CallingConvention = CallingConvention.Cdecl)] + public static extern void TransformationDestroy(IntPtr transformationHandle); + + // K4A_EXPORT void k4a_device_close(k4a_device_t device_handle); + /// Closes an Azure Kinect device. + /// Handle of device for which the get operation is performed on. + /// Once closed, the handle is no longer valid. + [DllImport(Sdk.K4A_DLL_NAME, EntryPoint = "k4a_device_close", CallingConvention = CallingConvention.Cdecl)] + public static extern void DeviceClose(IntPtr deviceHandle); + } +} diff --git a/K4AdotNet/NativeHandles/HandleBase.cs b/K4AdotNet/NativeHandles/HandleBase.cs new file mode 100644 index 0000000..b680c6c --- /dev/null +++ b/K4AdotNet/NativeHandles/HandleBase.cs @@ -0,0 +1,24 @@ +using System; +using System.Runtime.InteropServices; + +namespace K4AdotNet.NativeHandles +{ + // Defined in k4atypes.h: + // #define K4A_DECLARE_HANDLE(_handle_name_) + // ... + /// Base class for all native handles declared in Sensor SDK. + /// + /// Handles represent object instances in Sensor SDK. + /// Handles are opaque pointers returned by the SDK which represent an object. + /// Invalid handles are set to 0 (IntPtr.Zero). + /// + internal abstract class HandleBase : SafeHandle + { + /// Instances always own handles they store. + protected HandleBase() : base(invalidHandleValue: IntPtr.Zero, ownsHandle: true) + { } + + /// Invalid handle is IntPtr.Zero. + public override bool IsInvalid => handle == IntPtr.Zero; + } +} diff --git a/K4AdotNet/NativeHandles/ImageHandle.cs b/K4AdotNet/NativeHandles/ImageHandle.cs new file mode 100644 index 0000000..1e1bf10 --- /dev/null +++ b/K4AdotNet/NativeHandles/ImageHandle.cs @@ -0,0 +1,26 @@ +namespace K4AdotNet.NativeHandles +{ + // Defined in k4atypes.h: + // K4A_DECLARE_HANDLE(k4a_image_t); + /// Handle to an Azure Kinect image.
+ /// Images from a device are retrieved through a k4a_capture_t object returned by k4a_device_get_capture(). + internal sealed class ImageHandle : HandleBase + { + private ImageHandle() + { } + + /// Call this method if you want to have one more reference to the same image. + /// Additional reference to the same image. Don't forget to call Dispose() method for object returned. + public ImageHandle DuplicateReference() + { + DllImports.ImageReference(handle); + return new ImageHandle { handle = handle }; + } + + protected override bool ReleaseHandle() + { + DllImports.ImageRelease(handle); + return true; + } + } +} diff --git a/K4AdotNet/NativeHandles/TransformationHandle.cs b/K4AdotNet/NativeHandles/TransformationHandle.cs new file mode 100644 index 0000000..c203c5e --- /dev/null +++ b/K4AdotNet/NativeHandles/TransformationHandle.cs @@ -0,0 +1,18 @@ +namespace K4AdotNet.NativeHandles +{ + // Defined in k4atypes.h: + // K4A_DECLARE_HANDLE(k4a_transformation_t); + /// Handle to an Azure Kinect transformation context. + /// Handles are created with k4a_transformation_create(). + internal sealed class TransformationHandle : HandleBase + { + private TransformationHandle() + { } + + protected override bool ReleaseHandle() + { + DllImports.TransformationDestroy(handle); + return true; + } + } +} diff --git a/K4AdotNet/Sdk.cs b/K4AdotNet/Sdk.cs new file mode 100644 index 0000000..f73c662 --- /dev/null +++ b/K4AdotNet/Sdk.cs @@ -0,0 +1,8 @@ +namespace K4AdotNet +{ + public static class Sdk + { + /// Name of Kinect for Azure Sensor SDK DLL. 
+ public const string K4A_DLL_NAME = "k4a"; + } +} diff --git a/K4AdotNet/Sensor/Calibration.cs b/K4AdotNet/Sensor/Calibration.cs new file mode 100644 index 0000000..fded31f --- /dev/null +++ b/K4AdotNet/Sensor/Calibration.cs @@ -0,0 +1,48 @@ +using System.Runtime.InteropServices; + +namespace K4AdotNet.Sensor +{ + // Defined in k4atypes.h: + // typedef struct _k4a_calibration_t + // { + // k4a_calibration_camera_t depth_camera_calibration; + // k4a_calibration_camera_t color_camera_calibration; + // k4a_calibration_extrinsics_t extrinsics[K4A_CALIBRATION_TYPE_NUM][K4A_CALIBRATION_TYPE_NUM]; + // k4a_depth_mode_t depth_mode; + // k4a_color_resolution_t color_resolution; + // } k4a_calibration_t; + /// Information about device calibration in particular depth mode and color resolution. + [StructLayout(LayoutKind.Sequential)] + public partial struct Calibration + { + /// Depth camera calibration. + [MarshalAs(UnmanagedType.Struct)] + public CameraCalibration DepthCameraCalibration; + + /// Color camera calibration. + [MarshalAs(UnmanagedType.Struct)] + public CameraCalibration ColorCameraCalibration; + + /// Extrinsic transformation parameters. + /// + /// The extrinsic parameters allow 3D coordinate conversions between depth camera, color camera, the IMU's gyroscope + /// and accelerometer.To transform from a source to a target 3D coordinate system, use the parameters stored + /// under Extrinsics[source * (int)CalibrationSensor.Count + target]. + /// + /// + [MarshalAs(UnmanagedType.ByValArray, ArraySubType = UnmanagedType.Struct, SizeConst = (int)CalibrationGeometry.Count * (int)CalibrationGeometry.Count)] + public CalibrationExtrinsics[] Extrinsics; + + /// Depth camera mode for which calibration was obtained. + public DepthMode DepthMode; + + /// Color camera resolution for which calibration was obtained. 
+ public ColorResolution ColorResolution; + + public CalibrationExtrinsics GetExtrinsics(CalibrationGeometry sourceSensor, CalibrationGeometry targetSensor) + => Extrinsics[(int)sourceSensor * (int)CalibrationGeometry.Count + (int)targetSensor]; + + public void SetExtrinsics(CalibrationGeometry sourceSensor, CalibrationGeometry targetSensor, CalibrationExtrinsics extrinsics) + => Extrinsics[(int)sourceSensor * (int)CalibrationGeometry.Count + (int)targetSensor] = extrinsics; + } +} diff --git a/K4AdotNet/Sensor/CalibrationExtrinsics.cs b/K4AdotNet/Sensor/CalibrationExtrinsics.cs new file mode 100644 index 0000000..6480c22 --- /dev/null +++ b/K4AdotNet/Sensor/CalibrationExtrinsics.cs @@ -0,0 +1,23 @@ +using System.Runtime.InteropServices; + +namespace K4AdotNet.Sensor +{ + // Defined in k4atypes.h: + // typedef struct _k4a_calibration_extrinsics_t + // { + // float rotation[9]; + // float translation[3]; + // } k4a_calibration_extrinsics_t; + /// Extrinsic calibration defines the physical relationship between two separate sensors inside Kinect for Azure device. + [StructLayout(LayoutKind.Sequential)] + public struct CalibrationExtrinsics + { + /// Rotation matrix. + [MarshalAs(UnmanagedType.Struct)] + public Float3x3 Rotation; + + /// Translation vector. 
+ [MarshalAs(UnmanagedType.Struct)] + public Float3 Translation; + } +} diff --git a/K4AdotNet/Sensor/CalibrationGeometry.cs b/K4AdotNet/Sensor/CalibrationGeometry.cs new file mode 100644 index 0000000..8dc0bff --- /dev/null +++ b/K4AdotNet/Sensor/CalibrationGeometry.cs @@ -0,0 +1,34 @@ +namespace K4AdotNet.Sensor +{ + // Defined in k4atypes.h: + // typedef enum + // { + // K4A_CALIBRATION_TYPE_UNKNOWN = -1, + // K4A_CALIBRATION_TYPE_DEPTH, + // K4A_CALIBRATION_TYPE_COLOR, + // K4A_CALIBRATION_TYPE_GYRO, + // K4A_CALIBRATION_TYPE_ACCEL, + // K4A_CALIBRATION_TYPE_NUM, + // } k4a_calibration_type_t; + /// Kinect for Azure device consists of different sensors each of them has their own coordinate system and calibration extrinsics. + public enum CalibrationGeometry + { + /// Calibration type is unknown. + Unknown = -1, + + /// Depth sensor. + Depth, + + /// Color sensor. + Color, + + /// Gyroscope sensor. + Gyro, + + /// Accelerometer sensor. + Accel, + + /// Number of types excluding unknown type. + Count, + } +} diff --git a/K4AdotNet/Sensor/CalibrationIntrinsicParameters.cs b/K4AdotNet/Sensor/CalibrationIntrinsicParameters.cs new file mode 100644 index 0000000..208db27 --- /dev/null +++ b/K4AdotNet/Sensor/CalibrationIntrinsicParameters.cs @@ -0,0 +1,156 @@ +using System; +using System.Runtime.InteropServices; + +namespace K4AdotNet.Sensor +{ + // Defined in k4atypes.h: + // typedef union + // { + // struct _param + // { + // float cx; + // float cy; + // float fx; + // float fy; + // float k1; + // float k2; + // float k3; + // float k4; + // float k5; + // float k6; + // float codx; + // float cody; + // float p2; + // float p1; + // float metric_radius; + // } param; + // float v[15]; + // } k4a_calibration_intrinsic_parameters_t; + /// Intrinsic calibration represents the internal optical properties of the camera. + /// Azure Kinect devices are calibrated with Brown Conrady which is compatible with OpenCV. 
+ [StructLayout(LayoutKind.Sequential)] + public struct CalibrationIntrinsicParameters + { + /// Principal point in image, x. Corresponding index in array: 0. + public float Cx; + + /// Principal point in image, y. Corresponding index in array: 1. + public float Cy; + + /// Focal length x. Corresponding index in array: 2. + public float Fx; + + /// Focal length y. Corresponding index in array: 3. + public float Fy; + + /// k1 radial distortion coefficient. Corresponding index in array: 4. + public float K1; + + /// k2 radial distortion coefficient. Corresponding index in array: 5. + public float K2; + + /// k3 radial distortion coefficient. Corresponding index in array: 6. + public float K3; + + /// k4 radial distortion coefficient. Corresponding index in array: 7. + public float K4; + + /// k5 radial distortion coefficient. Corresponding index in array: 8. + public float K5; + + /// k6 radial distortion coefficient. Corresponding index in array: 9. + public float K6; + + /// Center of distortion in Z=1 plane, x (only used for Rational6KT). Corresponding index in array: 10. + public float Codx; + + /// Center of distortion in Z=1 plane, y (only used for Rational6KT). Corresponding index in array: 11. + public float Cody; + + /// Tangential distortion coefficient 2. Corresponding index in array: 12. + public float P2; + + /// Tangential distortion coefficient 1. Corresponding index in array: 13. + public float P1; + + /// Metric radius. Corresponding index in array: 14. + public float MetricRadius; + + public CalibrationIntrinsicParameters(float[] values) + { + if (values is null) + throw new ArgumentNullException(nameof(values)); + if (values.Length != 15) + throw new ArgumentOutOfRangeException(nameof(values) + "." 
+ nameof(values.Length)); + + Cx = values[0]; + Cy = values[1]; + Fx = values[2]; + Fy = values[3]; + K1 = values[4]; + K2 = values[5]; + K3 = values[6]; + K4 = values[7]; + K5 = values[8]; + K6 = values[9]; + Codx = values[10]; + Cody = values[11]; + P2 = values[12]; + P1 = values[13]; + MetricRadius = values[14]; + } + + /// Array representation of intrinsic model parameters. + public float[] ToArray() + => new[] { Cx, Cy, Fx, Fy, K1, K2, K3, K4, K5, K6, Codx, Cody, P2, P1, MetricRadius }; + + public float this[int index] + { + get + { + switch (index) + { + case 0: return Cx; + case 1: return Cy; + case 2: return Fx; + case 3: return Fy; + case 4: return K1; + case 5: return K2; + case 6: return K3; + case 7: return K4; + case 8: return K5; + case 9: return K6; + case 10: return Codx; + case 11: return Cody; + case 12: return P2; + case 13: return P1; + case 14: return MetricRadius; + default: throw new ArgumentOutOfRangeException(nameof(index)); + } + } + + set + { + switch (index) + { + case 0: Cx = value; break; + case 1: Cy = value; break; + case 2: Fx = value; break; + case 3: Fy = value; break; + case 4: K1 = value; break; + case 5: K2 = value; break; + case 6: K3 = value; break; + case 7: K4 = value; break; + case 8: K5 = value; break; + case 9: K6 = value; break; + case 10: Codx = value; break; + case 11: Cody = value; break; + case 12: P2 = value; break; + case 13: P1 = value; break; + case 14: MetricRadius = value; break; + default: throw new ArgumentOutOfRangeException(nameof(index)); + } + } + } + } +} diff --git a/K4AdotNet/Sensor/CalibrationIntrinsics.cs b/K4AdotNet/Sensor/CalibrationIntrinsics.cs new file mode 100644 index 0000000..4fab173 --- /dev/null +++ b/K4AdotNet/Sensor/CalibrationIntrinsics.cs @@ -0,0 +1,30 @@ +using System.Runtime.InteropServices; + +namespace K4AdotNet.Sensor +{ + // Defined in k4atypes.h: + // typedef struct _k4a_calibration_intrinsics_t + // { + // k4a_calibration_model_type_t type; /**< Type of calibration model used*/ 
+ // unsigned int parameter_count; /**< Number of valid entries in parameters*/ + // k4a_calibration_intrinsic_parameters_t parameters; /**< Calibration parameters*/ + // } k4a_calibration_intrinsics_t; + /// Camera sensor intrinsic calibration data. + /// + /// Intrinsic calibration represents the internal optical properties of the camera. + /// + /// Azure Kinect devices are calibrated with Brown Conrady which is compatible with OpenCV. + /// + [StructLayout(LayoutKind.Sequential)] + public struct CalibrationIntrinsics + { + /// Type of calibration model used. + public CalibrationModel Model; + + /// Number of valid entries in . + public int ParameterCount; + + /// Calibration parameters. + public CalibrationIntrinsicParameters Parameters; + } +} diff --git a/K4AdotNet/Sensor/CalibrationModel.cs b/K4AdotNet/Sensor/CalibrationModel.cs new file mode 100644 index 0000000..3730643 --- /dev/null +++ b/K4AdotNet/Sensor/CalibrationModel.cs @@ -0,0 +1,30 @@ +namespace K4AdotNet.Sensor +{ + // Defined in k4atypes.h: + // typedef enum + // { + // K4A_CALIBRATION_LENS_DISTORTION_MODEL_UNKNOWN = 0, + // K4A_CALIBRATION_LENS_DISTORTION_MODEL_THETA, + // K4A_CALIBRATION_LENS_DISTORTION_MODEL_POLYNOMIAL_3K, + // K4A_CALIBRATION_LENS_DISTORTION_MODEL_RATIONAL_6KT, + // K4A_CALIBRATION_LENS_DISTORTION_MODEL_BROWN_CONRADY, + // } k4a_calibration_model_type_t; + /// The model used to interpret the calibration parameters. + public enum CalibrationModel + { + /// Calibration model is unknown. + Unknown = 0, + + /// Calibration model is Theta (arctan). + Theta, + + /// Calibration model Polynomial 3K. + Polynomial3K, + + /// Calibration model Rational 6KT. + Rational6KT, + + /// Calibration model Brown Conrady (compatible with OpenCV). 
+ BrownConrady, + } +} diff --git a/K4AdotNet/Sensor/CameraCalibration.cs b/K4AdotNet/Sensor/CameraCalibration.cs new file mode 100644 index 0000000..5917736 --- /dev/null +++ b/K4AdotNet/Sensor/CameraCalibration.cs @@ -0,0 +1,35 @@ +using System.Runtime.InteropServices; + +namespace K4AdotNet.Sensor +{ + // Defined in k4atypes.h: + // typedef struct _k4a_calibration_camera_t + // { + // k4a_calibration_extrinsics_t extrinsics; + // k4a_calibration_intrinsics_t intrinsics; + // int resolution_width; + // int resolution_height; + // float metric_radius; + // } k4a_calibration_camera_t; + /// Camera calibration contains intrinsic and extrinsic calibration information for depth/color camera. + [StructLayout(LayoutKind.Sequential)] + public struct CameraCalibration +{ + /// Extrinsic calibration data. + [MarshalAs(UnmanagedType.Struct)] + public CalibrationExtrinsics Extrinsics; + + /// Intrinsic calibration data. + [MarshalAs(UnmanagedType.Struct)] + public CalibrationIntrinsics Intrinsics; + + /// Resolution width of the camera. + public int ResolutionWidth; + + /// Resolution height of the camera. + public int ResolutionHeight; + + /// Max FOV of the camera. + public float MetricRadius; + } +} diff --git a/K4AdotNet/Sensor/ColorControlCommand.cs b/K4AdotNet/Sensor/ColorControlCommand.cs new file mode 100644 index 0000000..cce32c2 --- /dev/null +++ b/K4AdotNet/Sensor/ColorControlCommand.cs @@ -0,0 +1,95 @@ +using System; + +namespace K4AdotNet.Sensor +{ + // Defined in k4atypes.h: + // typedef enum + // { + // K4A_COLOR_CONTROL_EXPOSURE_TIME_ABSOLUTE = 0, + // K4A_COLOR_CONTROL_AUTO_EXPOSURE_PRIORITY, + // K4A_COLOR_CONTROL_BRIGHTNESS, + // K4A_COLOR_CONTROL_CONTRAST, + // K4A_COLOR_CONTROL_SATURATION, + // K4A_COLOR_CONTROL_SHARPNESS, + // K4A_COLOR_CONTROL_WHITEBALANCE, + // K4A_COLOR_CONTROL_BACKLIGHT_COMPENSATION, + // K4A_COLOR_CONTROL_GAIN, + // K4A_COLOR_CONTROL_POWERLINE_FREQUENCY + // } k4a_color_control_command_t; + /// Color sensor control commands. 
+ /// + /// Control values set on a device are reset only when the device is power cycled. The device will retain the settings + /// even if the device is closed or the application is restarted. + /// + public enum ColorControlCommand + { + /// Exposure time setting. + /// + /// May be set to or . + /// Exposure time is measured in microseconds. + /// + ExposureTimeAbsolute = 0, + + /// Exposure or Framerate priority setting. + /// + /// May only be set to . + /// Value of 0 means framerate priority. Value of 1 means exposure priority. + /// Using exposure priority may impact the framerate of both the color and depth cameras. + /// Deprecated starting in 1.1.0. Please discontinue usage, firmware does not support this. + /// + [Obsolete] + AutoExposurePriority, + + /// Brightness setting. + /// + /// May only be set to . + /// The valid range is 0 to 255. The default value is 128. + /// + Brightness, + + /// Contrast setting. + /// + /// May only be set to . + /// + Contrast, + + /// Saturation setting. + /// + /// May only be set to . + /// + Saturation, + + /// Sharpness setting. + /// + /// May only be set to . + /// + Sharpness, + + /// White balance setting. + /// + /// May be set to or . + /// The unit is degrees Kelvin. The setting must be set to a value evenly divisible by 10 degrees. + /// + Whitebalance, + + /// Backlight compensation setting. + /// + /// May only be set to . + /// Value of 0 means backlight compensation is disabled. Value of 1 means backlight compensation is enabled. + /// + BacklightCompensation, + + /// Gain setting. + /// + /// May only be set to . + /// + Gain, + + /// Powerline frequency setting. + /// + /// May only be set to . + /// Value of 1 sets the powerline compensation to 50 Hz. Value of 2 sets the powerline compensation to 60 Hz. 
+ /// + PowerlineFrequency, + } +} diff --git a/K4AdotNet/Sensor/ColorControlMode.cs b/K4AdotNet/Sensor/ColorControlMode.cs new file mode 100644 index 0000000..b7a504d --- /dev/null +++ b/K4AdotNet/Sensor/ColorControlMode.cs @@ -0,0 +1,18 @@ +namespace K4AdotNet.Sensor +{ + // Defined in k4atypes.h: + // typedef enum + // { + // K4A_COLOR_CONTROL_MODE_AUTO = 0, + // K4A_COLOR_CONTROL_MODE_MANUAL, + // } k4a_color_control_mode_t; + /// Color sensor control mode. + public enum ColorControlMode + { + /// set the associated to auto mode + Auto = 0, + + /// set the associated to manual mode + Manual, + } +} diff --git a/K4AdotNet/Sensor/ColorResolution.cs b/K4AdotNet/Sensor/ColorResolution.cs new file mode 100644 index 0000000..7856d8e --- /dev/null +++ b/K4AdotNet/Sensor/ColorResolution.cs @@ -0,0 +1,39 @@ +namespace K4AdotNet.Sensor +{ + // Defined in k4atypes.h: + // typedef enum + // { + // K4A_COLOR_RESOLUTION_OFF = 0, + // K4A_COLOR_RESOLUTION_720P, + // K4A_COLOR_RESOLUTION_1080P, + // K4A_COLOR_RESOLUTION_1440P, + // K4A_COLOR_RESOLUTION_1536P, + // K4A_COLOR_RESOLUTION_2160P, + // K4A_COLOR_RESOLUTION_3072P, + // } k4a_color_resolution_t; + /// Color sensor resolutions. + /// + public enum ColorResolution + { + /// Color camera will be turned off with this setting + Off = 0, + + /// 1280x720 16:9 + R720p, + + /// 1920x1080 16:9 + R1080p, + + /// 2560x1440 16:9 + R1440p, + + /// 2048x1536 4:3 + R1536p, + + /// 3840x2160 16:9 + R2160p, + + /// 4096x3072 4:3 + R3072p, + } +} diff --git a/K4AdotNet/Sensor/ColorResolutionExtenstions.cs b/K4AdotNet/Sensor/ColorResolutionExtenstions.cs new file mode 100644 index 0000000..9f0b0d4 --- /dev/null +++ b/K4AdotNet/Sensor/ColorResolutionExtenstions.cs @@ -0,0 +1,17 @@ +namespace K4AdotNet.Sensor +{ + /// Helper extension methods for enumeration. 
+ public static class ColorResolutionExtenstions + { + private static readonly int[] widths = new[] { 0, 1280, 1920, 2560, 2048, 3840, 4096 }; + private static readonly int[] heights = new[] { 0, 720, 1080, 1440, 1536, 2160, 3072 }; + + /// Returns image width in pixels for a given resolution. + public static int Width(this ColorResolution resolution) + => widths[(int)resolution]; + + /// Returns image height in pixels for a given resolution. + public static int Height(this ColorResolution resolution) + => heights[(int)resolution]; + } +} diff --git a/K4AdotNet/Sensor/DepthMode.cs b/K4AdotNet/Sensor/DepthMode.cs new file mode 100644 index 0000000..e4e2b03 --- /dev/null +++ b/K4AdotNet/Sensor/DepthMode.cs @@ -0,0 +1,41 @@ +namespace K4AdotNet.Sensor +{ + // Defined in k4atypes.h: + // typedef enum + // { + // K4A_DEPTH_MODE_OFF = 0, + // K4A_DEPTH_MODE_NFOV_2X2BINNED, + // K4A_DEPTH_MODE_NFOV_UNBINNED, + // K4A_DEPTH_MODE_WFOV_2X2BINNED, + // K4A_DEPTH_MODE_WFOV_UNBINNED, + // K4A_DEPTH_MODE_PASSIVE_IR, + // } k4a_depth_mode_t; + /// Depth sensor capture modes. + /// + /// See the hardware specification for additional details on the field of view, and supported frame rates + /// for each mode. + /// + /// Binned modes reduce the captured camera resolution by combining adjacent sensor pixels into a bin. + /// + /// + public enum DepthMode + { + /// Depth sensor will be turned off with this setting. + Off = 0, + + /// Depth captured at 320x288. Passive IR is also captured at 320x288. + NarrowView2x2Binned, + + /// Depth captured at 640x576. Passive IR is also captured at 640x576. + NarrowViewUnbinned, + + /// Depth captured at 512x512. Passive IR is also captured at 512x512. + WideView2x2Binned, + + /// Depth captured at 1024x1024. Passive IR is also captured at 1024x1024. + WideViewUnbinned, + + /// Passive IR only, captured at 1024x1024. 
+ PassiveIR, + } +} diff --git a/K4AdotNet/Sensor/DepthModeExtenstions.cs b/K4AdotNet/Sensor/DepthModeExtenstions.cs new file mode 100644 index 0000000..2c3c78b --- /dev/null +++ b/K4AdotNet/Sensor/DepthModeExtenstions.cs @@ -0,0 +1,25 @@ +namespace K4AdotNet.Sensor +{ + /// Helper extension methods for enumeration. + public static class DepthModeExtenstions + { + private static readonly int[] widths = new[] { 0, 320, 640, 512, 1024, 1024 }; + private static readonly int[] heights = new[] { 0, 288, 576, 512, 1024, 1024 }; + + /// Returns depth and IR images width in pixels for a given depth mode. + public static int Width(this DepthMode depthMode) + => widths[(int)depthMode]; + + /// Returns depth and IR images height in pixels for a given depth mode. + public static int Height(this DepthMode depthMode) + => heights[(int)depthMode]; + + /// Does depth mode have a wide field of view? + public static bool IsWideView(this DepthMode depthMode) + => depthMode >= DepthMode.WideView2x2Binned; + + /// Does depth mode use binning for smoothing/filtering?
+ public static bool IsBinned(this DepthMode depthMode) + => depthMode == DepthMode.NarrowView2x2Binned || depthMode == DepthMode.WideView2x2Binned; + } +} diff --git a/K4AdotNet/Sensor/DeviceConfiguration.cs b/K4AdotNet/Sensor/DeviceConfiguration.cs new file mode 100644 index 0000000..b51e2ae --- /dev/null +++ b/K4AdotNet/Sensor/DeviceConfiguration.cs @@ -0,0 +1,107 @@ +using System.Runtime.InteropServices; + +namespace K4AdotNet.Sensor +{ + // Defined in k4atypes.h: + // typedef struct _k4a_device_configuration_t + // { + // k4a_image_format_t color_format; + // k4a_color_resolution_t color_resolution; + // k4a_depth_mode_t depth_mode; + // k4a_fps_t camera_fps; + // bool synchronized_images_only; + // int32_t depth_delay_off_color_usec; + // k4a_wired_sync_mode_t wired_sync_mode; + // uint32_t subordinate_delay_off_master_usec; + // bool disable_streaming_indicator; + // } k4a_device_configuration_t; + /// Configuration parameters for an Azure Kinect device. + [StructLayout(LayoutKind.Sequential)] + public struct DeviceConfiguration + { + /// Image format to capture with the color camera. + /// + /// The color camera does not natively produce BGRA32 images. + /// Setting value will result in higher CPU utilization. + /// + public ImageFormat ColorFormat; + + /// Image resolution to capture with the color camera. + public ColorResolution ColorResolution; + + /// Capture mode for the depth camera. + public DepthMode DepthMode; + + /// Desired frame rate for the color and depth camera. + public FrameRate CameraFps; + + /// Only produce capture objects if they contain synchronized color and depth images. + /// + /// This setting controls the behavior in which images are dropped when images are produced faster than they can be + /// read, or if there are errors in reading images from the device. + /// + /// If set to , capture objects will only be produced with both color and depth images. 
+ /// If set to , capture objects may be produced only a single image when the corresponding image is dropped. + /// + /// Setting this to ensures that the caller receives all of the images received from the camera, regardless of + /// whether the corresponding images expected in the capture are available. + /// + /// If either the color or depth camera are disabled, this setting has no effect. + /// + public bool SynchronizedImagesOnly; + + /// Desired delay between the capture of the color image and the capture of the depth image. + /// + /// A negative value indicates that the depth image should be captured before the color image. + /// Any value between negative and positive one capture period is valid. + /// + [MarshalAs(UnmanagedType.Struct)] + public Delay DepthDelayOffColor; + + /// The external synchronization mode. + public WiredSyncMode WiredSyncMode; + + /// The external synchronization timing. + /// + /// If this camera is a subordinate, this sets the capture delay between the color camera capture and the external + /// input pulse. A setting of zero indicates that the master and subordinate color images should be aligned. + /// + /// This setting does not effect the 'Sync out' connection. + /// + /// This value must be positive and range from zero to one capture period. + /// + /// If this is not a subordinate, then this value is ignored. + /// + [MarshalAs(UnmanagedType.Struct)] + public Delay SubordinateDelayOffMaster; + + /// Streaming indicator automatically turns on when the color or depth camera's are in use. + /// This setting disables that behavior and keeps the LED in an off state. 
+ public bool DisableStreamingIndicator; + + // Defined in k4atypes.h: + // static const k4a_device_configuration_t K4A_DEVICE_CONFIG_INIT_DISABLE_ALL = { K4A_IMAGE_FORMAT_COLOR_MJPG, + // K4A_COLOR_RESOLUTION_OFF, + // K4A_DEPTH_MODE_OFF, + // K4A_FRAMES_PER_SECOND_30, + // false, + // 0, + // K4A_WIRED_SYNC_MODE_STANDALONE, + // 0, + // false }; + /// Initial configuration setting for disabling all sensors. + /// Use this setting to initialize a to a disabled state. + public static readonly DeviceConfiguration DisableAll = new DeviceConfiguration + { + ColorFormat = ImageFormat.ColorMJPG, + ColorResolution = ColorResolution.Off, + DepthMode = DepthMode.Off, + CameraFps = FrameRate.Thirty, + SynchronizedImagesOnly = false, + DepthDelayOffColor = Delay.Zero, + WiredSyncMode = WiredSyncMode.Standalone, + SubordinateDelayOffMaster = Delay.Zero, + DisableStreamingIndicator = false, + }; + } +} diff --git a/K4AdotNet/Sensor/DllImports.cs b/K4AdotNet/Sensor/DllImports.cs new file mode 100644 index 0000000..a0508d3 --- /dev/null +++ b/K4AdotNet/Sensor/DllImports.cs @@ -0,0 +1,942 @@ +using System; +using System.Runtime.InteropServices; +using System.Text; + +namespace K4AdotNet.Sensor +{ + /// Interops to most of native functions from k4a.h header file. + internal static class DllImports + { + /// Default device index. + /// Passed as an argument to to open the default sensor. + public const uint DEFAULT_DEVICE_INDEX = 0; + + // K4A_EXPORT uint32_t k4a_device_get_installed_count(void); + /// Gets the number of connected devices. + /// Number of sensors connected to the PC. + [DllImport(Sdk.K4A_DLL_NAME, EntryPoint = "k4a_device_get_installed_count", CallingConvention = CallingConvention.Cdecl)] + public static extern uint DeviceGetInstalledCount(); + + // K4A_EXPORT k4a_result_t k4a_device_open(uint32_t index, k4a_device_t *device_handle); + /// Open an Azure Kinect device. + /// The index of the device to open, starting with 0. 
Use constant as value for this parameter to open default device. + /// Output parameter which on success will return a handle to the device. + /// if the device was opened successfully. + [DllImport(Sdk.K4A_DLL_NAME, EntryPoint = "k4a_device_open", CallingConvention = CallingConvention.Cdecl)] + public static extern NativeApiCallResults.Result DeviceOpen(uint index, out NativeHandles.DeviceHandle deviceHandle); + + // K4A_EXPORT k4a_wait_result_t k4a_device_get_capture(k4a_device_t device_handle, + // k4a_capture_t *capture_handle, + // int32_t timeout_in_ms); + /// Reads a sensor capture. + /// Handle obtained by . + /// If successful this contains a handle to a capture object. + /// + /// Specifies the time the function should block waiting for the capture. + /// If set to , the function will return without blocking. + /// Passing will block indefinitely until data is available, the + /// device is disconnected, or another error occurs. + /// + /// + /// if a capture is returned. + /// If a capture is not available before the timeout elapses, the function will return . + /// All other failures will return . + /// + /// + /// This function needs to be called while the device is in a running state; + /// after is called and before is called. + /// + [DllImport(Sdk.K4A_DLL_NAME, EntryPoint = "k4a_device_get_capture", CallingConvention = CallingConvention.Cdecl)] + public static extern NativeApiCallResults.WaitResult DeviceGetCapture( + NativeHandles.DeviceHandle deviceHandle, + out NativeHandles.CaptureHandle captureHandle, + Timeout timeout); + + // K4A_EXPORT k4a_wait_result_t k4a_device_get_imu_sample(k4a_device_t device_handle, + // k4a_imu_sample_t *imu_sample, + // int32_t timeout_in_ms); + /// Reads an IMU sample. + /// Handle obtained by . + /// Information about IMU sample. + /// + /// Specifies the time the function should block waiting for the sample. + /// If set to , the function will return without blocking. 
+ /// Passing will block indefinitely until data is available, the + /// device is disconnected, or another error occurs. + /// + /// + /// if a sample is returned. + /// If a sample is not available before the timeout elapses, the function will return . + /// All other failures will return . + /// + /// + /// This function needs to be called while the device is in a running state; + /// after is called and before is called. + /// + [DllImport(Sdk.K4A_DLL_NAME, EntryPoint = "k4a_device_get_imu_sample", CallingConvention = CallingConvention.Cdecl)] + public static extern NativeApiCallResults.WaitResult DeviceGetImuSample( + NativeHandles.DeviceHandle deviceHandle, + out ImuSample imuSample, + Timeout timeout); + + // K4A_EXPORT k4a_result_t k4a_capture_create(k4a_capture_t *capture_handle); + /// Create an empty capture object. + /// Output parameter which on success will return a handle to the capture. + /// if the device was opened successfully. + [DllImport(Sdk.K4A_DLL_NAME, EntryPoint = "k4a_capture_create", CallingConvention = CallingConvention.Cdecl)] + public static extern NativeApiCallResults.Result CaptureCreate(out NativeHandles.CaptureHandle captureHandle); + + // K4A_EXPORT k4a_image_t k4a_capture_get_color_image(k4a_capture_t capture_handle); + /// Get the color image associated with the given capture. + /// Capture handle containing the image. + /// Image handle. + /// Call this function to access the color image part of this capture. + [DllImport(Sdk.K4A_DLL_NAME, EntryPoint = "k4a_capture_get_color_image", CallingConvention = CallingConvention.Cdecl)] + public static extern NativeHandles.ImageHandle CaptureGetColorImage(NativeHandles.CaptureHandle captureHandle); + + // K4A_EXPORT k4a_image_t k4a_capture_get_depth_image(k4a_capture_t capture_handle); + /// Get the depth image associated with the given capture. + /// Capture handle containing the image. + /// Image handle. + /// Call this function to access the depth image part of this capture. 
        [DllImport(Sdk.K4A_DLL_NAME, EntryPoint = "k4a_capture_get_depth_image", CallingConvention = CallingConvention.Cdecl)]
        public static extern NativeHandles.ImageHandle CaptureGetDepthImage(NativeHandles.CaptureHandle captureHandle);

        // K4A_EXPORT k4a_image_t k4a_capture_get_ir_image(k4a_capture_t capture_handle);
        /// <summary>Get the IR image associated with the given capture.</summary>
        /// <param name="captureHandle">Capture handle containing the image.</param>
        /// <returns>Image handle.</returns>
        /// <remarks>Call this function to access the IR image part of this capture.</remarks>
        [DllImport(Sdk.K4A_DLL_NAME, EntryPoint = "k4a_capture_get_ir_image", CallingConvention = CallingConvention.Cdecl)]
        public static extern NativeHandles.ImageHandle CaptureGetIRImage(NativeHandles.CaptureHandle captureHandle);

        // K4A_EXPORT void k4a_capture_set_color_image(k4a_capture_t capture_handle, k4a_image_t image_handle);
        /// <summary>Set or add a color image to the associated capture.</summary>
        /// <param name="captureHandle">Capture handle to hold the image.</param>
        /// <param name="imageHandle">Image handle containing the image, or an invalid (zero) handle to remove color image from a given capture if any.</param>
        /// <remarks>If there is already a color image contained in the capture, the existing image will be dereferenced and replaced with the new image.</remarks>
        [DllImport(Sdk.K4A_DLL_NAME, EntryPoint = "k4a_capture_set_color_image", CallingConvention = CallingConvention.Cdecl)]
        public static extern void CaptureSetColorImage(NativeHandles.CaptureHandle captureHandle, NativeHandles.ImageHandle imageHandle);

        // K4A_EXPORT void k4a_capture_set_depth_image(k4a_capture_t capture_handle, k4a_image_t image_handle);
        /// <summary>Set or add a depth image to the associated capture.</summary>
        /// <param name="captureHandle">Capture handle to hold the image.</param>
        /// <param name="imageHandle">Image handle containing the image, or an invalid (zero) handle to remove depth image from a given capture if any.</param>
        /// <remarks>If there is already a depth image contained in the capture, the existing image will be dereferenced and replaced with the new image.</remarks>
        [DllImport(Sdk.K4A_DLL_NAME, EntryPoint = "k4a_capture_set_depth_image", CallingConvention = CallingConvention.Cdecl)]
        public static extern void CaptureSetDepthImage(NativeHandles.CaptureHandle captureHandle, NativeHandles.ImageHandle imageHandle);

        // K4A_EXPORT void k4a_capture_set_ir_image(k4a_capture_t capture_handle, k4a_image_t image_handle);
        /// <summary>Set or add a IR image to the associated capture.</summary>
        /// <param name="captureHandle">Capture handle to hold the image.</param>
        /// <param name="imageHandle">Image handle containing the image, or an invalid (zero) handle to remove IR image from a given capture if any.</param>
        /// <remarks>If there is already a IR image contained in the capture, the existing image will be dereferenced and replaced with the new image.</remarks>
        [DllImport(Sdk.K4A_DLL_NAME, EntryPoint = "k4a_capture_set_ir_image", CallingConvention = CallingConvention.Cdecl)]
        public static extern void CaptureSetIRImage(NativeHandles.CaptureHandle captureHandle, NativeHandles.ImageHandle imageHandle);

        // K4A_EXPORT void k4a_capture_set_temperature_c(k4a_capture_t capture_handle, float temperature_c);
        /// <summary>Set the temperature associated with the capture.</summary>
        /// <param name="captureHandle">Capture handle to set the temperature on.</param>
        /// <param name="temperatureC">Temperature in Celsius to store.</param>
        [DllImport(Sdk.K4A_DLL_NAME, EntryPoint = "k4a_capture_set_temperature_c", CallingConvention = CallingConvention.Cdecl)]
        public static extern void CaptureSetTemperatureC(NativeHandles.CaptureHandle captureHandle, float temperatureC);

        // K4A_EXPORT float k4a_capture_get_temperature_c(k4a_capture_t capture_handle);
        /// <summary>Get the temperature associated with the capture.</summary>
        /// <param name="captureHandle">Capture handle to retrieve the temperature from.</param>
        /// <returns>
        /// This function returns the temperature of the device at the time of the capture in Celsius.
        /// If the temperature is unavailable, the function will return <see cref="float.NaN"/>.
        /// </returns>
        [DllImport(Sdk.K4A_DLL_NAME, EntryPoint = "k4a_capture_get_temperature_c", CallingConvention = CallingConvention.Cdecl)]
        public static extern float CaptureGetTemperatureC(NativeHandles.CaptureHandle captureHandle);

        // K4A_EXPORT k4a_result_t k4a_image_create(k4a_image_format_t format,
        //                                          int width_pixels,
        //                                          int height_pixels,
        //                                          int stride_bytes,
        //                                          k4a_image_t *image_handle);
        /// <summary>
        /// Create an image.
        /// </summary>
        /// <param name="format">The format of the image that will be stored in this image container.</param>
        /// <param name="widthPixels">Width in pixels.</param>
        /// <param name="heightPixels">Height in pixels.</param>
        /// <param name="strideBytes">The number of bytes per horizontal line of the image.</param>
        /// <param name="imageHandle">Handle of created image in case of success.</param>
        /// <returns><see cref="NativeApiCallResults.Result.Succeeded"/> on success.</returns>
        /// <remarks>This function is used to create images of formats that have consistent stride.
        /// The function is not suitable for compressed formats that may not be represented by the same number of bytes per line.
        /// The function will allocate an image buffer of size <paramref name="heightPixels"/> * <paramref name="strideBytes"/> bytes.
        /// To create an image object without the API allocating memory, or to represent an image that has a non-deterministic
        /// stride, use <see cref="ImageCreateFromBuffer"/>.
        /// </remarks>
        [DllImport(Sdk.K4A_DLL_NAME, EntryPoint = "k4a_image_create", CallingConvention = CallingConvention.Cdecl)]
        public static extern NativeApiCallResults.Result ImageCreate(
            ImageFormat format,
            int widthPixels,
            int heightPixels,
            int strideBytes,
            out NativeHandles.ImageHandle imageHandle);

        // typedef void(k4a_memory_destroy_cb_t)(void *buffer, void *context);
        /// <summary>Callback function for a memory object being destroyed.</summary>
        /// <param name="buffer">The buffer pointer that was supplied by the caller.</param>
        /// <param name="context">The context for the memory object that needs to be destroyed that was supplied by the caller.</param>
        public delegate void MemoryDestroyCallback(IntPtr buffer, IntPtr context);

        // K4A_EXPORT k4a_result_t k4a_image_create_from_buffer(k4a_image_format_t format,
        //                                                      int width_pixels,
        //                                                      int height_pixels,
        //                                                      int stride_bytes,
        //                                                      uint8_t* buffer,
        //                                                      size_t buffer_size,
        //                                                      k4a_memory_destroy_cb_t* buffer_release_cb,
        //                                                      void* buffer_release_cb_context,
        //                                                      k4a_image_t* image_handle);
        /// <summary>Create an image from a pre-allocated buffer.</summary>
        /// <param name="format">The format of the image that will be stored in this image container.</param>
        /// <param name="widthPixels">Width in pixels.</param>
        /// <param name="heightPixels">Height in pixels.</param>
        /// <param name="strideBytes">The number of bytes per horizontal line of the image.</param>
        /// <param name="buffer">Pointer to a pre-allocated image buffer.</param>
        /// <param name="bufferSize">Size in bytes of the pre-allocated image buffer.</param>
        /// <param name="bufferReleaseCallback">
        /// Callback to the buffer free function, called when all references to the buffer have been released.
        /// This parameter is optional (can be <see langword="null"/>).
        /// </param>
        /// <param name="bufferReleaseCallbackContext">
        /// Context for the buffer free function. This value will be passed as 2nd parameter to <paramref name="bufferReleaseCallback"/>
        /// when the callback is invoked.
        /// </param>
        /// <param name="imageHandle">Handle of created image in case of success.</param>
        /// <returns><see cref="NativeApiCallResults.Result.Succeeded"/> on success.</returns>
        /// <remarks>
        /// This function creates an image from a pre-allocated buffer. When all references to this object reach zero
        /// the provided callback function is called so that the memory can be released.
        /// </remarks>
        [DllImport(Sdk.K4A_DLL_NAME, EntryPoint = "k4a_image_create_from_buffer", CallingConvention = CallingConvention.Cdecl)]
        public static extern NativeApiCallResults.Result ImageCreateFromBuffer(
            ImageFormat format,
            int widthPixels,
            int heightPixels,
            int strideBytes,
            IntPtr buffer,
            UIntPtr bufferSize,
            MemoryDestroyCallback bufferReleaseCallback,
            IntPtr bufferReleaseCallbackContext,
            out NativeHandles.ImageHandle imageHandle);

        // K4A_EXPORT uint8_t *k4a_image_get_buffer(k4a_image_t image_handle);
        /// <summary>Get the image buffer.</summary>
        /// <param name="imageHandle">Handle of the image for which the get operation is performed on.</param>
        /// <returns>
        /// The function will return <see cref="IntPtr.Zero"/> if there is an error, and will normally return a pointer to the image buffer.
        /// </returns>
        /// <remarks>Use this buffer to access the raw image data.</remarks>
        [DllImport(Sdk.K4A_DLL_NAME, EntryPoint = "k4a_image_get_buffer", CallingConvention = CallingConvention.Cdecl)]
        public static extern IntPtr ImageGetBuffer(NativeHandles.ImageHandle imageHandle);

        // K4A_EXPORT size_t k4a_image_get_size(k4a_image_t image_handle);
        /// <summary>Get the image buffer size.</summary>
        /// <param name="imageHandle">Handle of the image for which the get operation is performed on.</param>
        /// <returns>The function will return <see cref="UIntPtr.Zero"/> if there is an error, and will normally return the image size.</returns>
        /// <remarks>Use this function to know what the size of the image buffer is returned by <see cref="ImageGetBuffer"/>.</remarks>
        [DllImport(Sdk.K4A_DLL_NAME, EntryPoint = "k4a_image_get_size", CallingConvention = CallingConvention.Cdecl)]
        public static extern UIntPtr ImageGetSize(NativeHandles.ImageHandle imageHandle);

        // K4A_EXPORT k4a_image_format_t k4a_image_get_format(k4a_image_t image_handle);
        /// <summary>Get the format of the image.</summary>
        /// <param name="imageHandle">Handle of the image for which the get operation is performed on.</param>
        /// <returns>
        /// This function is not expected to fail, all images are created with a known format.
        /// If the <paramref name="imageHandle"/> is invalid, the function will return the custom image format value.
        /// </returns>
        /// <remarks>Use this function to determine the format of the image buffer.</remarks>
        [DllImport(Sdk.K4A_DLL_NAME, EntryPoint = "k4a_image_get_format", CallingConvention = CallingConvention.Cdecl)]
        public static extern ImageFormat ImageGetFormat(NativeHandles.ImageHandle imageHandle);

        // K4A_EXPORT int k4a_image_get_width_pixels(k4a_image_t image_handle);
        /// <summary>Get the image width in pixels.</summary>
        /// <param name="imageHandle">Handle of the image for which the get operation is performed on.</param>
        /// <returns>
        /// This function is not expected to fail, all images are created with a known width.
        /// If the <paramref name="imageHandle"/> is invalid, the function will return 0.
        /// </returns>
        [DllImport(Sdk.K4A_DLL_NAME, EntryPoint = "k4a_image_get_width_pixels", CallingConvention = CallingConvention.Cdecl)]
        public static extern int ImageGetWidthPixels(NativeHandles.ImageHandle imageHandle);

        // K4A_EXPORT int k4a_image_get_height_pixels(k4a_image_t image_handle);
        /// <summary>Get the image height in pixels.</summary>
        /// <param name="imageHandle">Handle of the image for which the get operation is performed on.</param>
        /// <returns>
        /// This function is not expected to fail, all images are created with a known height.
        /// If the <paramref name="imageHandle"/> is invalid, the function will return 0.
        /// </returns>
        [DllImport(Sdk.K4A_DLL_NAME, EntryPoint = "k4a_image_get_height_pixels", CallingConvention = CallingConvention.Cdecl)]
        public static extern int ImageGetHeightPixels(NativeHandles.ImageHandle imageHandle);

        // K4A_EXPORT int k4a_image_get_stride_bytes(k4a_image_t image_handle);
        /// <summary>Get the image stride in bytes.</summary>
        /// <param name="imageHandle">Handle of the image for which the get operation is performed on.</param>
        /// <returns>
        /// This function is not expected to fail, all images are created with a known stride.
        /// If the <paramref name="imageHandle"/> is invalid or the image's format does not have a stride, the function will return 0.
        /// </returns>
        [DllImport(Sdk.K4A_DLL_NAME, EntryPoint = "k4a_image_get_stride_bytes", CallingConvention = CallingConvention.Cdecl)]
        public static extern int ImageGetStrideBytes(NativeHandles.ImageHandle imageHandle);

        // K4A_EXPORT uint64_t k4a_image_get_timestamp_usec(k4a_image_t image_handle);
        /// <summary>Get the image time stamp.</summary>
        /// <param name="imageHandle">Handle of the image for which the get operation is performed on.</param>
        /// <returns>
        /// If the <paramref name="imageHandle"/> is invalid or if no time stamp was set for the image,
        /// this function will return a zero time stamp.
        /// It is also possible for a zero value to be a valid time stamp originating from the beginning
        /// of a recording or the start of streaming.
        /// </returns>
        /// <remarks>
        /// Returns the time stamp of the image. Time stamps are recorded by the device and represent the mid-point of exposure.
        /// They may be used for relative comparison, but their absolute value has no defined meaning.
        /// </remarks>
        [DllImport(Sdk.K4A_DLL_NAME, EntryPoint = "k4a_image_get_timestamp_usec", CallingConvention = CallingConvention.Cdecl)]
        public static extern TimeStamp ImageGetTimeStamp(NativeHandles.ImageHandle imageHandle);

        // K4A_EXPORT uint64_t k4a_image_get_exposure_usec(k4a_image_t image_handle);
        /// <summary>Get the image exposure in microseconds.</summary>
        /// <param name="imageHandle">Handle of the image for which the get operation is performed on.</param>
        /// <returns>
        /// If the <paramref name="imageHandle"/> is invalid or if no exposure was set for the image,
        /// this function will return 0. Otherwise,
        /// it will return the image exposure time in microseconds.
        /// </returns>
        /// <remarks>Returns an exposure time in microseconds. This is only supported on color image formats.</remarks>
        [DllImport(Sdk.K4A_DLL_NAME, EntryPoint = "k4a_image_get_exposure_usec", CallingConvention = CallingConvention.Cdecl)]
        public static extern ulong ImageGetExposureUsec(NativeHandles.ImageHandle imageHandle);

        // K4A_EXPORT uint32_t k4a_image_get_white_balance(k4a_image_t image_handle);
        /// <summary>Get the image white balance.</summary>
        /// <param name="imageHandle">Handle of the image for which the get operation is performed on.</param>
        /// <returns>
        /// Returns the image white balance in Kelvin. If <paramref name="imageHandle"/> is invalid, or the white balance was not set or
        /// not applicable to the image, the function will return 0.
        /// </returns>
        /// <remarks>Returns the image's white balance. This function is only valid for color captures, and not for depth or IR captures.</remarks>
        [DllImport(Sdk.K4A_DLL_NAME, EntryPoint = "k4a_image_get_white_balance", CallingConvention = CallingConvention.Cdecl)]
        public static extern uint ImageGetWhiteBalance(NativeHandles.ImageHandle imageHandle);

        // K4A_EXPORT uint32_t k4a_image_get_iso_speed(k4a_image_t image_handle);
        /// <summary>Get the image ISO speed.</summary>
        /// <param name="imageHandle">Handle of the image for which the get operation is performed on.</param>
        /// <returns>
        /// Returns the ISO speed of the image. 0 indicates the ISO speed was not available or an error occurred.
        /// </returns>
        /// <remarks>This function is only valid for color captures, and not for depth or IR captures.</remarks>
        [DllImport(Sdk.K4A_DLL_NAME, EntryPoint = "k4a_image_get_iso_speed", CallingConvention = CallingConvention.Cdecl)]
        public static extern uint ImageGetIsoSpeed(NativeHandles.ImageHandle imageHandle);

        // K4A_EXPORT void k4a_image_set_timestamp_usec(k4a_image_t image_handle, uint64_t timestamp_usec);
        /// <summary>Set the time stamp, in microseconds, of the image.</summary>
        /// <param name="imageHandle">Handle of the image to set the timestamp on.</param>
        /// <param name="timestamp">Time stamp of the image.</param>
        /// <remarks>
        /// Use this function in conjunction with <see cref="ImageCreate"/>
        /// or <see cref="ImageCreateFromBuffer"/> to construct an image.
        /// </remarks>
        [DllImport(Sdk.K4A_DLL_NAME, EntryPoint = "k4a_image_set_timestamp_usec", CallingConvention = CallingConvention.Cdecl)]
        public static extern void ImageSetTimeStamp(NativeHandles.ImageHandle imageHandle, TimeStamp timestamp);

        // K4A_EXPORT void k4a_image_set_exposure_time_usec(k4a_image_t image_handle, uint64_t exposure_usec);
        /// <summary>Set the exposure time, in microseconds, of the image.</summary>
        /// <param name="imageHandle">Handle of the image to set the exposure time on.</param>
        /// <param name="exposureUsec">Exposure time of the image in microseconds.</param>
        /// <remarks>
        /// Use this function in conjunction with <see cref="ImageCreate"/>
        /// or <see cref="ImageCreateFromBuffer"/> to construct an image.
        /// </remarks>
        [DllImport(Sdk.K4A_DLL_NAME, EntryPoint = "k4a_image_set_exposure_time_usec", CallingConvention = CallingConvention.Cdecl)]
        public static extern void ImageSetExposureTimeUsec(NativeHandles.ImageHandle imageHandle, ulong exposureUsec);

        // K4A_EXPORT void k4a_image_set_white_balance(k4a_image_t image_handle, uint32_t white_balance);
        /// <summary>Set the white balance of the image.</summary>
        /// <param name="imageHandle">Handle of the image to set the white balance on.</param>
        /// <param name="whiteBalance">White balance of the image in degrees Kelvin.</param>
        /// <remarks>
        /// Use this function in conjunction with <see cref="ImageCreate"/>
        /// or <see cref="ImageCreateFromBuffer"/> to construct an image.
        /// </remarks>
        [DllImport(Sdk.K4A_DLL_NAME, EntryPoint = "k4a_image_set_white_balance", CallingConvention = CallingConvention.Cdecl)]
        public static extern void ImageSetWhiteBalance(NativeHandles.ImageHandle imageHandle, uint whiteBalance);

        // K4A_EXPORT void k4a_image_set_iso_speed(k4a_image_t image_handle, uint32_t iso_speed);
        /// <summary>Set the ISO speed of the image.</summary>
        /// <param name="imageHandle">Handle of the image to set the ISO speed on.</param>
        /// <param name="isoSpeed">ISO speed of the image.</param>
        /// <remarks>
        /// Use this function in conjunction with <see cref="ImageCreate"/>
        /// or <see cref="ImageCreateFromBuffer"/> to construct an image.
        /// </remarks>
        [DllImport(Sdk.K4A_DLL_NAME, EntryPoint = "k4a_image_set_iso_speed", CallingConvention = CallingConvention.Cdecl)]
        public static extern void ImageSetIsoSpeed(NativeHandles.ImageHandle imageHandle, uint isoSpeed);

        // K4A_EXPORT k4a_result_t k4a_device_start_cameras(k4a_device_t device_handle, k4a_device_configuration_t *config);
        /// <summary>Starts color and depth camera capture.</summary>
        /// <param name="deviceHandle">Handle obtained by <see cref="DeviceOpen"/>.</param>
        /// <param name="config">The configuration we want to run the device in. This can be initialized with the disable-all default configuration.</param>
        /// <returns><see cref="NativeApiCallResults.Result.Succeeded"/> is returned on success.</returns>
        /// <remarks>
        /// Individual sensors configured to run will now start to stream captured data.
        ///
        /// It is not valid to call this method a second time on the same device until <see cref="DeviceStopCameras"/> has been called.
        /// </remarks>
        [DllImport(Sdk.K4A_DLL_NAME, EntryPoint = "k4a_device_start_cameras", CallingConvention = CallingConvention.Cdecl)]
        public static extern NativeApiCallResults.Result DeviceStartCameras(NativeHandles.DeviceHandle deviceHandle, [In] ref DeviceConfiguration config);

        // K4A_EXPORT void k4a_device_stop_cameras(k4a_device_t device_handle);
        /// <summary>Stops the color and depth camera capture.</summary>
        /// <param name="deviceHandle">Handle obtained by <see cref="DeviceOpen"/>.</param>
        /// <remarks>
        /// The streaming of individual sensors stops as a result of this call. Once called,
        /// <see cref="DeviceStartCameras"/> may be called again to resume sensor streaming.
        ///
        /// This function may be called while another thread is blocking in <see cref="DeviceGetCapture"/>.
        /// Calling this function while another thread is in that function will result in that function returning a failure.
        /// </remarks>
        [DllImport(Sdk.K4A_DLL_NAME, EntryPoint = "k4a_device_stop_cameras", CallingConvention = CallingConvention.Cdecl)]
        public static extern void DeviceStopCameras(NativeHandles.DeviceHandle deviceHandle);

        // K4A_EXPORT k4a_result_t k4a_device_start_imu(k4a_device_t device_handle);
        /// <summary>Starts the IMU sample stream.</summary>
        /// <param name="deviceHandle">Handle obtained by <see cref="DeviceOpen"/>.</param>
        /// <returns><see cref="NativeApiCallResults.Result.Succeeded"/> is returned on success.</returns>
        /// <remarks>
        /// Call this API to start streaming IMU data. It is not valid to call this function a second time on the same
        /// device until <see cref="DeviceStopImu"/> has been called.
        ///
        /// This function is dependent on the state of the cameras. The color or depth camera must be started before the IMU.
        /// <see cref="NativeApiCallResults.Result.Failed"/> will be returned if one of the cameras is not running.
        /// </remarks>
        [DllImport(Sdk.K4A_DLL_NAME, EntryPoint = "k4a_device_start_imu", CallingConvention = CallingConvention.Cdecl)]
        public static extern NativeApiCallResults.Result DeviceStartImu(NativeHandles.DeviceHandle deviceHandle);


        // K4A_EXPORT void k4a_device_stop_imu(k4a_device_t device_handle);
        /// <summary>Stops the IMU capture.</summary>
        /// <param name="deviceHandle">Handle obtained by <see cref="DeviceOpen"/>.</param>
        /// <remarks>
        /// The streaming of the IMU stops as a result of this call. Once called, <see cref="DeviceStartImu"/> may
        /// be called again to resume sensor streaming, so long as the cameras are running.
        ///
        /// This function may be called while another thread is blocking in <see cref="DeviceGetImuSample"/>.
        /// Calling this function while another thread is in that function will result in that function returning a failure.
        /// </remarks>
        [DllImport(Sdk.K4A_DLL_NAME, EntryPoint = "k4a_device_stop_imu", CallingConvention = CallingConvention.Cdecl)]
        public static extern void DeviceStopImu(NativeHandles.DeviceHandle deviceHandle);

        // K4A_EXPORT k4a_buffer_result_t k4a_device_get_serialnum(k4a_device_t device_handle,
        //                                                         char *serial_number,
        //                                                         size_t *serial_number_size);
        /// <summary>Get the Azure Kinect device serial number.</summary>
        /// <param name="deviceHandle">Handle obtained by <see cref="DeviceOpen"/>.</param>
        /// <param name="serialNumber">
        /// Location to write the serial number to. If the function returns <see cref="NativeApiCallResults.BufferResult.Succeeded"/>,
        /// this will be a NULL-terminated string of ASCII characters.
        /// If this input is <see langword="null"/>, <paramref name="serialNumberSize"/> will still be updated to return
        /// the size of the buffer needed to store the string.
        /// </param>
        /// <param name="serialNumberSize">
        /// On input, the size of the <paramref name="serialNumber"/> buffer if that pointer is not <see langword="null"/>.
        /// On output, this value is set to the actual number of bytes in the serial number (including the null terminator).
        /// </param>
        /// <returns>
        /// A return of <see cref="NativeApiCallResults.BufferResult.Succeeded"/> means that the <paramref name="serialNumber"/> has been filled in.
        /// If the buffer is too small the function returns <see cref="NativeApiCallResults.BufferResult.TooSmall"/> and the size of the serial number is
        /// returned in the <paramref name="serialNumberSize"/> parameter.
        /// All other failures return <see cref="NativeApiCallResults.BufferResult.Failed"/>.
        /// </returns>
        [DllImport(Sdk.K4A_DLL_NAME, EntryPoint = "k4a_device_get_serialnum", CallingConvention = CallingConvention.Cdecl, CharSet = CharSet.Ansi, BestFitMapping = false, ThrowOnUnmappableChar = true)]
        public static extern NativeApiCallResults.BufferResult DeviceGetSerialnum(
            NativeHandles.DeviceHandle deviceHandle,
            StringBuilder serialNumber,
            ref UIntPtr serialNumberSize);

        // K4A_EXPORT k4a_result_t k4a_device_get_version(k4a_device_t device_handle, k4a_hardware_version_t *version);
        /// <summary>Get the version numbers of the device's subsystems.</summary>
        /// <param name="deviceHandle">Handle obtained by <see cref="DeviceOpen"/>.</param>
        /// <param name="version">Output parameter which on success will return version info.</param>
        /// <returns>
        /// A return of <see cref="NativeApiCallResults.Result.Succeeded"/> means that the version structure has been filled in.
        /// All other failures return <see cref="NativeApiCallResults.Result.Failed"/>.
        /// </returns>
        [DllImport(Sdk.K4A_DLL_NAME, EntryPoint = "k4a_device_get_version", CallingConvention = CallingConvention.Cdecl)]
        public static extern NativeApiCallResults.Result DeviceGetVersion(
            NativeHandles.DeviceHandle deviceHandle,
            out HardwareVersion version);

        // K4A_EXPORT k4a_result_t k4a_device_get_color_control_capabilities(k4a_device_t device_handle,
        //                                                                   k4a_color_control_command_t command,
        //                                                                   bool *supports_auto,
        //                                                                   int32_t *min_value,
        //                                                                   int32_t *max_value,
        //                                                                   int32_t *step_value,
        //                                                                   int32_t *default_value,
        //                                                                   k4a_color_control_mode_t *default_mode);
        /// <summary>Get the Azure Kinect color sensor control capabilities.</summary>
        /// <param name="deviceHandle">Handle obtained by <see cref="DeviceOpen"/>.</param>
        /// <param name="command">Color sensor control command.</param>
        /// <param name="supportsAuto">Output: whether the color sensor's control support auto mode or not. <see langword="true"/> if it supports auto mode, otherwise <see langword="false"/>.</param>
        /// <param name="minValue">Output: the color sensor's control minimum value of <paramref name="command"/>.</param>
        /// <param name="maxValue">Output: the color sensor's control maximum value of <paramref name="command"/>.</param>
        /// <param name="stepValue">Output: the color sensor's control step value of <paramref name="command"/>.</param>
        /// <param name="defaultValue">Output: the color sensor's control default value of <paramref name="command"/>.</param>
        /// <param name="defaultMode">Output: the color sensor's control default mode of <paramref name="command"/>.</param>
        /// <returns><see cref="NativeApiCallResults.Result.Succeeded"/> if the value was successfully returned, <see cref="NativeApiCallResults.Result.Failed"/> if an error occurred</returns>
        [DllImport(Sdk.K4A_DLL_NAME, EntryPoint = "k4a_device_get_color_control_capabilities", CallingConvention = CallingConvention.Cdecl)]
        public static extern NativeApiCallResults.Result DeviceGetColorControlCapabilities(
            NativeHandles.DeviceHandle deviceHandle,
            ColorControlCommand command,
            out bool supportsAuto,
            out int minValue,
            out int maxValue,
            out int stepValue,
            out int defaultValue,
            out ColorControlMode defaultMode);

        // K4A_EXPORT k4a_result_t k4a_device_get_color_control(k4a_device_t device_handle,
        //                                                      k4a_color_control_command_t command,
        //                                                      k4a_color_control_mode_t *mode,
        //                                                      int32_t *value);
        /// <summary>Get the Azure Kinect color sensor control value.</summary>
        /// <param name="deviceHandle">Handle obtained by <see cref="DeviceOpen"/>.</param>
        /// <param name="command">Color sensor control command.</param>
        /// <param name="mode">This mode represents whether the command is in automatic or manual mode.</param>
        /// <param name="value">
        /// This value is always written, but is only valid when the returned <paramref name="mode"/> is manual for the current <paramref name="command"/>.
        /// </param>
        /// <returns><see cref="NativeApiCallResults.Result.Succeeded"/> if the value was successfully returned, <see cref="NativeApiCallResults.Result.Failed"/> if an error occurred.</returns>
        /// <remarks>
        /// Each control command may be set to manual or automatic. See the definition of <see cref="ColorControlCommand"/> on
        /// how to interpret the <paramref name="mode"/> for each command.
        ///
        /// Some control commands are only supported in manual mode. When a command is in automatic mode, the <paramref name="value"/> for
        /// that command is not valid.
        ///
        /// Control values set on a device are reset only when the device is power cycled. The device will retain the
        /// settings even if the <paramref name="deviceHandle"/> is closed or the application is restarted.
        /// </remarks>
        [DllImport(Sdk.K4A_DLL_NAME, EntryPoint = "k4a_device_get_color_control", CallingConvention = CallingConvention.Cdecl)]
        public static extern NativeApiCallResults.Result DeviceGetColorControl(
            NativeHandles.DeviceHandle deviceHandle, ColorControlCommand command, out ColorControlMode mode, out int value);

        // K4A_EXPORT k4a_result_t k4a_device_set_color_control(k4a_device_t device_handle,
        //                                                      k4a_color_control_command_t command,
        //                                                      k4a_color_control_mode_t mode,
        //                                                      int32_t value);
        /// <summary>Set the Azure Kinect color sensor control value.</summary>
        /// <param name="deviceHandle">Handle obtained by <see cref="DeviceOpen"/>.</param>
        /// <param name="command">Color sensor control command.</param>
        /// <param name="mode">Color sensor control mode to set. This mode represents whether the command is in automatic or manual mode.</param>
        /// <param name="value">
        /// Value to set the color sensor's control to. The value is only valid if <paramref name="mode"/>
        /// is set to manual, and is otherwise ignored.
        /// </param>
        /// <returns><see cref="NativeApiCallResults.Result.Succeeded"/> if the value was successfully set, <see cref="NativeApiCallResults.Result.Failed"/> if an error occurred</returns>
        /// <remarks>
        /// Each control command may be set to manual or automatic. See the definition of <see cref="ColorControlCommand"/> on how
        /// to interpret the <paramref name="mode"/> for each command.
        ///
        /// Some control commands are only supported in manual mode. When a command is in automatic mode, the <paramref name="value"/> for that
        /// command is not valid.
        ///
        /// Control values set on a device are reset only when the device is power cycled. The device will retain the settings
        /// even if the device is closed or the application is restarted.
        /// </remarks>
        [DllImport(Sdk.K4A_DLL_NAME, EntryPoint = "k4a_device_set_color_control", CallingConvention = CallingConvention.Cdecl)]
        public static extern NativeApiCallResults.Result DeviceSetColorControl(
            NativeHandles.DeviceHandle deviceHandle,
            ColorControlCommand command,
            ColorControlMode mode,
            int value);

        // K4A_EXPORT k4a_buffer_result_t k4a_device_get_raw_calibration(k4a_device_t device_handle,
        //                                                               uint8_t *data,
        //                                                               size_t *data_size);
        /// <summary>Get the raw calibration blob for the entire Azure Kinect device.</summary>
        /// <param name="deviceHandle">Handle obtained by <see cref="DeviceOpen"/>.</param>
        /// <param name="data">
        /// Location to write the calibration data to. This field may optionally be set to <see langword="null"/> for the caller to query for
        /// the needed data size.
        /// </param>
        /// <param name="dataSize">
        /// On passing <paramref name="dataSize"/> into the function this variable represents the available size of the <paramref name="data"/>
        /// buffer. On return this variable is updated with the amount of data actually written to the buffer, or the size
        /// required to store the calibration buffer if <paramref name="data"/> is <see langword="null"/>.
        /// </param>
        /// <returns>
        /// <see cref="NativeApiCallResults.BufferResult.Succeeded"/> if <paramref name="data"/> was successfully written.
        /// If <paramref name="dataSize"/> points to a buffer size that is
        /// too small to hold the output or data is <see langword="null"/>, <see cref="NativeApiCallResults.BufferResult.TooSmall"/> is returned
        /// and <paramref name="dataSize"/> is updated to contain the minimum buffer size needed to capture the calibration data.
        /// </returns>
        [DllImport(Sdk.K4A_DLL_NAME, EntryPoint = "k4a_device_get_raw_calibration", CallingConvention = CallingConvention.Cdecl)]
        public static extern NativeApiCallResults.BufferResult DeviceGetRawCalibration(
            NativeHandles.DeviceHandle deviceHandle,
            [Out] byte[] data,
            ref UIntPtr dataSize);

        // K4A_EXPORT k4a_result_t k4a_device_get_calibration(k4a_device_t device_handle,
        //                                                    const k4a_depth_mode_t depth_mode,
        //                                                    const k4a_color_resolution_t color_resolution,
        //                                                    k4a_calibration_t *calibration);
        /// <summary>Get the camera calibration for the entire Azure Kinect device.</summary>
        /// <param name="deviceHandle">Handle obtained by <see cref="DeviceOpen"/>.</param>
        /// <param name="depthMode">Mode in which depth camera is operated.</param>
        /// <param name="colorResolution">Resolution in which color camera is operated.</param>
        /// <param name="calibration">Output: calibration data.</param>
        /// <returns><see cref="NativeApiCallResults.Result.Succeeded"/> if <paramref name="calibration"/> was successfully written. <see cref="NativeApiCallResults.Result.Failed"/> otherwise.</returns>
        /// <remarks>
        /// The <paramref name="calibration"/> represents the data needed to transform between the camera views and may be
        /// different for each operating <paramref name="depthMode"/> and <paramref name="colorResolution"/> the device is configured to operate in.
        ///
        /// The <paramref name="calibration"/> output is used as input to all calibration and transformation functions.
        /// </remarks>
        [DllImport(Sdk.K4A_DLL_NAME, EntryPoint = "k4a_device_get_calibration", CallingConvention = CallingConvention.Cdecl)]
        public static extern NativeApiCallResults.Result DeviceGetCalibration(
            NativeHandles.DeviceHandle deviceHandle,
            DepthMode depthMode,
            ColorResolution colorResolution,
            out Calibration calibration);

        // K4A_EXPORT k4a_result_t k4a_device_get_sync_jack(k4a_device_t device_handle,
        //                                                  bool *sync_in_jack_connected,
        //                                                  bool *sync_out_jack_connected);
        /// <summary>Get the device jack status for the synchronization in and synchronization out connectors.</summary>
        /// <param name="deviceHandle">Handle obtained by <see cref="DeviceOpen"/>.</param>
        /// <param name="syncInJackConnected">Upon successful return this value will be set to true if a cable is connected to this sync in jack.</param>
        /// <param name="syncOutJackConnected">Upon successful return this value will be set to true if a cable is connected to this sync out jack.</param>
        /// <returns><see cref="NativeApiCallResults.Result.Succeeded"/> if the connector status was successfully read.</returns>
        /// <remarks>
        /// If <paramref name="syncInJackConnected"/> is <see langword="true"/> then the wired synchronization mode can be set to
        /// standalone or subordinate. If <paramref name="syncOutJackConnected"/> is <see langword="true"/> then
        /// the wired synchronization mode can be set to standalone or master.
        /// </remarks>
        [DllImport(Sdk.K4A_DLL_NAME, EntryPoint = "k4a_device_get_sync_jack", CallingConvention = CallingConvention.Cdecl)]
        public static extern NativeApiCallResults.Result DeviceGetSyncJack(
            NativeHandles.DeviceHandle deviceHandle,
            out bool syncInJackConnected,
            out bool syncOutJackConnected);

        // K4A_EXPORT k4a_result_t k4a_calibration_get_from_raw(char *raw_calibration,
        //                                                      size_t raw_calibration_size,
        //                                                      const k4a_depth_mode_t depth_mode,
        //                                                      const k4a_color_resolution_t color_resolution,
        //                                                      k4a_calibration_t *calibration);
        /// <summary>Get the camera calibration for a device from a raw calibration blob.</summary>
        /// <param name="rawCalibration">Raw calibration blob obtained from a device or recording. The raw calibration must be NULL terminated.</param>
        /// <param name="rawCalibrationSize">The size, in bytes, of <paramref name="rawCalibration"/> including the NULL termination.</param>
        /// <param name="depthMode">Mode in which depth camera is operated.</param>
        /// <param name="colorResolution">Resolution in which color camera is operated.</param>
        /// <param name="calibration">Result: calibration data</param>
        /// <returns><see cref="NativeApiCallResults.Result.Succeeded"/> if <paramref name="calibration"/> was successfully written. <see cref="NativeApiCallResults.Result.Failed"/> otherwise.</returns>
        /// <remarks>
        /// The <paramref name="calibration"/> represents the data needed to transform between the camera views and is
        /// different for each operating <paramref name="depthMode"/> and <paramref name="colorResolution"/> the device is configured to operate in.
        ///
        /// The <paramref name="calibration"/> output is used as input to all transformation functions.
        /// </remarks>
        [DllImport(Sdk.K4A_DLL_NAME, EntryPoint = "k4a_calibration_get_from_raw", CallingConvention = CallingConvention.Cdecl)]
        public static extern NativeApiCallResults.Result CalibrationGetFromRaw(
            byte[] rawCalibration,
            UIntPtr rawCalibrationSize,
            DepthMode depthMode,
            ColorResolution colorResolution,
            out Calibration calibration);

        // K4A_EXPORT k4a_result_t k4a_calibration_3d_to_3d(const k4a_calibration_t *calibration,
        //                                                  const k4a_float3_t *source_point3d_mm,
        //                                                  const k4a_calibration_type_t source_camera,
        //                                                  const k4a_calibration_type_t target_camera,
        //                                                  k4a_float3_t *target_point3d_mm);
        /// <summary>Transform a 3D point of a source coordinate system into a 3D point of the target coordinate system.</summary>
        /// <param name="calibration">Camera calibration data.</param>
        /// <param name="sourcePoint3DMm">The 3D coordinates in millimeters representing a point in <paramref name="sourceCamera"/>.</param>
        /// <param name="sourceCamera">The current camera.</param>
        /// <param name="targetCamera">The target camera.</param>
        /// <param name="targetPoint3DMm">Output: the new 3D coordinates of the input point in the coordinate space of <paramref name="targetCamera"/> in millimeters.</param>
        /// <returns>
        /// <see cref="NativeApiCallResults.Result.Succeeded"/> if <paramref name="targetPoint3DMm"/> was successfully written.
        /// <see cref="NativeApiCallResults.Result.Failed"/> if <paramref name="calibration"/> contained invalid transformation parameters.
        /// </returns>
        /// <remarks>
        /// This function is used to transform 3D points between depth and color camera coordinate systems. The function uses the
        /// extrinsic camera calibration. It computes the output via multiplication with a precomputed matrix encoding a 3D
        /// rotation and a 3D translation. If <paramref name="sourceCamera"/> and <paramref name="targetCamera"/> are the same, then <paramref name="targetPoint3DMm"/> will
        /// be identical to <paramref name="sourcePoint3DMm"/>.
        /// </remarks>
        [DllImport(Sdk.K4A_DLL_NAME, EntryPoint = "k4a_calibration_3d_to_3d", CallingConvention = CallingConvention.Cdecl)]
        public static extern NativeApiCallResults.Result Calibration3DTo3D(
            [In] ref Calibration calibration,
            [In] ref Float3 sourcePoint3DMm,
            CalibrationGeometry sourceCamera,
            CalibrationGeometry targetCamera,
            out Float3 targetPoint3DMm);

        // K4A_EXPORT k4a_result_t k4a_calibration_2d_to_3d(const k4a_calibration_t *calibration,
        //                                                  const k4a_float2_t *source_point2d,
        //                                                  const float source_depth_mm,
        //                                                  const k4a_calibration_type_t source_camera,
        //                                                  const k4a_calibration_type_t target_camera,
        //                                                  k4a_float3_t *target_point3d_mm,
        //                                                  int *valid);
        /// <summary>
        /// Transform a 2D pixel coordinate with an associated depth value of the source camera
        /// into a 3D point of the target coordinate system.
        /// </summary>
        /// <param name="calibration">Camera calibration data.</param>
        /// <param name="sourcePoint2D">The 2D pixel in <paramref name="sourceCamera"/> coordinates.</param>
        /// <param name="sourceDepthMm">The depth of <paramref name="sourcePoint2D"/> in millimeters.</param>
        /// <param name="sourceCamera">The current camera.</param>
        /// <param name="targetCamera">The target camera.</param>
        /// <param name="targetPoint3DMm">Output: the 3D coordinates of the input pixel in the coordinate system of <paramref name="targetCamera"/> in millimeters.</param>
        /// <param name="valid">
        /// The output parameter returns a value of <see langword="true"/> if the <paramref name="sourcePoint2D"/> is a valid coordinate,
        /// and will return <see langword="false"/> if the coordinate is not valid in the calibration model.
        /// </param>
        /// <returns>
        /// <see cref="NativeApiCallResults.Result.Succeeded"/> if <paramref name="targetPoint3DMm"/> was successfully written.
        /// <see cref="NativeApiCallResults.Result.Failed"/> if <paramref name="calibration"/>
        /// contained invalid transformation parameters.
        /// If the function returns <see cref="NativeApiCallResults.Result.Succeeded"/>, but <paramref name="valid"/> is <see langword="false"/>,
        /// the transformation was computed, but the results in <paramref name="targetPoint3DMm"/> are outside of the range of valid
        /// calibration and should be ignored.
        /// </returns>
        /// <remarks>
        /// This function applies the intrinsic calibration of <paramref name="sourceCamera"/> to compute the 3D ray from the focal point of the
        /// camera through pixel <paramref name="sourcePoint2D"/>. The 3D point on this ray is then found using <paramref name="sourceDepthMm"/>. If
        /// <paramref name="targetCamera"/> is different from <paramref name="sourceCamera"/>, the 3D point is transformed to <paramref name="targetCamera"/> using
        /// <see cref="Calibration3DTo3D"/>.
        /// In practice, <paramref name="sourceCamera"/> and <paramref name="targetCamera"/> will often be identical. In this
        /// case, no 3D to 3D transformation is applied.
        ///
        /// If <paramref name="sourcePoint2D"/> is not considered as valid pixel coordinate
        /// according to the intrinsic camera model, <paramref name="valid"/> is set to <see langword="false"/>.
        /// If it is valid, <paramref name="valid"/> will be set to <see langword="true"/>. The user
        /// should not use the value of <paramref name="targetPoint3DMm"/> if <paramref name="valid"/> was set to <see langword="false"/>.
        /// </remarks>
        [DllImport(Sdk.K4A_DLL_NAME, EntryPoint = "k4a_calibration_2d_to_3d", CallingConvention = CallingConvention.Cdecl)]
        public static extern NativeApiCallResults.Result Calibration2DTo3D(
            [In] ref Calibration calibration,
            [In] ref Float2 sourcePoint2D,
            float sourceDepthMm,
            CalibrationGeometry sourceCamera,
            CalibrationGeometry targetCamera,
            out Float3 targetPoint3DMm,
            out bool valid);

        // K4A_EXPORT k4a_result_t k4a_calibration_3d_to_2d(const k4a_calibration_t *calibration,
        //                                                  const k4a_float3_t *source_point3d_mm,
        //                                                  const k4a_calibration_type_t source_camera,
        //                                                  const k4a_calibration_type_t target_camera,
        //                                                  k4a_float2_t *target_point2d,
        //                                                  int *valid);
        /// <summary>Transform a 3D point of a source coordinate system into a 2D pixel coordinate of the target camera.</summary>
        /// <param name="calibration">Camera calibration data.</param>
        /// <param name="sourcePoint3DMm">The 3D coordinates in millimeters representing a point in <paramref name="sourceCamera"/>.</param>
        /// <param name="sourceCamera">The current camera.</param>
        /// <param name="targetCamera">The target camera.</param>
        /// <param name="targetPoint2D">Output: the 2D pixel in <paramref name="targetCamera"/> coordinates.</param>
+ /// + /// The output parameter returns if the is a valid coordinate in the + /// coordinate system, and will return if the coordinate is not valid in the calibration model. + /// + /// + /// if was successfully written. + /// if contained invalid transformation parameters. + /// If the function returns , but is , + /// the transformation was computed, but the results in are outside of the range of valid calibration + /// and should be ignored. + /// + /// + /// If is different from , is transformed + /// to using . + /// In practice, and will often be identical. + /// In this case, no 3D to 3D transformation is applied. The 3D point in the coordinate system of is then + /// projected onto the image plane using the intrinsic calibration of . + /// + /// If does not map to a valid 2D coordinate in the coordinate system, + /// is set to . If it is valid, will be set to . + /// The user should not use the value of if was set to . + /// + [DllImport(Sdk.K4A_DLL_NAME, EntryPoint = "k4a_calibration_3d_to_2d", CallingConvention = CallingConvention.Cdecl)] + public static extern NativeApiCallResults.Result Calibration3DTo2D( + [In] ref Calibration calibration, + [In] ref Float3 sourcePoint3DMm, + CalibrationGeometry sourceCamera, + CalibrationGeometry targetCamera, + out Float2 targetPoint2D, + out bool valid); + + // K4A_EXPORT k4a_result_t k4a_calibration_2d_to_2d(const k4a_calibration_t *calibration, + // const k4a_float2_t *source_point2d, + // const float source_depth_mm, + // const k4a_calibration_type_t source_camera, + // const k4a_calibration_type_t target_camera, + // k4a_float2_t *target_point2d, + // int *valid); + /// + /// Transform a 2D pixel coordinate with an associated depth value of the source camera into a 2D pixel coordinate of the target camera. + /// + /// Camera calibration data. + /// The 2D pixel in coordinates. + /// The depth of in millimeters. + /// The current camera. + /// The target camera. + /// Output: the 2D pixel in coordinates. 
+ /// + /// The output parameter returns if the is a valid coordinate in the + /// coordinate system, and will return if the coordinate is not valid in the calibration model. + /// + /// + /// if was successfully written. + /// if contained invalid transformation parameters. + /// If the function returns , but valid is , + /// the transformation was computed, but the results in are outside of the range of valid calibration + /// and should be ignored. + /// + /// + /// This function maps a pixel between the coordinate systems of the depth and color cameras. It is equivalent to calling + /// to compute the 3D point corresponding to and then using + /// to map the 3D point into the coordinate system of the . + /// + /// If and are identical, the function immediately sets to + /// and returns without computing any transformations. + /// + /// If does not map to a valid 2D coordinate in the coordinate system, + /// is set to . If it is valid, will be set to . + /// The user should not use the value of if was set to 0. + /// + [DllImport(Sdk.K4A_DLL_NAME, EntryPoint = "k4a_calibration_2d_to_2d", CallingConvention = CallingConvention.Cdecl)] + public static extern NativeApiCallResults.Result Calibration2DTo2D( + [In] ref Calibration calibration, + [In] ref Float2 sourcePoint2D, + float sourceDepthMm, + CalibrationGeometry sourceCamera, + CalibrationGeometry targetCamera, + out Float2 targetPoint2D, + out bool valid); + + // K4A_EXPORT k4a_transformation_t k4a_transformation_create(const k4a_calibration_t *calibration); + /// Get handle to transformation. + /// Camera calibration data. + /// A transformation handle. An invalid handle is returned if creation fails. + /// + /// The transformation handle is used to transform images from the coordinate system of one camera into the other. Each + /// transformation handle requires some pre-computed resources to be allocated, which are retained until the handle is + /// destroyed. 
+ /// + [DllImport(Sdk.K4A_DLL_NAME, EntryPoint = "k4a_transformation_create", CallingConvention = CallingConvention.Cdecl)] + public static extern NativeHandles.TransformationHandle TransformationCreate([In] ref Calibration calibration); + + // K4A_EXPORT k4a_result_t k4a_transformation_depth_image_to_color_camera(k4a_transformation_t transformation_handle, + // const k4a_image_t depth_image, + // k4a_image_t transformed_depth_image); + /// Transforms the depth map into the geometry of the color camera. + /// Transformation handle. + /// Handle to input depth image. + /// Handle to output transformed depth image. + /// + /// if was successfully written + /// and otherwise. + /// + /// + /// This produces a depth image for which each pixel matches the corresponding pixel coordinates of the color camera. + /// + /// and must be of format . + /// + /// must have a width and height matching the width and height of the color camera in the mode + /// specified by the used to create the with . + /// + /// The contents will be filled with the depth values derived from in the color + /// camera's coordinate space. + /// + /// should be created by the caller using + /// or . + /// + [DllImport(Sdk.K4A_DLL_NAME, EntryPoint = "k4a_transformation_depth_image_to_color_camera", CallingConvention = CallingConvention.Cdecl)] + public static extern NativeApiCallResults.Result TransformationDepthImageToColorCamera( + NativeHandles.TransformationHandle transformationHandle, + NativeHandles.ImageHandle depthImage, + NativeHandles.ImageHandle transformedDepthImage); + + // K4A_EXPORT k4a_result_t k4a_transformation_color_image_to_depth_camera(k4a_transformation_t transformation_handle, + // const k4a_image_t depth_image, + // const k4a_image_t color_image, + // k4a_image_t transformed_color_image); + /// Transforms a color image into the geometry of the depth camera. + /// Transformation handle. + /// Handle to input depth image. + /// Handle to input color image. 
+ /// Handle to output transformed color image. + /// + /// if was successfully written + /// and otherwise. + /// + /// + /// This produces a color image for which each pixel matches the corresponding pixel coordinates of the depth camera. + /// + /// and need to represent the same moment in time. The depth data will be applied to the + /// color image to properly warp the color data to the perspective of the depth camera. + /// + /// must be of type . must be of format + /// . + /// + /// image must be of format . must + /// have the width and height of the depth camera in the mode specified by the used to create + /// the with . + /// + /// should be created by the caller using + /// or . + /// + [DllImport(Sdk.K4A_DLL_NAME, EntryPoint = "k4a_transformation_color_image_to_depth_camera", CallingConvention = CallingConvention.Cdecl)] + public static extern NativeApiCallResults.Result TransformationColorImageToDepthCamera( + NativeHandles.TransformationHandle transformationHandle, + NativeHandles.ImageHandle depthImage, + NativeHandles.ImageHandle colorImage, + NativeHandles.ImageHandle transformedColorImage); + + // K4A_EXPORT k4a_result_t k4a_transformation_depth_image_to_point_cloud(k4a_transformation_t transformation_handle, + // const k4a_image_t depth_image, + // const k4a_calibration_type_t camera, + // k4a_image_t xyz_image); + /// Transforms the depth image into 3 planar images representing X, Y and Z-coordinates of corresponding 3D points. + /// Transformation handle. + /// Handle to input depth image. + /// Geometry in which depth map was computed. + /// Handle to output xyz image. + /// + /// if was successfully written + /// and otherwise. + /// + /// + /// must be of format . + /// + /// The parameter tells the function what the perspective of the is. + /// If the was captured directly from the depth camera, the value should be . 
+ /// If the is the result of a transformation into the color camera's coordinate space using + /// , + /// the value should be . + /// + /// The format of must be . The width and height of must match the + /// width and height of . must have a stride in bytes of at least 6 times its width in pixels. + /// + /// Each pixel of the consists of three values, totaling 6 bytes. The three values are the + /// X, Y, and Z values of the point. + /// + /// should be created by the caller using + /// or . + /// + [DllImport(Sdk.K4A_DLL_NAME, EntryPoint = "k4a_transformation_depth_image_to_point_cloud", CallingConvention = CallingConvention.Cdecl)] + public static extern NativeApiCallResults.Result TransformationDepthImageToPointCloud( + NativeHandles.TransformationHandle transformationHandle, + NativeHandles.ImageHandle depthImage, + CalibrationGeometry camera, + NativeHandles.ImageHandle xyzImage); + } +} diff --git a/K4AdotNet/Sensor/FirmwareBuild.cs b/K4AdotNet/Sensor/FirmwareBuild.cs new file mode 100644 index 0000000..3790a4c --- /dev/null +++ b/K4AdotNet/Sensor/FirmwareBuild.cs @@ -0,0 +1,18 @@ +namespace K4AdotNet.Sensor +{ + // Defined in k4atypes.h: + // typedef enum + // { + // K4A_FIRMWARE_BUILD_RELEASE, + // K4A_FIRMWARE_BUILD_DEBUG + // } k4a_firmware_build_t; + /// Firmware build type. + public enum FirmwareBuild + { + /// Production firmware. + Release = 0, + + /// Pre-production firmware. + Debug, + } +} diff --git a/K4AdotNet/Sensor/FirmwareSignature.cs b/K4AdotNet/Sensor/FirmwareSignature.cs new file mode 100644 index 0000000..8b0739e --- /dev/null +++ b/K4AdotNet/Sensor/FirmwareSignature.cs @@ -0,0 +1,22 @@ +namespace K4AdotNet.Sensor +{ + // Defined in k4atypes.h: + // typedef enum + // { + // K4A_FIRMWARE_SIGNATURE_MSFT, + // K4A_FIRMWARE_SIGNATURE_TEST, + // K4A_FIRMWARE_SIGNATURE_UNSIGNED + // } k4a_firmware_signature_t; + /// Firmware signature type. + public enum FirmwareSignature + { + /// Microsoft signed firmware. 
+ MicrosoftSignedFirmware = 0, + + /// Test signed firmware. + TestSignedFirmware, + + /// Unsigned firmware. + UnsignedFirmware, + } +} diff --git a/K4AdotNet/Sensor/FirmwareVersion.cs b/K4AdotNet/Sensor/FirmwareVersion.cs new file mode 100644 index 0000000..7e702e2 --- /dev/null +++ b/K4AdotNet/Sensor/FirmwareVersion.cs @@ -0,0 +1,167 @@ +using System; +using System.Runtime.InteropServices; + +namespace K4AdotNet.Sensor +{ + // typedef struct _k4a_version_t + // { + // uint32_t major; + // uint32_t minor; + // uint32_t iteration; + // } k4a_version_t; + /// + /// Version information about sensor firmware. + /// + [StructLayout(LayoutKind.Sequential)] + public struct FirmwareVersion : + IEquatable, IEquatable, + IComparable, IComparable, IComparable, + IFormattable + { + /// Major version; represents a breaking change. + public int Major; + + /// Minor version; represents additional features, no regression from lower versions with same major version. + public int Minor; + + /// Reserved. + public int Revision; + + /// Creates version with specified components. + /// Value for field . + /// Value for field . + /// Value for field . + public FirmwareVersion(int major, int minor, int revision) + { + Major = major; + Minor = minor; + Revision = revision; + } + + public FirmwareVersion(Version version) + { + if (version is null) + throw new ArgumentNullException(nameof(version)); + Major = version.Major; + Minor = version.Minor; + Revision = version.Build; + } + + public Version ToVersion() + => new Version(Major, Minor, Revision); + + public static implicit operator Version(FirmwareVersion version) + => version.ToVersion(); + + public static implicit operator FirmwareVersion(Version version) + => new FirmwareVersion(version); + + /// Per-component comparison of versions. + /// Version to be compared with this one. + /// true - versions are the same, false - versions are differ from each other. 
+        public bool Equals(FirmwareVersion other)
+            => Major.Equals(other.Major) && Minor.Equals(other.Minor) && Revision.Equals(other.Revision);
+
+        // Fixed: guard against null. The FirmwareVersion(Version) constructor throws
+        // ArgumentNullException; Equals must simply report inequality for null.
+        public bool Equals(Version other)
+            => !(other is null) && Equals(new FirmwareVersion(other));
+
+        public override bool Equals(object obj)
+        {
+            if (obj is null)
+                return false;
+            if (obj is FirmwareVersion)
+                return Equals((FirmwareVersion)obj);
+            if (obj is Version)
+                return Equals((Version)obj);
+            return false;
+        }
+
+        public static bool operator ==(FirmwareVersion left, FirmwareVersion right)
+            => left.Equals(right);
+
+        public static bool operator !=(FirmwareVersion left, FirmwareVersion right)
+            => !left.Equals(right);
+
+        public static bool operator ==(FirmwareVersion left, Version right)
+            => left.Equals(right);
+
+        public static bool operator !=(FirmwareVersion left, Version right)
+            => !left.Equals(right);
+
+        // Fixed: equality is symmetric, so delegate to the null-safe Equals(Version)
+        // instead of constructing a FirmwareVersion from a possibly-null Version.
+        public static bool operator ==(Version left, FirmwareVersion right)
+            => right.Equals(left);
+
+        public static bool operator !=(Version left, FirmwareVersion right)
+            => !right.Equals(left);
+
+        public override int GetHashCode()
+            => unchecked(Major * 10000 + Minor * 100 + Revision);
+
+        public int CompareTo(FirmwareVersion other)
+        {
+            var res = Major.CompareTo(other.Major);
+            if (res != 0)
+                return res;
+            res = Minor.CompareTo(other.Minor);
+            if (res != 0)
+                return res;
+            return Revision.CompareTo(other.Revision);
+        }
+
+        // Fixed: by .NET convention any value compares greater than null
+        // (consistent with CompareTo(object) below).
+        public int CompareTo(Version other)
+            => other is null ? 1 : CompareTo(new FirmwareVersion(other));
+
+        public int CompareTo(object obj)
+        {
+            if (obj is null)
+                return 1;
+            if (obj is FirmwareVersion)
+                return CompareTo((FirmwareVersion)obj);
+            if (obj is Version)
+                return CompareTo((Version)obj);
+            throw new ArgumentException("Object is not a FirmwareVersion or Version", nameof(obj));
+        }
+
+        public static bool operator <(FirmwareVersion left, FirmwareVersion right)
+            => left.CompareTo(right) < 0;
+
+        public static bool operator <=(FirmwareVersion left, FirmwareVersion right)
+            =>
left.CompareTo(right) <= 0; + + public static bool operator >(FirmwareVersion left, FirmwareVersion right) + => left.CompareTo(right) > 0; + + public static bool operator >=(FirmwareVersion left, FirmwareVersion right) + => left.CompareTo(right) >= 0; + + public static bool operator <(FirmwareVersion left, Version right) + => left.CompareTo(right) < 0; + + public static bool operator <=(FirmwareVersion left, Version right) + => left.CompareTo(right) <= 0; + + public static bool operator >(FirmwareVersion left, Version right) + => left.CompareTo(right) > 0; + + public static bool operator >=(FirmwareVersion left, Version right) + => left.CompareTo(right) >= 0; + + public static bool operator <(Version left, FirmwareVersion right) + => new FirmwareVersion(left).CompareTo(right) < 0; + + public static bool operator <=(Version left, FirmwareVersion right) + => new FirmwareVersion(left).CompareTo(right) <= 0; + + public static bool operator >(Version left, FirmwareVersion right) + => new FirmwareVersion(left).CompareTo(right) > 0; + + public static bool operator >=(Version left, FirmwareVersion right) + => new FirmwareVersion(left).CompareTo(right) >= 0; + + public string ToString(string format, IFormatProvider formatProvider) + => $"{Major.ToString(format, formatProvider)}.{Minor.ToString(format, formatProvider)}.{Revision.ToString(format, formatProvider)}"; + + public override string ToString() + => $"{Major}.{Minor}.{Revision}"; + } +} diff --git a/K4AdotNet/Sensor/FrameRate.cs b/K4AdotNet/Sensor/FrameRate.cs new file mode 100644 index 0000000..87adde8 --- /dev/null +++ b/K4AdotNet/Sensor/FrameRate.cs @@ -0,0 +1,28 @@ +namespace K4AdotNet.Sensor +{ + // Defined in k4atypes.h: + // typedef enum + // { + // K4A_FRAMES_PER_SECOND_5 = 0, + // K4A_FRAMES_PER_SECOND_15, + // K4A_FRAMES_PER_SECOND_30, + // } k4a_fps_t; + /// Color and depth sensor frame rate. + /// + /// This enumeration is used to select the desired frame rate to operate the cameras. 
The actual + /// frame rate may vary slightly due to dropped data, synchronization variation between devices, + /// clock accuracy, or if the camera exposure priority mode causes reduced frame rate. + /// + /// + public enum FrameRate + { + /// Five (5) frames per second. + Five = 0, + + /// Fifteen (15) frames per second. + Fifteen, + + /// Thirty (30) frames per second. + Thirty, + } +} diff --git a/K4AdotNet/Sensor/FrameRateExtensions.cs b/K4AdotNet/Sensor/FrameRateExtensions.cs new file mode 100644 index 0000000..b0e6049 --- /dev/null +++ b/K4AdotNet/Sensor/FrameRateExtensions.cs @@ -0,0 +1,32 @@ +using System; + +namespace K4AdotNet.Sensor +{ + /// Helper extension and static methods for enumeration. + public static class FrameRateExtensions + { + /// Convert enumeration value to appropriate number of frames per second (Hz). + public static int ToNumberHz(this FrameRate frameRate) + { + switch (frameRate) + { + case FrameRate.Five: return 5; + case FrameRate.Fifteen: return 15; + case FrameRate.Thirty: return 30; + default: throw new ArgumentOutOfRangeException(nameof(frameRate)); + } + } + + /// Constructs enumeration value from appropriate number of frames per second (Hz). 
+ public static FrameRate FromNumberHz(int frameRateHz) + { + switch (frameRateHz) + { + case 5: return FrameRate.Five; + case 15: return FrameRate.Fifteen; + case 30: return FrameRate.Thirty; + default: throw new ArgumentOutOfRangeException(nameof(frameRateHz)); + } + } + } +} diff --git a/K4AdotNet/Sensor/HardwareVersion.cs b/K4AdotNet/Sensor/HardwareVersion.cs new file mode 100644 index 0000000..cf2758c --- /dev/null +++ b/K4AdotNet/Sensor/HardwareVersion.cs @@ -0,0 +1,42 @@ +using System.Runtime.InteropServices; + +namespace K4AdotNet.Sensor +{ + // Defined in k4atypes.h: + // typedef struct _k4a_hardware_version_t + // { + // k4a_version_t rgb; + // k4a_version_t depth; + // k4a_version_t audio; + // k4a_version_t depth_sensor; + // k4a_firmware_build_t firmware_build; + // k4a_firmware_signature_t firmware_signature; + // } k4a_hardware_version_t; + /// Structure to define hardware version. + [StructLayout(LayoutKind.Sequential)] + public struct HardwareVersion + { + /// Color camera firmware version. + [MarshalAs(UnmanagedType.Struct)] + public FirmwareVersion ColorCameraFirmwareVersion; + + /// Depth camera firmware version. + [MarshalAs(UnmanagedType.Struct)] + public FirmwareVersion DepthCamereFirmwareVersion; + + /// Audio device firmware version. + [MarshalAs(UnmanagedType.Struct)] + public FirmwareVersion AudioDeviceFirmwareVersion; + + /// Depth sensor firmware version. + [MarshalAs(UnmanagedType.Struct)] + public FirmwareVersion DepthSensorFirmwareVersion; + + /// Build type reported by the firmware. + public FirmwareBuild FirmwareBuild; + + /// Signature type of the firmware. 
+ public FirmwareSignature FirmwareSignature; + + } +} diff --git a/K4AdotNet/Sensor/ImageFormat.cs b/K4AdotNet/Sensor/ImageFormat.cs new file mode 100644 index 0000000..5a2413b --- /dev/null +++ b/K4AdotNet/Sensor/ImageFormat.cs @@ -0,0 +1,89 @@ +namespace K4AdotNet.Sensor +{ + // Defined in k4atypes.h: + // typedef enum + // { + // K4A_IMAGE_FORMAT_COLOR_MJPG = 0, + // K4A_IMAGE_FORMAT_COLOR_NV12, + // K4A_IMAGE_FORMAT_COLOR_YUY2, + // K4A_IMAGE_FORMAT_COLOR_BGRA32, + // K4A_IMAGE_FORMAT_DEPTH16, + // K4A_IMAGE_FORMAT_IR16, + // K4A_IMAGE_FORMAT_CUSTOM, + // } k4a_image_format_t; + /// Image format type. + public enum ImageFormat + { + /// + /// The buffer for each image is encoded as a JPEG and can be decoded by a JPEG decoder. + /// + /// + /// Because the image is compressed, the stride parameter is not applicable. + /// Each MJPG encoded image in a stream may be of differing size depending on the compression efficiency. + /// + ColorMJPG = 0, + + /// + /// NV12 images separate the luminance and chroma data such that all the luminance is at the + /// beginning of the buffer, and the chroma lines follow immediately after. + /// + /// + /// Stride indicates the length of each line in bytes and should be used to determine the start location of each line + /// of the image in memory. Chroma has half as many lines of height and half the width in pixels of the luminance. + /// Each chroma line has the same width in bytes as a luminance line. + /// + ColorNV12, + + /// + /// YUY2 stores chroma and luminance data in interleaved pixels. + /// + /// + /// Stride indicates the length of each line in bytes and should be used to determine the start location of each + /// line of the image in memory. + /// + ColorYUY2, + + /// + /// Each pixel of BGRA32 data is four bytes. The first three bytes represent Blue, Green, + /// and Red data. The fourth byte is the alpha channel and is unused in the Azure Kinect APIs. 
+ /// + /// + /// Stride indicates the length of each line in bytes and should be used to determine the start location of each + /// line of the image in memory. + /// + /// The Azure Kinect device does not natively capture in this format. Requesting images of this format + /// requires additional computation in the API. + /// + ColorBGRA32, + + /// + /// Each pixel of DEPTH16 data is two bytes of little endian unsigned depth data. The unit of the data is in + /// millimeters from the origin of the camera. + /// + /// + /// Stride indicates the length of each line in bytes and should be used to determine the start location of each + /// line of the image in memory. + /// + Depth16, + + /// + /// This format represents infrared light and is captured by the depth camera. + /// Each pixel of IR16 data is two bytes of little endian unsigned depth data. The value of the data represents + /// brightness. + /// + /// + /// Stride indicates the length of each line in bytes and should be used to determine the start location of each + /// line of the image in memory. + /// + IR16, + + /// + /// Custom image format. + /// Used in conjunction with user created images or images packing non-standard data. + /// + /// + /// See the originator of the custom formatted image for information on how to interpret the data. + /// + Custom, + } +} diff --git a/K4AdotNet/Sensor/ImuSample.cs b/K4AdotNet/Sensor/ImuSample.cs new file mode 100644 index 0000000..7a14859 --- /dev/null +++ b/K4AdotNet/Sensor/ImuSample.cs @@ -0,0 +1,35 @@ +using System.Runtime.InteropServices; + +namespace K4AdotNet.Sensor +{ + // Defined in k4atypes.h: + // typedef struct _k4a_imu_sample_t + // { + // float temperature; + // k4a_float3_t acc_sample; + // uint64_t acc_timestamp_usec; + // k4a_float3_t gyro_sample; + // uint64_t gyro_timestamp_usec; + // } k4a_imu_sample_t; + /// IMU sample. + [StructLayout(LayoutKind.Sequential)] + public struct ImuSample + { + /// Temperature reading of this sample (Celsius). 
+        public float Temperature;
+
+        /// Accelerometer sample in meters per second squared.
+        [MarshalAs(UnmanagedType.Struct)]
+        public Float3 AccelerometerSample;
+
+        /// Time stamp of the accelerometer.
+        public TimeStamp AccelerometerTimeStamp;
+
+        /// Gyro sample in radians per second.
+        [MarshalAs(UnmanagedType.Struct)]
+        public Float3 GyroSample;
+
+        /// Time stamp of the gyroscope.
+        // Fixed for consistency: was declared as a raw long of microseconds while the
+        // accelerometer time stamp uses TimeStamp. TimeStamp wraps the same 64-bit
+        // microsecond value (uint64_t in k4a_imu_sample_t) and converts implicitly
+        // to/from long, so the native layout and callers are unaffected.
+        public TimeStamp GyroTimeStamp;
+    }
+}
diff --git a/K4AdotNet/Sensor/WiredSyncMode.cs b/K4AdotNet/Sensor/WiredSyncMode.cs
new file mode 100644
index 0000000..59ff6ab
--- /dev/null
+++ b/K4AdotNet/Sensor/WiredSyncMode.cs
@@ -0,0 +1,25 @@
+namespace K4AdotNet.Sensor
+{
+    // Defined in k4atypes.h:
+    // typedef enum
+    // {
+    //     K4A_WIRED_SYNC_MODE_STANDALONE,
+    //     K4A_WIRED_SYNC_MODE_MASTER,
+    //     K4A_WIRED_SYNC_MODE_SUBORDINATE
+    // } k4a_wired_sync_mode_t;
+    /// Synchronization mode when connecting two or more devices together.
+    public enum WiredSyncMode
+    {
+        /// Neither 'Sync In' or 'Sync Out' connections are used.
+        Standalone = 0,
+
+        /// The 'Sync Out' jack is enabled and synchronization data is driven out the connected wire.
+        Master,
+
+        ///
+        /// The 'Sync In' jack is used for synchronization and 'Sync Out' is driven for the
+        /// next device in the chain. 'Sync Out' is a mirror of 'Sync In' for this mode.
+ /// + Subordinate, + } +} diff --git a/K4AdotNet/TimeStamp.cs b/K4AdotNet/TimeStamp.cs new file mode 100644 index 0000000..1e6b90e --- /dev/null +++ b/K4AdotNet/TimeStamp.cs @@ -0,0 +1,180 @@ +using System; + +namespace K4AdotNet +{ + public struct TimeStamp : + IEquatable, IEquatable, IEquatable, + IComparable, IComparable, IComparable, IComparable, + IFormattable + { + public long ValueUsec; + + public TimeStamp(long valueUsec) + => ValueUsec = valueUsec; + + public TimeStamp(TimeSpan value) + => ValueUsec = value.Ticks / UsecToTimeSpanTicksFactor; + + public TimeSpan ToTimeSpan() + => TimeSpan.FromTicks(ValueUsec * UsecToTimeSpanTicksFactor); + + public bool Equals(TimeStamp other) + => ValueUsec.Equals(other.ValueUsec); + + public bool Equals(TimeSpan other) + => Equals(new TimeStamp(other)); + + public bool Equals(long otherUsec) + => ValueUsec.Equals(otherUsec); + + public int CompareTo(TimeStamp other) + => ValueUsec.CompareTo(other.ValueUsec); + + public int CompareTo(TimeSpan other) + => CompareTo(new TimeStamp(other)); + + public int CompareTo(long otherUsec) + => ValueUsec.CompareTo(otherUsec); + + public int CompareTo(object obj) + { + if (obj is null) + return 1; + if (obj is TimeStamp) + return CompareTo((TimeStamp)obj); + if (obj is TimeSpan) + return CompareTo((TimeSpan)obj); + if (obj is IConvertible) + return CompareTo(Convert.ToInt64(obj)); + throw new ArgumentException("Object is not a TimeStamp or TimeSpan or integer number", nameof(obj)); + } + + public string ToString(string format, IFormatProvider formatProvider) + => ValueUsec.ToString(format, formatProvider) + " usec"; + + public override bool Equals(object obj) + { + if (obj is null) + return false; + if (obj is TimeStamp) + return Equals((TimeStamp)obj); + if (obj is TimeSpan) + return Equals((TimeSpan)obj); + if (obj is IConvertible) + return Equals(Convert.ToInt64(obj)); + return false; + } + + public override int GetHashCode() + => ValueUsec.GetHashCode(); + + public override string 
ToString() + => ValueUsec.ToString() + " usec"; + + public static bool operator ==(TimeStamp left, TimeStamp right) + => left.Equals(right); + + public static bool operator !=(TimeStamp left, TimeStamp right) + => !left.Equals(right); + + public static bool operator <(TimeStamp left, TimeStamp right) + => left.CompareTo(right) < 0; + + public static bool operator >(TimeStamp left, TimeStamp right) + => left.CompareTo(right) > 0; + + public static bool operator <=(TimeStamp left, TimeStamp right) + => left.CompareTo(right) <= 0; + + public static bool operator >=(TimeStamp left, TimeStamp right) + => left.CompareTo(right) >= 0; + + public static bool operator ==(TimeStamp left, TimeSpan right) + => left.Equals(right); + + public static bool operator !=(TimeStamp left, TimeSpan right) + => !left.Equals(right); + + public static bool operator <(TimeStamp left, TimeSpan right) + => left.CompareTo(right) < 0; + + public static bool operator >(TimeStamp left, TimeSpan right) + => left.CompareTo(right) > 0; + + public static bool operator <=(TimeStamp left, TimeSpan right) + => left.CompareTo(right) <= 0; + + public static bool operator >=(TimeStamp left, TimeSpan right) + => left.CompareTo(right) >= 0; + + public static bool operator ==(TimeSpan left, TimeStamp right) + => new TimeStamp(left).Equals(right); + + public static bool operator !=(TimeSpan left, TimeStamp right) + => !new TimeStamp(left).Equals(right); + + public static bool operator <(TimeSpan left, TimeStamp right) + => new TimeStamp(left).CompareTo(right) < 0; + + public static bool operator >(TimeSpan left, TimeStamp right) + => new TimeStamp(left).CompareTo(right) > 0; + + public static bool operator <=(TimeSpan left, TimeStamp right) + => new TimeStamp(left).CompareTo(right) <= 0; + + public static bool operator >=(TimeSpan left, TimeStamp right) + => new TimeStamp(left).CompareTo(right) >= 0; + + public static bool operator ==(TimeStamp left, long rightUsec) + => left.Equals(rightUsec); + + public 
static bool operator !=(TimeStamp left, long rightUsec) + => !left.Equals(rightUsec); + + public static bool operator <(TimeStamp left, long rightUsec) + => left.CompareTo(rightUsec) < 0; + + public static bool operator >(TimeStamp left, long rightUsec) + => left.CompareTo(rightUsec) > 0; + + public static bool operator <=(TimeStamp left, long rightUsec) + => left.CompareTo(rightUsec) <= 0; + + public static bool operator >=(TimeStamp left, long rightUsec) + => left.CompareTo(rightUsec) >= 0; + + public static bool operator ==(long leftUsec, TimeStamp right) + => new TimeStamp(leftUsec).Equals(right); + + public static bool operator !=(long leftUsec, TimeStamp right) + => !new TimeStamp(leftUsec).Equals(right); + + public static bool operator <(long leftUsec, TimeStamp right) + => new TimeStamp(leftUsec).CompareTo(right) < 0; + + public static bool operator >(long leftUsec, TimeStamp right) + => new TimeStamp(leftUsec).CompareTo(right) > 0; + + public static bool operator <=(long leftUsec, TimeStamp right) + => new TimeStamp(leftUsec).CompareTo(right) <= 0; + + public static bool operator >=(long leftUsec, TimeStamp right) + => new TimeStamp(leftUsec).CompareTo(right) >= 0; + + public static implicit operator TimeSpan(TimeStamp value) + => value.ToTimeSpan(); + + public static implicit operator TimeStamp(TimeSpan value) + => new TimeStamp(value); + + public static implicit operator long(TimeStamp value) + => value.ValueUsec; + + public static implicit operator TimeStamp(long valueUsec) + => new TimeStamp(valueUsec); + + public static readonly TimeStamp Zero = new TimeStamp(0); + + internal static readonly long UsecToTimeSpanTicksFactor = TimeSpan.TicksPerSecond / 1000000L; + } +} diff --git a/K4AdotNet/Timeout.cs b/K4AdotNet/Timeout.cs new file mode 100644 index 0000000..af47755 --- /dev/null +++ b/K4AdotNet/Timeout.cs @@ -0,0 +1,191 @@ +using System; +using System.Runtime.InteropServices; + +namespace K4AdotNet +{ + [StructLayout(LayoutKind.Sequential)] + public 
struct Timeout :
+        IEquatable<Timeout>, IEquatable<TimeSpan>, IEquatable<int>,
+        IComparable, IComparable<Timeout>, IComparable<TimeSpan>, IComparable<int>,
+        IFormattable
+    {
+        public int ValueMs;
+
+        public Timeout(int valueMs)
+            => ValueMs = valueMs;
+
+        public Timeout(TimeSpan value)
+            => ValueMs = checked((int)value.TotalMilliseconds);
+
+        public TimeSpan ToTimeSpan()
+            => TimeSpan.FromMilliseconds(ValueMs);
+
+        public bool Equals(Timeout other)
+            => ValueMs.Equals(other.ValueMs);
+
+        public bool Equals(TimeSpan other)
+            => Equals(new Timeout(other));
+
+        public bool Equals(int otherMs)
+            => ValueMs.Equals(otherMs);
+
+        // Comparison is performed on the unsigned representation so that
+        // Infinite (-1) compares greater than any finite timeout.
+        // Fixed: the original compared ValueMs against itself (always 0);
+        // it must compare against other.ValueMs.
+        public int CompareTo(Timeout other)
+            => unchecked((uint)ValueMs).CompareTo(unchecked((uint)other.ValueMs));
+
+        public int CompareTo(TimeSpan other)
+            => CompareTo(new Timeout(other));
+
+        public int CompareTo(int otherMs)
+            => CompareTo(new Timeout(otherMs));
+
+        public int CompareTo(object obj)
+        {
+            if (obj is null)
+                return 1;
+            if (obj is Timeout)
+                return CompareTo((Timeout)obj);
+            if (obj is TimeSpan)
+                return CompareTo((TimeSpan)obj);
+            if (obj is IConvertible)
+                return CompareTo(Convert.ToInt32(obj));
+            throw new ArgumentException("Object is not a Timeout or TimeSpan or integer number", nameof(obj));
+        }
+
+        public string ToString(string format, IFormatProvider formatProvider)
+        {
+            if (ValueMs < 0)
+                return nameof(Infinite);
+            if (ValueMs == 0)
+                return nameof(NoWait);
+            return ValueMs.ToString(format, formatProvider) + " ms";
+        }
+
+        public override bool Equals(object obj)
+        {
+            if (obj is null)
+                return false;
+            if (obj is Timeout)
+                return Equals((Timeout)obj);
+            if (obj is TimeSpan)
+                return Equals((TimeSpan)obj);
+            if (obj is IConvertible)
+                return Equals(Convert.ToInt32(obj));
+            return false;
+        }
+
+        public override int GetHashCode()
+            => ValueMs;
+
+        public override string ToString()
+        {
+            if (ValueMs < 0)
+                return nameof(Infinite);
+            if (ValueMs == 0)
+                return nameof(NoWait);
+            return ValueMs.ToString() + " ms";
+        }
+
+        public static bool operator ==(Timeout left, Timeout right)
+            => left.Equals(right);
+
+        public static bool operator !=(Timeout left, Timeout right)
+            => !left.Equals(right);
+
+        public static bool operator <(Timeout left, Timeout right)
+            => left.CompareTo(right) < 0;
+
+        public static bool operator <=(Timeout left, Timeout right)
+            => left.CompareTo(right) <= 0;
+
+        // Fixed: operator > previously tested CompareTo(right) < 0 (copy-paste bug).
+        public static bool operator >(Timeout left, Timeout right)
+            => left.CompareTo(right) > 0;
+
+        public static bool operator >=(Timeout left, Timeout right)
+            => left.CompareTo(right) >= 0;
+
+        public static bool operator ==(Timeout left, TimeSpan right)
+            => left.Equals(right);
+
+        public static bool operator !=(Timeout left, TimeSpan right)
+            => !left.Equals(right);
+
+        public static bool operator <(Timeout left, TimeSpan right)
+            => left.CompareTo(right) < 0;
+
+        public static bool operator <=(Timeout left, TimeSpan right)
+            => left.CompareTo(right) <= 0;
+
+        // Fixed: operator > previously tested CompareTo(right) < 0 (copy-paste bug).
+        public static bool operator >(Timeout left, TimeSpan right)
+            => left.CompareTo(right) > 0;
+
+        public static bool operator >=(Timeout left, TimeSpan right)
+            => left.CompareTo(right) >= 0;
+
+        public static bool operator ==(TimeSpan left, Timeout right)
+            => new Timeout(left).Equals(right);
+
+        public static bool operator !=(TimeSpan left, Timeout right)
+            => !new Timeout(left).Equals(right);
+
+        public static bool operator <(TimeSpan left, Timeout right)
+            => new Timeout(left).CompareTo(right) < 0;
+
+        public static bool operator <=(TimeSpan left, Timeout right)
+            => new Timeout(left).CompareTo(right) <= 0;
+
+        // Fixed: operator > previously tested CompareTo(right) < 0 (copy-paste bug).
+        public static bool operator >(TimeSpan left, Timeout right)
+            => new Timeout(left).CompareTo(right) > 0;
+
+        public static bool operator >=(TimeSpan left, Timeout right)
+            => new Timeout(left).CompareTo(right) >= 0;
+
+        public static bool operator ==(Timeout left, int rightMs)
+            => left.Equals(rightMs);
+
+        public static bool operator !=(Timeout left, int rightMs)
+            => !left.Equals(rightMs);
+
+        public static bool operator <(Timeout left, int rightMs)
+            => left.CompareTo(rightMs) < 0;
+
+        public
static bool operator <=(Timeout left, int rightMs)
+            => left.CompareTo(rightMs) <= 0;
+
+        // Fixed: operator > previously tested CompareTo(rightMs) < 0 (copy-paste bug).
+        public static bool operator >(Timeout left, int rightMs)
+            => left.CompareTo(rightMs) > 0;
+
+        public static bool operator >=(Timeout left, int rightMs)
+            => left.CompareTo(rightMs) >= 0;
+
+        public static bool operator ==(int leftMs, Timeout rightMs)
+            => new Timeout(leftMs).Equals(rightMs);
+
+        public static bool operator !=(int leftMs, Timeout rightMs)
+            => !new Timeout(leftMs).Equals(rightMs);
+
+        public static bool operator <(int leftMs, Timeout right)
+            => new Timeout(leftMs).CompareTo(right) < 0;
+
+        public static bool operator <=(int leftMs, Timeout right)
+            => new Timeout(leftMs).CompareTo(right) <= 0;
+
+        // Fixed: operator > previously tested CompareTo(right) < 0 (copy-paste bug).
+        public static bool operator >(int leftMs, Timeout right)
+            => new Timeout(leftMs).CompareTo(right) > 0;
+
+        public static bool operator >=(int leftMs, Timeout right)
+            => new Timeout(leftMs).CompareTo(right) >= 0;
+
+        public static implicit operator TimeSpan(Timeout value)
+            => value.ToTimeSpan();
+
+        public static implicit operator Timeout(TimeSpan value)
+            => new Timeout(value);
+
+        public static implicit operator Timeout(int value)
+            => new Timeout(value);
+
+        public static readonly Timeout NoWait = new Timeout(0);
+
+        public static readonly Timeout Infinite = new Timeout(-1);
+    }
+}
diff --git a/LICENSE b/LICENSE
index e25a304..15bc72f 100644
--- a/LICENSE
+++ b/LICENSE
@@ -1,6 +1,6 @@
 MIT License
 
-Copyright (c) 2019 bibigone
+Copyright (c) 2019 
 
 Permission is hereby granted, free of charge, to any person obtaining a copy
 of this software and associated documentation files (the "Software"), to deal
diff --git a/README.md b/README.md
index 850e5d3..e21e52b 100644
--- a/README.md
+++ b/README.md
@@ -1,2 +1,4 @@
 # k4a.net
 K4A.Net - Simple .Net wrapper for Kinect for Azure (K4A) Sensor SDK and Body Tracking SDK
+
+Current status: under development.