This is all being done inside a Unity project.
I'm currently trying to create a new Azure Kinect Capture object and fill it with the color, depth, and IR frame data produced by the Kinect v2. Since an Azure Kinect camera isn't supposed to be connected in this setup, I built a fake Azure Kinect Calibration and use it to create the Azure Kinect body tracking Tracker.
I've run into a problem: if the data pulled from the Kinect v2 is successfully enqueued, the project hangs, and if I call pop on the enqueued data, the project hangs as soon as it starts running. I added timeouts to both the enqueue and pop calls, which fixed the freezing, but the popped body tracking Frame never contains any bodies. I've set up a scene that visualizes the depth data to make sure it isn't distorted or garbled, and it looks fine.
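(For context, the depth sanity check is nothing sophisticated — just something along the lines of the sketch below. DepthPreview, the target Renderer, and the 4500 mm normalization are illustrative names/values rather than the exact script; the idea is simply to map the 512x424 ushort depth buffer to a grayscale Texture2D.)

using UnityEngine;

public class DepthPreview : MonoBehaviour {
    public Renderer target;          // quad the preview texture is shown on
    private Texture2D depthTexture;

    // Convert the ushort depth buffer (millimeters) into a grayscale texture for a visual check.
    public void ShowDepth(ushort[] depthBuffer, int width = 512, int height = 424) {
        if (depthTexture == null)
            depthTexture = new Texture2D(width, height, TextureFormat.RGBA32, false);

        var pixels = new Color32[width * height];
        for (int i = 0; i < pixels.Length; i++) {
            // Map 0..4500 mm (roughly the K2 depth range) to 0..255 grayscale.
            byte v = (byte)Mathf.Clamp(depthBuffer[i] / 4500f * 255f, 0f, 255f);
            pixels[i] = new Color32(v, v, v, 255);
        }
        depthTexture.SetPixels32(pixels);
        depthTexture.Apply();
        target.material.mainTexture = depthTexture;
    }
}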
Before I keep trying to make this work, I'd like to know whether I'm missing something here, or whether what I'm attempting is even possible.
Fake calibration:
Calibration cal = new Calibration {
DepthCameraCalibration = new CameraCalibration {
Extrinsics = new Extrinsics {
Rotation = new float[] { 1.0f, 0.0f, 0.0f, 0.0f, 1.0f, 0.0f, 0.0f, 0.0f, 1.0f },
Translation = new float[] { 0.0f, 0.0f, 0.0f }
},
Intrinsics = new Intrinsics {
Type = CalibrationModelType.BrownConrady,
ParameterCount = 14,
Parameters = new float[] { 264.902374f, 261.016541f, 251.993011f, 252.0128f, 0.5496079f, -0.0305904336f, -0.00340628251f, 0.893285751f, 0.07668319f, -0.01748066f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f }
},
ResolutionWidth = 512,
ResolutionHeight = 512,
MetricRadius = 1.73999977f
},
ColorCameraCalibration = new CameraCalibration {
Extrinsics = new Extrinsics {
Rotation = new float[] { 0.9999973f, 0.00189682352f, -0.00130836014f, -0.00179401657f, 0.997216046f, 0.07454452f, 0.00144611555f, -0.07454198f, 0.9972168f },
Translation = new float[] { -32.1138039f, -2.46932817f, 3.97587371f }
},
Intrinsics = new Intrinsics {
Type = CalibrationModelType.BrownConrady,
ParameterCount = 14,
Parameters = new float[] { 957.2569f, 551.9336f, 913.142334f, 913.1438f, 0.4421505f, -2.83680415f, 1.73018765f, 0.32017225f, -2.644007f, 1.643955f, 0.0f, 0.0f, -0.000281378743f, 0.000288581447f, 0.0f }
},
ResolutionWidth = 1920,
ResolutionHeight = 1080,
MetricRadius = 1.7f
},
DeviceExtrinsics = new Extrinsics[] { //Device Extrinsics calibration chunk
new Extrinsics(){ Rotation = new float[] { 1.0f, 0.0f, 0.0f, 0.0f, 1.0f, 0.0f, 0.0f, 0.0f, 1.0f }, Translation = new float[] { 0.0f, 0.0f, 0.0f } },
new Extrinsics(){ Rotation = new float[] { 0.9999973f, 0.00189682352f, -0.00130836014f, -0.00179401657f, 0.997216046f, 0.07454452f, 0.00144611555f, -0.07454198f, 0.9972168f }, Translation = new float[] { -32.1138039f, -2.46932817f, 3.97587371f } },
new Extrinsics(){ Rotation = new float[] { -0.000347044057f, 0.110655256f, -0.9938588f, -0.999971569f, -0.007524097f, -0.000488546968f, -0.00753195f, 0.9938304f, 0.110654727f }, Translation = new float[] { 0.0f, 0.0f, 0.0f } },
new Extrinsics(){ Rotation = new float[] { 0.00211483915f, 0.106267117f, -0.994335353f, -0.999981642f, -0.005419674f, -0.00270606228f, -0.00567653868f, 0.994322836f, 0.1062537f }, Translation = new float[] { -51.137455f, 3.33257771f, 0.7745425f } },
new Extrinsics(){ Rotation = new float[] { 0.9999973f, -0.00179401657f, 0.00144611555f, 0.00189682352f, 0.997216046f, -0.07454198f, -0.00130836014f, 0.07454452f, 0.9972168f }, Translation = new float[] { 32.10354f, 2.81973743f, -3.82274985f } },
new Extrinsics(){ Rotation = new float[] { 0.99999994f, 0.0f, 0.0f, 0.0f, 0.99999994f, 0.0f, 0.0f, 0.0f, 1.0f }, Translation = new float[] { 0.0f, 0.0f, 0.0f } },
new Extrinsics(){ Rotation = new float[] { 0.00116317568f, 0.0362610966f, -0.9993417f, -0.9999825f, -0.005745603f, -0.00137240067f, -0.00579158543f, 0.9993258f, 0.03625378f }, Translation = new float[] { 4.100151f, -32.1219749f, 2.13753319f } },
new Extrinsics(){ Rotation = new float[] { 0.00361735234f, 0.0318452343f, -0.999486268f, -0.9999857f, -0.00381232449f, -0.00374062685f, -0.0039294865f, 0.9994855f, 0.0318309739f }, Translation = new float[] { -46.96882f, -28.77531f, 2.98985362f } },
new Extrinsics(){ Rotation = new float[] { -0.000347044057f, -0.999971569f, -0.00753195f, 0.110655256f, -0.007524097f, 0.9938304f, -0.9938588f, -0.000488546968f, 0.110654727f }, Translation = new float[] { 0.0f, 0.0f, 0.0f } },
new Extrinsics(){ Rotation = new float[] { 0.00116317568f, -0.9999825f, -0.00579158543f, 0.0362610966f, -0.005745603f, 0.9993258f, -0.9993417f, -0.00137240067f, 0.03625378f }, Translation = new float[] { -32.1138039f, -2.46932817f, 3.97587371f } },
new Extrinsics(){ Rotation = new float[] { 1.00000012f, 0.0f, 0.0f, 0.0f, 1.0f, 0.0f, 0.0f, 0.0f, 1.00000012f }, Translation = new float[] { 0.0f, 0.0f, 0.0f } },
new Extrinsics(){ Rotation = new float[] { 0.999987245f, -0.00242856354f, -0.0044323504f, 0.002436766f, 0.9999953f, 0.00184613629f, 0.00442783535f, -0.00185691414f, 0.9999885f }, Translation = new float[] { -51.137455f, 3.33257771f, 0.7745425f } },
new Extrinsics(){ Rotation = new float[] { 0.00211483915f, -0.999981642f, -0.00567653868f, 0.106267117f, -0.005419674f, 0.994322836f, -0.994335353f, -0.00270606228f, 0.1062537f }, Translation = new float[] { 3.44506049f, 4.682146f, -50.92106f } },
new Extrinsics(){ Rotation = new float[] { 0.00361735234f, -0.9999857f, -0.0039294865f, 0.0318452343f, -0.00381232449f, 0.9994855f, -0.999486268f, -0.00374062685f, 0.0318309739f }, Translation = new float[] { -28.5932484f, -1.602283f, -47.1475f } },
new Extrinsics(){ Rotation = new float[] { 0.999987245f, 0.002436766f, 0.00442783535f, -0.00242856354f, 0.9999953f, -0.00185691414f, -0.0044323504f, 0.00184613629f, 0.9999885f }, Translation = new float[] { 51.125248f, -3.45531416f, -1.0073452f } },
new Extrinsics(){ Rotation = new float[] { 0.99999994f, 0.0f, 0.0f, 0.0f, 1.0f, 0.0f, 0.0f, 0.0f, 1.0f }, Translation = new float[] { 0.0f, 0.0f, 0.0f } }
},
DepthMode = DepthMode.WFOV_2x2Binned,
ColorResolution = ColorResolution.R1080p
};
tracker = Tracker.Create(cal, new TrackerConfiguration() {
SensorOrientation = SensorOrientation.Default,
ProcessingMode = TrackerProcessingMode.Gpu,
GpuDeviceId = 0
});
Depth to body tracking process:
timeSinceStart = DateTime.Now.Subtract(timeOfStart);
colorFrame = colorFrameReader.AcquireLatestFrame();
depthFrame = depthFrameReader.AcquireLatestFrame();
irFrame = infraredFrameReader.AcquireLatestFrame();
KinectCapture capture = new KinectCapture();
if (colorFrame != null) {
Image colorImage = new Image(ImageFormat.ColorBGRA32, colorFrame.FrameDescription.Width, colorFrame.FrameDescription.Height) {
DeviceTimestamp = timeSinceStart
};
colorImage.SystemTimestampNsec = timeSinceStart.Ticks * 100; // 1 tick = 100 ns
if (colorBuffer == null) {
FrameDescription description = colorFrame.ColorFrameSource.FrameDescription;
colorBuffer = new byte[4 * description.Width * description.Height]; // BGRA32 = 4 bytes per pixel
}
// The K2 color stream is YUY2 in raw form, so request a BGRA-converted copy and
// blit it straight into the Azure Kinect image buffer instead of writing pixels one at a time.
colorFrame.CopyConvertedFrameDataToArray(colorBuffer, ColorImageFormat.Bgra);
colorBuffer.AsSpan().CopyTo(colorImage.Memory.Span);
capture.Color = colorImage;
colorFrame.Dispose();
}
if (depthFrame != null) {
Image depthImage = new Image(ImageFormat.Depth16, 512, 512);
depthImage.DeviceTimestamp = timeSinceStart;
depthImage.SystemTimestampNsec = timeSinceStart.Ticks * 100; // 1 tick = 100 ns
if (depthBuffer == null) {
var description = depthFrame.DepthFrameSource.FrameDescription;
depthBuffer = new ushort[description.Width * description.Height];
}
depthFrame.CopyFrameDataToArray(depthBuffer);
// K2 depth frames are 512x424; the WFOV_2x2Binned image is 512x512, so pad the extra rows with zeros.
// SetPixel takes (row, col) and the buffer is row-major, so row = i / 512 and col = i % 512.
for (int i = 0; i < 262144; i++) {
depthImage.SetPixel<ushort>(i / 512, i % 512, i < depthBuffer.Length ? depthBuffer[i] : (ushort)0);
}
capture.Depth = depthImage;
depthFrame.Dispose();
}
if (irFrame != null) {
Image irImage = new Image(ImageFormat.IR16, 512, 512);
irImage.DeviceTimestamp = timeSinceStart;
irImage.SystemTimestampNsec = timeSinceStart.Ticks * 100; // 1 tick = 100 ns
if (irBuffer == null) {
var description = irFrame.InfraredFrameSource.FrameDescription;
irBuffer = new ushort[description.Width * description.Height];
}
irFrame.CopyFrameDataToArray(irBuffer); // was copying into depthBuffer, which left irBuffer all zeros
// Same row/col mapping and zero padding as the depth image.
for (int i = 0; i < 262144; i++) {
irImage.SetPixel<ushort>(i / 512, i % 512, i < irBuffer.Length ? irBuffer[i] : (ushort)0);
}
capture.IR = irImage;
irFrame.Dispose();
}
capture.Temperature = 30.0f;
try {
if(capture.Color != null && capture.Depth != null && capture.IR != null)
{
tracker.EnqueueCapture(capture, new TimeSpan(0, 0, 0, 0, 50)); // 50 ms enqueue timeout
Debug.Log("Successful Enqueue");
}
} catch (Exception ex) {
Debug.Log($"Failed to enqueue\n{ex.Message}");
}
try {
kFrame = tracker.PopResult(new TimeSpan(0, 0, 0, 15)); // 15 second pop timeout
Debug.Log("Bodies in frame: " + kFrame.NumberOfBodies);
}
catch (Exception ex) {
Debug.Log($"Failed to pop from queue\n{ex.Message}");
}
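In case it's relevant, once a popped frame actually reports NumberOfBodies > 0, this is roughly how I expect to read the result back. This is a sketch only — I can't verify it yet since no bodies ever come through, and I'm assuming Skeleton.GetJoint(JointId) is the right joint accessor in this SDK version:

if (kFrame != null) {
    for (uint i = 0; i < kFrame.NumberOfBodies; i++) {
        var skeleton = kFrame.GetBodySkeleton(i);
        // Assumed accessor; joint positions are in millimeters, in the depth camera's coordinate space.
        var head = skeleton.GetJoint(JointId.Head);
        Debug.Log($"Body {kFrame.GetBodyId(i)}: head at {head.Position}");
    }
    kFrame.Dispose(); // the frame wraps native memory and should be released when done
}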