For quite a while now I've been trying to align the depth stream with the RGB stream of the Kinect. I've read several articles about it, but I must be missing some key point, because I can't get it to work...
Here is a picture of what I've managed so far; I've circled some of the easy-to-spot misalignments.
I've tried to boil it down to as little code as possible, but it's still a fairly big chunk, so please bear with me. The snippet below is the handler the Kinect SDK calls every time a depth frame and an RGB frame are ready.
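For completeness, the sensor side is set up roughly like this (just a sketch of the relevant calls; _Sensor is my KinectSensor instance):

// sketch: enable both streams at 640x480 and hook up the handler shown below
_Sensor.ColorStream.Enable(ColorImageFormat.RgbResolution640x480Fps30);
_Sensor.DepthStream.Enable(DepthImageFormat.Resolution640x480Fps30);
_Sensor.AllFramesReady += EventAllFramesReady;
_Sensor.Start();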
As you can see in the handler, what I've been trying is
ColorImagePoint colorpoint = _Sensor.CoordinateMapper.MapDepthPointToColorPoint(DepthImageFormat.Resolution640x480Fps30, depthpoint, ColorImageFormat.RgbResolution640x480Fps30);
I would rather use CoordinateMapper.MapDepthFrameToColorFrame (since that should do the whole job in one call), but I can't get it to work... I'm probably not using it correctly...
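Roughly what I tried with it looks like this (just a sketch, using the same depthData array that the handler below fills from the depth frame):

ColorImagePoint[] colorpoints = new ColorImagePoint[depthData.Length];
_Sensor.CoordinateMapper.MapDepthFrameToColorFrame(
    DepthImageFormat.Resolution640x480Fps30,
    depthData,
    ColorImageFormat.RgbResolution640x480Fps30,
    colorpoints);
// colorpoints[i] should then hold the color-space coordinates for depth pixel i,
// but I couldn't turn that into a correctly aligned image either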
I'm using Microsoft's Kinect SDK 1.6.
private void EventAllFramesReady(Object Sender, AllFramesReadyEventArgs e)
{
    System.Drawing.Color color;
    Bitmap image = null;
    Bitmap depth = null;

    using (ColorImageFrame colorFrame = e.OpenColorImageFrame())
    {
        using (DepthImageFrame depthFrame = e.OpenDepthImageFrame())
        {
            // color
            image = new Bitmap(colorFrame.Width, colorFrame.Height);
            byte[] colorPixels = new byte[colorFrame.PixelDataLength];
            colorFrame.CopyPixelDataTo(colorPixels);
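            // colorPixels now holds 4 bytes per pixel (B, G, R, then one unused byte) for this color format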
            //lock bitmap, and work with BitmapData (way faster than SetPixel())
            BitmapData imageBitmapData = image.LockBits(new Rectangle(0, 0, image.Width, image.Height),
                                                        ImageLockMode.WriteOnly,
                                                        image.PixelFormat);
            IntPtr IptrImage = imageBitmapData.Scan0;
            byte[] PixelsImage = new byte[image.Width * image.Height * 4];

            // depth
            depth = new Bitmap(depthFrame.Width, depthFrame.Height);
            DepthImagePixel[] depthData = new DepthImagePixel[depthFrame.PixelDataLength];
            depthFrame.CopyDepthImagePixelDataTo(depthData);

            //lock bitmap, and work with BitmapData (way faster than SetPixel())
            BitmapData depthBitmapData = depth.LockBits(new Rectangle(0, 0, depth.Width, depth.Height),
                                                        ImageLockMode.WriteOnly,
                                                        depth.PixelFormat);
            IntPtr IptrDepth = depthBitmapData.Scan0;
            byte[] PixelsDepth = new byte[depth.Width * depth.Height * 4];
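            // PixelsImage and PixelsDepth are the 32bpp (BGRA) buffers copied into the two bitmaps at the end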
            DepthImagePoint depthpoint = new DepthImagePoint();

            for (int x = 1; x < colorFrame.Width; x++)
            {
                for (int y = 1; y < colorFrame.Height; y++)
                {
                    int i = ((y * image.Width) + x) * 4;
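                    // raw distance reported by the sensor for this depth pixel, in millimetres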
                    short depthdistanceRAW = (depthData[x + y * depth.Width]).Depth;

                    // convert distance value into a color
                    color = System.Drawing.Color.Pink;
                    if (depthdistanceRAW > 0 && depthdistanceRAW <= 4000)
                    {
                        int depthdistance = (int)((depthdistanceRAW / 4090f) * 255f);
                        color = System.Drawing.Color.FromArgb((int)(depthdistance / 2f), depthdistance, (int)(depthdistance * 0.7f));
                    }

                    depthpoint.X = x;
                    depthpoint.Y = y;
                    depthpoint.Depth = depthdistanceRAW;
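                    // ask the SDK to map this depth-image pixel to the matching pixel in the color image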
                    ColorImagePoint colorpoint = _Sensor.CoordinateMapper.MapDepthPointToColorPoint(DepthImageFormat.Resolution640x480Fps30, depthpoint, ColorImageFormat.RgbResolution640x480Fps30);

                    //if (colorpoint.X > 0 && colorpoint.X <= 640 && colorpoint.Y > 0 && colorpoint.Y <= 480)
                    //{
                        int adjustedposition = ((colorpoint.Y * image.Width) + colorpoint.X) * 4;
                        //if (adjustedposition < depthData.Length)
                        //{
                            PixelsDepth[i] = color.B;
                            PixelsDepth[i + 1] = color.G;
                            PixelsDepth[i + 2] = color.R;
                            PixelsDepth[i + 3] = DepthTransparency;
                        //}
                    //}

                    PixelsImage[i] = colorPixels[i];
                    PixelsImage[i + 1] = colorPixels[i + 1];
                    PixelsImage[i + 2] = colorPixels[i + 2];
                    PixelsImage[i + 3] = 255;
                }
            }

            Marshal.Copy(PixelsImage, 0, IptrImage, PixelsImage.Length);
            image.UnlockBits(imageBitmapData);

            Marshal.Copy(PixelsDepth, 0, IptrDepth, PixelsDepth.Length);
            depth.UnlockBits(depthBitmapData);
        }
    }

    _kf.UpdateImage(image); // update the RGB picture in the form
    _kf.UpdateDepth(depth); // update the Depth picture in the form
}