I have a UYVY data buffer coming from a camera, and I'm pushing that buffer into a graph using the GSSF DirectShow.net filter.
The graph at the moment is
GSSF -> YUV Transform -> AVI Splitter -> Video Renderer
The graph works out the colours correctly and displays them correctly, but there are bars in the image that shouldn't be there, and I'm not sure where they come from. Staring at the image makes my eyes hurt.
This function takes the UYVY buffer (mainbyte) and copies it into an integer array:
unsafe public void ImageFromPixels__()
{
    byte[] x = mainbyte;
    long fff = 720 * 1280;
    mainptr = new IntPtr(fff);

    for (int p = 0; p < 720 * 1280; p++)
    {
        U = (x[p * 4 + 0]);
        Y = (x[p * 4 + 1]);
        V = (x[p * 4 + 2]);
        Y2 = (x[p * 4 + 3]);

        // int one = V << 16 | Y << 8 | U;
        // int two = V << 16 | Y2 << 8 | U;
        int one = Y2 << 24 | V << 16 | Y << 8 | U;

        // mainint[p * 2 + 0] = one;
        // mainint[p * 2 + 1] = two;
        mainint[p] = one;
    }

    m_FPS = UNIT / 20;
    m_b = 211;
    m_g = 197;
}
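For reference, in UYVY each 4-byte group (U0 Y0 V0 Y1) encodes two horizontally adjacent pixels, so a full 1280x720 frame is (1280 / 2) * 720 = 460,800 four-byte groups rather than 720 * 1280. Below is a minimal sketch of a copy loop sized that way; it reuses the mainbyte/mainint names from above and is only a guess at the intended layout, not necessarily what the downstream filters require.

// Sketch only: copy one full 1280x720 UYVY frame, one 4-byte macropixel (two pixels) per iteration.
// Assumes mainbyte holds WIDTH * HEIGHT * 2 bytes and mainint has at least (WIDTH / 2) * HEIGHT elements.
public void ImageFromPixelsSketch()
{
    const int WIDTH = 1280;
    const int HEIGHT = 720;
    int macroPixels = (WIDTH / 2) * HEIGHT; // 460,800 groups of U Y0 V Y1 per frame

    for (int p = 0; p < macroPixels; p++)
    {
        byte u  = mainbyte[p * 4 + 0];
        byte y0 = mainbyte[p * 4 + 1];
        byte v  = mainbyte[p * 4 + 2];
        byte y1 = mainbyte[p * 4 + 3];

        // Repack in the same U Y0 V Y1 byte order the buffer arrived in.
        mainint[p] = y1 << 24 | v << 16 | y0 << 8 | u;
    }
}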
This function takes that same integer array and packs it into the GSSF stream pointer:
override unsafe public int GetImage(int iFrameNumber, IntPtr ip, int iSize, out int iRead)
{
    int hr = 0;

    if (iFrameNumber > -1)
    {
        if (iFrameNumber < MAXFRAMES)
        {
            ImageFromPixels_(20, mainbyte);
            m_g += 3;
            m_b += 7;

            int* bp = (int*)ip.ToPointer();
            Random k = new Random();
            StreamReader s = new StreamReader("jpegFile.txt");

            for (int f = 0; f < HEIGHT; f++)
            {
                for (int x = 0; x < (WIDTH); x += 1)
                {
                    *(bp + (f * WIDTH) + x) = mainint[f * 1280 + x];
                }
            }
        }
        else
        {
            hr = 1; // End of stream
        }
    }

    iRead = iSize;
    return hr;
}
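As a side note, when the packed ints already match the layout the output pin expects, the nested pointer loop can be replaced with a single bulk copy. A minimal sketch, assuming mainint holds the whole frame and iSize is at least mainint.Length * 4 bytes (Marshal.Copy here is the standard System.Runtime.InteropServices overload for int[]):

// Sketch only: bulk-copy the packed ints into the GSSF sample buffer instead of writing int by int.
int intsToCopy = Math.Min(mainint.Length, iSize / 4); // don't run past the sample buffer
Marshal.Copy(mainint, 0, ip, intsToCopy);
iRead = intsToCopy * 4; // report the number of bytes actually written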
This sets the bitmap compression for the GSSF's output pin. I think I may be doing something wrong here, but it looks correct to me:
override public void SetMediaType(IGenericSampleConfig psc)
{
    BitmapInfoHeader bmi = new BitmapInfoHeader();

    // Build a BitmapInfo struct using the parms from the file
    bmi.Size = Marshal.SizeOf(typeof(BitmapInfoHeader));
    bmi.Width = WIDTH;
    bmi.Height = HEIGHT * -1;
    bmi.Planes = 1;
    bmi.BitCount = BPP;
    bmi.Compression = 0x59565955; // UYVY
    bmi.ImageSize = (bmi.BitCount / 8) * bmi.Width * bmi.Height;
    bmi.XPelsPerMeter = 0;
    bmi.YPelsPerMeter = 0;
    bmi.ClrUsed = 0;
    bmi.ClrImportant = 0;

    int hr = psc.SetMediaTypeFromBitmap(bmi, m_FPS);
    DsError.ThrowExceptionForHR(hr);
}
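One thing worth double-checking in SetMediaType: with Height set to HEIGHT * -1, the ImageSize expression above comes out negative. A small arithmetic sanity check, assuming 1280x720 UYVY at BPP = 16 (the positive-size convention at the end is my assumption about what the allocator wants, not something taken from the GSSF sample):

// Sketch only: expected buffer sizes for a 1280x720 UYVY frame at 16 bits per pixel.
const int WIDTH = 1280;
const int HEIGHT = 720;
const int BPP = 16;

int strideBytes = WIDTH * BPP / 8;                   // 2,560 bytes per row
int frameBytes  = strideBytes * HEIGHT;              // 1,843,200 bytes per frame
int asCoded     = (BPP / 8) * WIDTH * (HEIGHT * -1); // -1,843,200 with the negative Height above

// A positive size, e.g. strideBytes * Math.Abs(HEIGHT * -1), is what I'd expect the allocator to use.
Console.WriteLine($"stride={strideBytes}, frame={frameBytes}, as-coded ImageSize={asCoded}");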
UPDATE: I changed it to this:
override unsafe public int GetImage(int iFrameNumber, IntPtr ip, int iSize, out int iRead)
{
    int hr = 0;

    if (iFrameNumber > -1)
    {
        if (iFrameNumber < MAXFRAMES)
        {
            ImageFromPixels_(20, mainbyte);
            m_g += 3;
            m_b += 7;

            int* bp = (int*)ip.ToPointer();
            Random k = new Random();
            StreamReader s = new StreamReader("jpegFile.txt");

            for (int f = 0; f < 720; f++)
            {
                for (int x = 0; x < (1280); x += 1)
                {
                    *(bp + (f * 1280) + x) = mainint[f * 1280 + x];
                }
            }
        }
        else
        {
            hr = 1; // End of stream
        }
    }
    iRead = iSize;
    return hr;
}

override public void SetMediaType(IGenericSampleConfig psc)
{
    BitmapInfoHeader bmi = new BitmapInfoHeader();

    // Build a BitmapInfo struct using the parms from the file
    bmi.Size = Marshal.SizeOf(typeof(BitmapInfoHeader));
    bmi.Width = WIDTH;
    bmi.Height = HEIGHT * -1;
    bmi.Planes = 1;
    bmi.BitCount = BPP;
    bmi.Compression = 0x59565955; // UYVY
    bmi.ImageSize = (bmi.BitCount / 8) * bmi.Width * bmi.Height;
    bmi.XPelsPerMeter = 0;
    bmi.YPelsPerMeter = 0;
    bmi.ClrUsed = 0;
    bmi.ClrImportant = 0;

    int hr = psc.SetMediaTypeFromBitmap(bmi, m_FPS);
    DsError.ThrowExceptionForHR(hr);
}
unsafe public void ImageFromPixels_(long FPS, byte[] x)
{
    long fff = 720 * 1280 * 3;
    mainptr = new IntPtr(fff);

    for (int p = 0; p < 720 * 640; p++)
    {
        U = (x[p * 4 + 0]);
        Y = (x[p * 4 + 1]);
        V = (x[p * 4 + 2]);
        Y2 = (x[p * 4 + 3]);

        int one = Y2 << 24 | V << 16 | Y << 8 | U;
        // int one = V << 16 | Y << 8 | U;
        // int two = V << 16 | Y2 << 8 | U;
        // mainint[p * 2 + 0] = one;
        // mainint[p * 2 + 1] = two;
        mainint[p] = one;
    }

    m_FPS = UNIT / FPS;
    m_b = 211;
    m_g = 197;
}
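For comparison, the loop above fills 720 * 640 = 460,800 ints, the first version filled 720 * 1280 = 921,600, and one full 1280x720 UYVY frame occupies exactly 460,800 four-byte groups, so the GetImage above still copies out twice that many ints. Whether that mismatch has anything to do with the bars is only a guess on my part; the sketch below is just the arithmetic taken from the code:

// Sketch only: the frame-size arithmetic behind the loop bounds used above.
const int WIDTH = 1280;
const int HEIGHT = 720;

int frameBytes   = WIDTH * HEIGHT * 2; // 1,843,200 bytes at 16 bpp
int intsPerFrame = frameBytes / 4;     // 460,800 = (WIDTH / 2) * HEIGHT four-byte groups
int fillLoop     = 720 * 640;          // 460,800 ints written by ImageFromPixels_ above
int copyLoop     = 720 * 1280;         // 921,600 ints copied out by GetImage above

Console.WriteLine($"frame needs {intsPerFrame} ints; fill loop writes {fillLoop}, GetImage copies {copyLoop}");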
If I change the other numbers in GetImage to change the height of the video, or if I change them in ImagePixel, I just get a black screen :|