The release notes mention that the color data can't be accessed in Unity yet, but I believe you could attach a callback to the video overlay listener, receive the YV12 byte array in Unity, and manually convert it to RGB565 for display on screen, perhaps with a shader.
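
For reference, the kind of conversion I have in mind would be something like this untested sketch (standard BT.601 integer coefficients; the class and method names are mine):

using System;

static class Yuv
{
    // BT.601-style integer YUV -> RGB, packed into a 16-bit 5-6-5 value.
    public static ushort ToRgb565(byte y, byte u, byte v)
    {
        int c = y - 16, d = u - 128, e = v - 128;
        int r = Math.Min(Math.Max((298 * c + 409 * e + 128) >> 8, 0), 255);
        int g = Math.Min(Math.Max((298 * c - 100 * d - 208 * e + 128) >> 8, 0), 255);
        int b = Math.Min(Math.Max((298 * c + 516 * d + 128) >> 8, 0), 255);
        // Pack as RRRRRGGGGGGBBBBB.
        return (ushort)(((r >> 3) << 11) | ((g >> 2) << 5) | (b >> 3));
    }
}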

Has anyone successfully accessed and displayed the color data in Unity (e.g., in an AR overlay)? If so, could you share some pointers/code on how to get started?

4 Answers

The video overlay callback works in Unity. In theory, you could write YUV-to-RGBA shaders and render the image in Unity. The only downside is that it requires multiple "memcpy" operations due to the way Unity handles texture/color formatting.

The YV12 data packing used by Tango is explained in the C-API header: https://developers.google.com/project-tango/apis/c/struct_tango_image_buffer

It says: "Y samples of width x height are first, followed by V samples, with half the stride and half the lines of the Y data, followed by U samples with the same dimensions as the V samples."
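
So the plane offsets work out like this (my reading of that sentence, not code from the docs, for a width x height frame):

// Plane offsets implied by that packing:
int ySize   = width * height;              // Y plane, full resolution
int vOffset = ySize;                       // V plane starts right after Y
int chroma  = (width / 2) * (height / 2);  // size of each chroma plane
int uOffset = vOffset + chroma;            // U plane follows V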

So I think you could create three Unity Textures, each holding one of the Y, U, and V channels, and pass them into a single shader to compute the RGBA color.

More information about YUV images and the conversion: http://en.wikipedia.org/wiki/YUV
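
Here is an untested sketch of that three-texture idea; TextureFormat.Alpha8, the _YTex/_UTex/_VTex property names, and the MonoBehaviour wrapper are my assumptions, and the YUV-to-RGB shader itself is omitted:

using System;
using UnityEngine;

public class YuvPlaneUploader : MonoBehaviour
{
    // Material using a hypothetical YUV -> RGB shader.
    public Material yuvMaterial;

    public void UploadPlanes(byte[] yv12, int width, int height)
    {
        int ySize = width * height;
        int cSize = (width / 2) * (height / 2);

        // Split the packed YV12 buffer into its three planes.
        byte[] yPlane = new byte[ySize];
        byte[] vPlane = new byte[cSize];
        byte[] uPlane = new byte[cSize];
        Buffer.BlockCopy(yv12, 0, yPlane, 0, ySize);
        Buffer.BlockCopy(yv12, ySize, vPlane, 0, cSize);
        Buffer.BlockCopy(yv12, ySize + cSize, uPlane, 0, cSize);

        // One single-channel texture per plane (Alpha8 = 1 byte per pixel).
        Texture2D yTex = new Texture2D(width, height, TextureFormat.Alpha8, false);
        Texture2D vTex = new Texture2D(width / 2, height / 2, TextureFormat.Alpha8, false);
        Texture2D uTex = new Texture2D(width / 2, height / 2, TextureFormat.Alpha8, false);
        yTex.LoadRawTextureData(yPlane); yTex.Apply();
        vTex.LoadRawTextureData(vPlane); vTex.Apply();
        uTex.LoadRawTextureData(uPlane); uTex.Apply();

        yuvMaterial.SetTexture("_YTex", yTex);
        yuvMaterial.SetTexture("_UTex", uTex);
        yuvMaterial.SetTexture("_VTex", vTex);
    }
}

In practice you would create the three textures once and only re-upload the plane data each frame, instead of allocating them per call as this sketch does.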

Answered 2014-11-27T21:15:44.037

I use the following script to display the video overlay on a Unity quad mesh. It works on the Tango tablet, but the fps is low, I guess because of the color camera's resolution (1280 x 720).

using UnityEngine;
using UnityEngine.UI;
using System;
using System.Collections;
using System.Runtime.InteropServices;
using Tango;

public class CameraBackground : VideoOverlayListener
{
    Texture2D backgroundTexture = null;

    // texture data
    bool    isDirty = false;
    byte[]  yuv12   = null;
    int     width;
    int     height;

    private void Update() 
    {
        if (isDirty)
        {
            if (backgroundTexture == null)
                backgroundTexture = new Texture2D (width, height);

            // convert from YV12 to RGB
            int size = (int)(width * height);
            for (int i = 0; i < height; ++i)
            {
                for (int j = 0; j < width; ++j)
                {      
                    byte y = yuv12[i * width + j];
                    byte v = yuv12[(i / 2) * (width / 2) + (j / 2) + size];
                    byte u = yuv12[(i / 2) * (width / 2) + (j / 2) + size + (size / 4)];
                    backgroundTexture.SetPixel(j, height - i - 1, YUV2Color(y, u, v));
                }
            }

            // update texture
            backgroundTexture.Apply(true);
            GetComponent<MeshRenderer> ().material.mainTexture = backgroundTexture;

            isDirty = false;
        }
    }

    protected override void _OnImageAvailable(IntPtr callbackContext,
                                              Tango.TangoEnums.TangoCameraId cameraId, 
                                              Tango.TangoImageBuffer imageBuffer)
    {
        if (cameraId != Tango.TangoEnums.TangoCameraId.TANGO_CAMERA_COLOR)
            return;

        // allocate for the first time
        width = (int)imageBuffer.width;
        height = (int)imageBuffer.height;
        if (yuv12 == null)
            yuv12 = new byte[width * height * 2]; // YV12 only needs 1.5x; 2x leaves headroom

        // copy data in yv12 format
        IntPtr dataPtr = imageBuffer.data;
        int    offset  = 0;
        Int64  stride  = (Int64)imageBuffer.stride;
        for (int i = 0; i < height; ++i, dataPtr = new IntPtr(dataPtr.ToInt64() + stride), offset += width)
            Marshal.Copy(dataPtr, yuv12, offset, width);
        for (int i = 0; i < height / 2; ++i, dataPtr = new IntPtr(dataPtr.ToInt64() + stride / 2), offset += width / 2)
            Marshal.Copy(dataPtr, yuv12, offset, width / 2);
        for (int i = 0; i < height / 2; ++i, dataPtr = new IntPtr(dataPtr.ToInt64() + stride / 2), offset += width / 2)
            Marshal.Copy(dataPtr, yuv12, offset, width / 2);
        isDirty = true;
    }

    public static Color YUV2Color(byte y, byte u, byte v)
    {
        // http://en.wikipedia.org/wiki/YUV
        const float Umax = 0.436f;
        const float Vmax = 0.615f;

        float y_scaled = y / 255.0f;
        float u_scaled = 2 * (u / 255.0f - 0.5f) * Umax;
        float v_scaled = 2 * (v / 255.0f - 0.5f) * Vmax; 

        return new Color(y_scaled + 1.13983f * v_scaled,
                         y_scaled - 0.39465f * u_scaled - 0.58060f * v_scaled,
                         y_scaled + 2.03211f * u_scaled);
    }
}
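
Most of the time in the script above goes into the per-pixel SetPixel calls. A faster variant (an untested sketch that reuses the fields and YUV2Color from the script above) converts into a Color32[] buffer and uploads it in a single SetPixels32 call:

// Reused across frames to avoid per-frame allocation.
Color32[] pixels;

void ConvertFrame()
{
    if (pixels == null)
        pixels = new Color32[width * height];

    int size = width * height;
    for (int i = 0; i < height; ++i)
    {
        for (int j = 0; j < width; ++j)
        {
            byte y = yuv12[i * width + j];
            byte v = yuv12[(i / 2) * (width / 2) + (j / 2) + size];
            byte u = yuv12[(i / 2) * (width / 2) + (j / 2) + size + (size / 4)];
            // Same vertical flip as the SetPixel version.
            pixels[(height - i - 1) * width + j] = YUV2Color(y, u, v);
        }
    }
    backgroundTexture.SetPixels32(pixels);
    backgroundTexture.Apply(false);
}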

Using Unity's WebCamTexture retrieves the color successfully, but it breaks the depth provider.

Answered 2014-12-27T02:39:29.003

You can use WebCamTexture.GetPixel to get the color at a point's position and build a texture from that information. http://docs.unity3d.com/ScriptReference/WebCamTexture.GetPixel.html
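
A minimal sketch of that approach (the class name is mine; note the answer above reports that WebCamTexture breaks the Tango depth provider):

using UnityEngine;

public class WebCamColorProbe : MonoBehaviour
{
    private WebCamTexture webcam;

    void Start()
    {
        // Show the device camera feed on this object's material.
        webcam = new WebCamTexture();
        GetComponent<Renderer>().material.mainTexture = webcam;
        webcam.Play();
    }

    void Update()
    {
        if (webcam.didUpdateThisFrame)
        {
            // Sample the color at the center of the feed.
            Color center = webcam.GetPixel(webcam.width / 2, webcam.height / 2);
            Debug.Log("Center color: " + center);
        }
    }
}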

Answered 2014-12-21T18:38:48.057

You can convert YV12 to a 2D texture with the following code:

private void YV12_To_Texture2D(byte[] data, uint width, uint height, out Texture2D tex)
{
    tex = new Texture2D((int)width, (int)height);

    uint size = width * height;

    for (int i = 0; i < height; i++)
    {
        for (int j = 0; j < width; j++)
        {
            int x_index = j;
            if (j % 2 != 0)
            {
                x_index = j - 1;
            }

            // Get the YUV color for this pixel. Chroma is read here as
            // interleaved V/U pairs at full row stride after the Y plane.
            int yValue = data[(i * width) + j];
            int uValue = data[size + ((i / 2) * width) + x_index + 1];
            int vValue = data[size + ((i / 2) * width) + x_index];

            // Convert the YUV value to RGB.
            float r = yValue + (1.370705f * (vValue - 128));
            float g = yValue - (0.689001f * (vValue - 128)) - (0.337633f * (uValue - 128));
            float b = yValue + (1.732446f * (uValue - 128));

            Color co = new Color();
            co.b = b < 0 ? 0 : (b > 255 ? 1 : b / 255.0f);
            co.g = g < 0 ? 0 : (g > 255 ? 1 : g / 255.0f);
            co.r = r < 0 ? 0 : (r > 255 ? 1 : r / 255.0f);
            co.a = 1.0f;

            tex.SetPixel((int)width - j - 1, (int)height - i - 1, co);
        }
    }
    // Upload the SetPixel results to the GPU.
    tex.Apply();
}
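
A hypothetical call site (rawYv12 holding one frame and the 1280 x 720 size are assumptions):

// Convert one frame and show it on this object's material.
Texture2D tex;
YV12_To_Texture2D(rawYv12, 1280, 720, out tex);
GetComponent<Renderer>().material.mainTexture = tex;
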
Answered 2017-08-24T11:05:30.720